diff --git a/docs/docs/modules/indexes/vector_stores/integrations/opensearch.md b/docs/docs/modules/indexes/vector_stores/integrations/opensearch.md
new file mode 100644
index 000000000000..7a60deb97cb0
--- /dev/null
+++ b/docs/docs/modules/indexes/vector_stores/integrations/opensearch.md
@@ -0,0 +1,112 @@
+---
+sidebar_class_name: node-only
+---
+
+# OpenSearch
+
+:::tip Compatibility
+Only available on Node.js.
+:::
+
+[OpenSearch](https://opensearch.org/) is a fork of [Elasticsearch](https://www.elastic.co/elasticsearch/) that is fully compatible with the Elasticsearch API. Read more about their support for Approximate Nearest Neighbors [here](https://opensearch.org/docs/latest/search-plugins/knn/approximate-knn/).
+
+LangChain.js uses [@opensearch-project/opensearch](https://opensearch.org/docs/latest/clients/javascript/index/) as the client for the OpenSearch vector store.
+
+## Setup
+
+```bash npm2yarn
+npm install -S @opensearch-project/opensearch
+```
+
+You'll also need to have an OpenSearch instance running. You can use the [official Docker image](https://opensearch.org/docs/latest/opensearch/install/docker/) to get started. You can also find an example docker-compose file [here](https://github.com/hwchase17/langchainjs/blob/main/examples/src/indexes/vector_stores/opensearch/docker-compose.yml).
+
+## Index docs
+
+```typescript
+import { Client } from "@opensearch-project/opensearch";
+import { Document } from "langchain/document";
+import { OpenAIEmbeddings } from "langchain/embeddings";
+import { OpenSearchVectorStore } from "langchain/vectorstores";
+
+const client = new Client({
+  nodes: [process.env.OPENSEARCH_URL ?? "http://127.0.0.1:9200"],
+});
+
+const docs = [
+  new Document({
+    metadata: { foo: "bar" },
+    pageContent: "opensearch is also a vector db",
+  }),
+  new Document({
+    metadata: { foo: "bar" },
+    pageContent: "the quick brown fox jumped over the lazy dog",
+  }),
+  new Document({
+    metadata: { baz: "qux" },
+    pageContent: "lorem ipsum dolor sit amet",
+  }),
+  new Document({
+    metadata: { baz: "qux" },
+    pageContent:
+      "OpenSearch is a scalable, flexible, and extensible open-source software suite for search, analytics, and observability applications",
+  }),
+];
+
+await OpenSearchVectorStore.fromDocuments(docs, new OpenAIEmbeddings(), {
+  client,
+  indexName: process.env.OPENSEARCH_INDEX, // Will default to `documents`
+});
+```
+
+## Query docs
+
+```typescript
+import { Client } from "@opensearch-project/opensearch";
+import { VectorDBQAChain } from "langchain/chains";
+import { OpenAIEmbeddings } from "langchain/embeddings";
+import { OpenAI } from "langchain/llms";
+import { OpenSearchVectorStore } from "langchain/vectorstores";
+
+const client = new Client({
+  nodes: [process.env.OPENSEARCH_URL ?? "http://127.0.0.1:9200"],
+});
+
+const vectorStore = new OpenSearchVectorStore(new OpenAIEmbeddings(), {
+  client,
+});
+
+/* Search the vector DB independently with meta filters */
+const results = await vectorStore.similaritySearch("hello world", 1);
+console.log(JSON.stringify(results, null, 2));
+/* [
+    {
+      "pageContent": "Hello world",
+      "metadata": {
+        "id": 2
+      }
+    }
+  ] */
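+
+/* `similaritySearch` also accepts a plain object of metadata key/value pairs
+   as an optional third argument; each pair must exactly match a stored
+   metadata field. Illustrative sketch only: the filter below mirrors the `id`
+   metadata shown in the results above. */
+const filteredResults = await vectorStore.similaritySearch("hello world", 1, {
+  id: 2,
+});
+console.log(JSON.stringify(filteredResults, null, 2));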
+
+/* Use as part of a chain (currently no metadata filters) */
+const model = new OpenAI();
+const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
+  k: 1,
+  returnSourceDocuments: true,
+});
+const response = await chain.call({ query: "What is opensearch?" });
+
+console.log(JSON.stringify(response, null, 2));
+/*
+  {
+    "text": " Opensearch is a collection of technologies that allow search engines to publish search results in a standard format, making it easier for users to search across multiple sites.",
+    "sourceDocuments": [
+      {
+        "pageContent": "What's this?",
+        "metadata": {
+          "id": 3
+        }
+      }
+    ]
+  }
+ */
+```
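+
+## Delete an index
+
+The store also exposes `doesIndexExist` and `deleteIfExists` helpers. A minimal sketch, reusing the `vectorStore` instance from the example above:
+
+```typescript
+// Drop the index (if it exists) so the next import starts from a clean slate.
+await vectorStore.deleteIfExists();
+```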
diff --git a/examples/.env.example b/examples/.env.example
index e82f539a5269..c58141fad70a 100644
--- a/examples/.env.example
+++ b/examples/.env.example
@@ -2,6 +2,7 @@ ANTHROPIC_API_KEY=ADD_YOURS_HERE # https://www.anthropic.com/
 COHERE_API_KEY=ADD_YOURS_HERE # https://dashboard.cohere.ai/api-keys
 HUGGINGFACEHUB_API_KEY=ADD_YOURS_HERE # https://huggingface.co/settings/tokens
 OPENAI_API_KEY=ADD_YOURS_HERE # https://platform.openai.com/account/api-keys
+OPENSEARCH_URL=ADD_YOURS_HERE # http://127.0.0.1:9200
 PINECONE_API_KEY=ADD_YOURS_HERE # https://app.pinecone.io/organizations
 PINECONE_ENVIRONMENT=ADD_YOURS_HERE
 PINECONE_INDEX=ADD_YOURS_HERE # E.g. "trec-question-classification" when using "Cohere Trec" example index
@@ -9,4 +10,4 @@ REPLICATE_API_KEY=ADD_YOURS_HERE # https://replicate.com/account
 SERPAPI_API_KEY=ADD_YOURS_HERE # https://serpapi.com/manage-api-key
 SERPER_API_KEY=ADD_YOURS_HERE # https://serper.dev/api-key
 SUPABASE_PRIVATE_KEY=ADD_YOURS_HERE # https://app.supabase.com/project/YOUR_PROJECT_ID/settings/api
-SUPABASE_URL=ADD_YOURS_HERE # # https://app.supabase.com/project/YOUR_PROJECT_ID/settings/api
\ No newline at end of file
+SUPABASE_URL=ADD_YOURS_HERE # # https://app.supabase.com/project/YOUR_PROJECT_ID/settings/api
diff --git a/examples/package.json b/examples/package.json
index 5161b95a429f..78ebfd7d505c 100644
--- a/examples/package.json
+++ b/examples/package.json
@@ -23,6 +23,7 @@
   "license": "MIT",
   "dependencies": {
     "@getmetal/metal-sdk": "^1.0.12",
+    "@opensearch-project/opensearch": "^2.2.0",
     "@pinecone-database/pinecone": "^0.0.12",
     "@prisma/client": "^4.11.0",
     "@supabase/supabase-js": "^2.10.0",
diff --git a/examples/src/indexes/vector_stores/opensearch/docker-compose.yml b/examples/src/indexes/vector_stores/opensearch/docker-compose.yml
new file mode 100644
index 000000000000..4278767bdf12
--- /dev/null
+++ b/examples/src/indexes/vector_stores/opensearch/docker-compose.yml
@@ -0,0 +1,42 @@
+# Reference:
+# https://opensearch.org/docs/latest/install-and-configure/install-opensearch/docker/#sample-docker-composeyml
+version: '3'
+services:
+  opensearch:
+    image: opensearchproject/opensearch:2.6.0
+    container_name: opensearch
+    environment:
+      - cluster.name=opensearch
+      - node.name=opensearch
+      - discovery.type=single-node
+      - bootstrap.memory_lock=true
+      - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m"
+      - "DISABLE_INSTALL_DEMO_CONFIG=true"
+      - "DISABLE_SECURITY_PLUGIN=true"
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+    volumes:
+      - opensearch_data:/usr/share/opensearch/data
+    ports:
+      - 9200:9200
+      - 9600:9600
+    networks:
+      - opensearch
+  opensearch-dashboards:
+    image: opensearchproject/opensearch-dashboards:latest # Make sure the version of opensearch-dashboards matches the version of opensearch installed on other nodes
+    container_name: opensearch-dashboards
+    ports:
+      - 5601:5601 # Map host port 5601 to container port 5601
+    expose:
+      - "5601" # Expose port 5601 for web access to OpenSearch Dashboards
+    environment:
+      OPENSEARCH_HOSTS: '["http://opensearch:9200"]' # Define the OpenSearch nodes that OpenSearch Dashboards will query
+      DISABLE_SECURITY_DASHBOARDS_PLUGIN: "true" # disables security dashboards plugin in OpenSearch Dashboards
+    networks:
+      - opensearch
+networks:
+  opensearch:
+volumes:
+  opensearch_data:
\ No newline at end of file
diff --git a/examples/src/indexes/vector_stores/opensearch/opensearch.ts b/examples/src/indexes/vector_stores/opensearch/opensearch.ts
new file mode 100644
index 000000000000..4546eed55bf6
--- /dev/null
+++ b/examples/src/indexes/vector_stores/opensearch/opensearch.ts
@@ -0,0 +1,22 @@
+import { Client } from "@opensearch-project/opensearch";
+import { OpenAIEmbeddings } from "langchain/embeddings/openai";
+import { OpenSearchVectorStore } from "langchain/vectorstores/opensearch";
+
+export async function run() {
+  const client = new Client({
+    nodes: [process.env.OPENSEARCH_URL ?? "http://127.0.0.1:9200"],
+  });
+
+  const vectorStore = await OpenSearchVectorStore.fromTexts(
+    ["Hello world", "Bye bye", "What's this?"],
+    [{ id: 2 }, { id: 1 }, { id: 3 }],
+    new OpenAIEmbeddings(),
+    {
+      client,
+      indexName: "documents",
+    }
+  );
+
+  const resultOne = await vectorStore.similaritySearch("Hello world", 1);
+  console.log(resultOne);
+}
diff --git a/langchain/.env.example b/langchain/.env.example
index 92a35cb8f0e5..68627635fcc3 100644
--- a/langchain/.env.example
+++ b/langchain/.env.example
@@ -2,6 +2,7 @@ ANTHROPIC_API_KEY=ADD_YOURS_HERE
 COHERE_API_KEY=ADD_YOURS_HERE
 HUGGINGFACEHUB_API_KEY=ADD_YOURS_HERE
 OPENAI_API_KEY=ADD_YOURS_HERE
+OPENSEARCH_URL=http://127.0.0.1:9200
 PINECONE_API_KEY=ADD_YOURS_HERE
 PINECONE_ENVIRONMENT=ADD_YOURS_HERE
 PINECONE_INDEX=ADD_YOURS_HERE
diff --git a/langchain/.gitignore b/langchain/.gitignore
index 96cf92961784..9fdc5dcea7c4 100644
--- a/langchain/.gitignore
+++ b/langchain/.gitignore
@@ -91,6 +91,9 @@ vectorstores/pinecone.d.ts
 vectorstores/supabase.cjs
 vectorstores/supabase.js
 vectorstores/supabase.d.ts
+vectorstores/opensearch.cjs
+vectorstores/opensearch.js
+vectorstores/opensearch.d.ts
 vectorstores/milvus.cjs
 vectorstores/milvus.js
 vectorstores/milvus.d.ts
diff --git a/langchain/package.json b/langchain/package.json
index 457e2a856beb..c435a2a7ae11 100644
--- a/langchain/package.json
+++ b/langchain/package.json
@@ -103,6 +103,9 @@
     "vectorstores/supabase.cjs",
     "vectorstores/supabase.js",
     "vectorstores/supabase.d.ts",
+    "vectorstores/opensearch.cjs",
+    "vectorstores/opensearch.js",
+    "vectorstores/opensearch.d.ts",
     "vectorstores/milvus.cjs",
     "vectorstores/milvus.js",
     "vectorstores/milvus.d.ts",
@@ -264,6 +267,7 @@
     "@getmetal/metal-sdk": "^1.0.12",
     "@huggingface/inference": "^1.5.1",
     "@jest/globals": "^29.5.0",
+    "@opensearch-project/opensearch": "^2.2.0",
     "@pinecone-database/pinecone": "^0.0.12",
     "@supabase/supabase-js": "^2.10.0",
     "@tsconfig/recommended": "^1.0.2",
@@ -313,6 +317,7 @@
     "@aws-sdk/client-s3": "^3.310.0",
     "@getmetal/metal-sdk": "*",
     "@huggingface/inference": "^1.5.1",
+    "@opensearch-project/opensearch": "*",
     "@pinecone-database/pinecone": "*",
     "@supabase/supabase-js": "^2.10.0",
     "@zilliz/milvus2-sdk-node": "^2.2.0",
@@ -347,6 +352,9 @@
     "@huggingface/inference": {
       "optional": true
     },
+    "@opensearch-project/opensearch": {
+      "optional": true
+    },
     "@pinecone-database/pinecone": {
       "optional": true
     },
@@ -609,6 +617,11 @@
       "import": "./vectorstores/supabase.js",
       "require": "./vectorstores/supabase.cjs"
     },
+    "./vectorstores/opensearch": {
+      "types": "./vectorstores/opensearch.d.ts",
+      "import": "./vectorstores/opensearch.js",
+      "require": "./vectorstores/opensearch.cjs"
+    },
     "./vectorstores/milvus": {
       "types": "./vectorstores/milvus.d.ts",
       "import": "./vectorstores/milvus.js",
       "require": "./vectorstores/milvus.cjs"
     },
diff --git a/langchain/scripts/create-entrypoints.js b/langchain/scripts/create-entrypoints.js
index 63c922a3c935..4d6d9314cd0e 100644
--- a/langchain/scripts/create-entrypoints.js
+++ b/langchain/scripts/create-entrypoints.js
@@ -46,6 +46,7 @@ const entrypoints = {
   "vectorstores/mongo": "vectorstores/mongo",
   "vectorstores/pinecone": "vectorstores/pinecone",
   "vectorstores/supabase": "vectorstores/supabase",
+  "vectorstores/opensearch": "vectorstores/opensearch",
   "vectorstores/milvus": "vectorstores/milvus",
   "vectorstores/prisma": "vectorstores/prisma",
   // text_splitter
@@ -134,6 +135,7 @@ const requiresOptionalDependency = [
   "vectorstores/mongo",
   "vectorstores/pinecone",
   "vectorstores/supabase",
+  "vectorstores/opensearch",
   "vectorstores/milvus",
   "document_loaders/web/cheerio",
   "document_loaders/web/puppeteer",
diff --git a/langchain/src/vectorstores/opensearch.ts b/langchain/src/vectorstores/opensearch.ts
new file mode 100644
index 000000000000..d24e948d8636
--- /dev/null
+++ b/langchain/src/vectorstores/opensearch.ts
@@ -0,0 +1,226 @@
+/* eslint-disable no-instanceof/no-instanceof */
+import { Client, RequestParams, errors } from "@opensearch-project/opensearch";
+import { v4 as uuid } from "uuid";
+import { Embeddings } from "../embeddings/base.js";
+import { Document } from "../document.js";
+import { VectorStore } from "./base.js";
+
+type OpenSearchEngine = "nmslib" | "hnsw";
+type OpenSearchSpaceType = "l2" | "cosinesimil" | "ip";
+
+interface VectorSearchOptions {
+  readonly engine?: OpenSearchEngine;
+  readonly spaceType?: OpenSearchSpaceType;
+  readonly m?: number;
+  readonly efConstruction?: number;
+  readonly efSearch?: number;
+}
+
+export interface OpenSearchClientArgs {
+  readonly client: Client;
+  readonly indexName?: string;
+
+  readonly vectorSearchOptions?: VectorSearchOptions;
+}
+
+export class OpenSearchVectorStore extends VectorStore {
+  private readonly client: Client;
+
+  private readonly indexName: string;
+
+  private readonly engine: OpenSearchEngine;
+
+  private readonly spaceType: OpenSearchSpaceType;
+
+  private readonly efConstruction: number;
+
+  private readonly efSearch: number;
+
+  private readonly m: number;
+
+  constructor(embeddings: Embeddings, args: OpenSearchClientArgs) {
+    super(embeddings, args);
+
+    this.spaceType = args.vectorSearchOptions?.spaceType ?? "l2";
+    this.engine = args.vectorSearchOptions?.engine ?? "nmslib";
+    this.m = args.vectorSearchOptions?.m ?? 16;
+    this.efConstruction = args.vectorSearchOptions?.efConstruction ?? 512;
+    this.efSearch = args.vectorSearchOptions?.efSearch ?? 512;
+
+    this.client = args.client;
+    this.indexName = args.indexName ?? "documents";
+  }
+
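+  /**
+   * Embeds the given documents and bulk-indexes them, creating the target
+   * index with a kNN vector mapping on first use.
+   */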
"documents"; + } + + async addDocuments(documents: Document[]): Promise { + const texts = documents.map(({ pageContent }) => pageContent); + return this.addVectors( + await this.embeddings.embedDocuments(texts), + documents + ); + } + + async addVectors(vectors: number[][], documents: Document[]): Promise { + await this.ensureIndexExists( + vectors[0].length, + this.engine, + this.spaceType, + this.efSearch, + this.efConstruction, + this.m + ); + const operations = vectors.flatMap((embedding, idx) => [ + { + index: { + _index: this.indexName, + _id: uuid(), + }, + }, + { + embedding, + metadata: documents[idx].metadata, + text: documents[idx].pageContent, + }, + ]); + await this.client.bulk({ body: operations }); + await this.client.indices.refresh({ index: this.indexName }); + } + + async similaritySearchVectorWithScore( + query: number[], + k: number, + filter?: object | undefined + ): Promise<[Document, number][]> { + const search: RequestParams.Search = { + index: this.indexName, + body: { + query: { + bool: { + filter: { bool: { must: this.buildMetadataTerms(filter) } }, + must: [ + { + knn: { + embedding: { vector: query, k }, + }, + }, + ], + }, + }, + size: k, + }, + }; + + const { body } = await this.client.search(search); + + return body.hits.hits.map((hit: any) => [ + new Document({ + pageContent: hit._source.text, + metadata: hit._source.metadata, + }), + hit._score, + ]); + } + + static fromTexts( + texts: string[], + metadatas: object[] | object, + embeddings: Embeddings, + args: OpenSearchClientArgs + ): Promise { + const documents = texts.map((text, idx) => { + const metadata = Array.isArray(metadatas) ? metadatas[idx] : metadatas; + return new Document({ pageContent: text, metadata }); + }); + + return OpenSearchVectorStore.fromDocuments(documents, embeddings, args); + } + + static async fromDocuments( + docs: Document[], + embeddings: Embeddings, + dbConfig: OpenSearchClientArgs + ): Promise { + const store = new OpenSearchVectorStore(embeddings, dbConfig); + await store.addDocuments(docs).then(() => store); + return store; + } + + private async ensureIndexExists( + dimension: number, + engine = "nmslib", + spaceType = "l2", + efSearch = 512, + efConstruction = 512, + m = 16 + ): Promise { + const body = { + settings: { + index: { + number_of_shards: 5, + number_of_replicas: 1, + knn: true, + "knn.algo_param.ef_search": efSearch, + }, + }, + mappings: { + dynamic_templates: [ + { + // map all metadata properties to be keyword + "metadata.*": { + match_mapping_type: "*", + mapping: { type: "keyword" }, + }, + }, + ], + properties: { + text: { type: "text" }, + metadata: { type: "object" }, + embedding: { + type: "knn_vector", + dimension, + method: { + name: "hnsw", + engine, + space_type: spaceType, + parameters: { ef_construction: efConstruction, m }, + }, + }, + }, + }, + }; + + const indexExists = await this.doesIndexExist(); + if (indexExists) return; + + await this.client.indices.create({ index: this.indexName, body }); + } + + private buildMetadataTerms( + filter?: object + ): { term: Record }[] { + if (filter == null) return []; + const result = []; + for (const [key, value] of Object.entries(filter)) { + result.push({ term: { [`metadata.${key}`]: value } }); + } + return result; + } + + async doesIndexExist(): Promise { + try { + await this.client.cat.indices({ index: this.indexName }); + return true; + } catch (err: unknown) { + if (err instanceof errors.ResponseError && err.statusCode === 404) { + return false; + } + throw err; + } + } + + async 
+  async deleteIfExists(): Promise<void> {
+    const indexExists = await this.doesIndexExist();
+    if (!indexExists) return;
+
+    await this.client.indices.delete({ index: this.indexName });
+  }
+}
diff --git a/langchain/src/vectorstores/tests/opensearch.int.test.ts b/langchain/src/vectorstores/tests/opensearch.int.test.ts
new file mode 100644
index 000000000000..0df33b96ac64
--- /dev/null
+++ b/langchain/src/vectorstores/tests/opensearch.int.test.ts
@@ -0,0 +1,46 @@
+/* eslint-disable no-process-env */
+import { test, expect } from "@jest/globals";
+import { Client } from "@opensearch-project/opensearch";
+import { OpenAIEmbeddings } from "../../embeddings/openai.js";
+import { OpenSearchVectorStore } from "../opensearch.js";
+import { Document } from "../../document.js";
+
+test("OpenSearchVectorStore integration", async () => {
+  if (!process.env.OPENSEARCH_URL) {
+    throw new Error("OPENSEARCH_URL not set");
+  }
+
+  const client = new Client({
+    nodes: [process.env.OPENSEARCH_URL],
+  });
+
+  const indexName = "test_index";
+
+  const embeddings = new OpenAIEmbeddings(undefined, {
+    baseOptions: { temperature: 0 },
+  });
+  const store = new OpenSearchVectorStore(embeddings, { client, indexName });
+  await store.deleteIfExists();
+
+  expect(store).toBeDefined();
+
+  await store.addDocuments([
+    { pageContent: "hello", metadata: { a: 2 } },
+    { pageContent: "car", metadata: { a: 1 } },
+    { pageContent: "adjective", metadata: { a: 1 } },
+    { pageContent: "hi", metadata: { a: 1 } },
+  ]);
+
+  const results1 = await store.similaritySearch("hello!", 1);
+
+  expect(results1).toHaveLength(1);
+  expect(results1).toEqual([
+    new Document({ metadata: { a: 2 }, pageContent: "hello" }),
+  ]);
+
+  const results2 = await store.similaritySearchWithScore("hello!", 1, {
+    a: 1,
+  });
+
+  expect(results2).toHaveLength(1);
+});
diff --git a/langchain/tsconfig.json b/langchain/tsconfig.json
index f86dc0f08652..8d3389135326 100644
--- a/langchain/tsconfig.json
+++ b/langchain/tsconfig.json
@@ -62,6 +62,7 @@
     "src/vectorstores/mongo.ts",
     "src/vectorstores/pinecone.ts",
     "src/vectorstores/supabase.ts",
+    "src/vectorstores/opensearch.ts",
     "src/vectorstores/milvus.ts",
     "src/vectorstores/prisma.ts",
     "src/text_splitter.ts",
diff --git a/yarn.lock b/yarn.lock
index 60a3e3efd04e..f9bd6c1bd5d9 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -5873,6 +5873,19 @@ __metadata:
   languageName: node
   linkType: hard
 
+"@opensearch-project/opensearch@npm:^2.2.0":
+  version: 2.2.0
+  resolution: "@opensearch-project/opensearch@npm:2.2.0"
+  dependencies:
+    aws4: ^1.11.0
+    debug: ^4.3.1
+    hpagent: ^1.2.0
+    ms: ^2.1.3
+    secure-json-parse: ^2.4.0
+  checksum: cceb5bb2c194a7d4bfab3c1b4a3230ea1457ae8976f39bb9e0c5e0067dc450a418a4649536988f0d48a746d7d3ed2002c32d9fde48dfc3112158e964bafa6e76
+  languageName: node
+  linkType: hard
+
 "@pinecone-database/pinecone@npm:^0.0.12":
   version: 0.0.12
   resolution: "@pinecone-database/pinecone@npm:0.0.12"
@@ -8750,6 +8763,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"aws4@npm:^1.11.0":
+  version: 1.12.0
+  resolution: "aws4@npm:1.12.0"
+  checksum: 68f79708ac7c335992730bf638286a3ee0a645cf12575d557860100767c500c08b30e24726b9f03265d74116417f628af78509e1333575e9f8d52a80edfe8cbc
+  languageName: node
+  linkType: hard
+
 "axe-core@npm:^4.6.2":
   version: 4.6.3
   resolution: "axe-core@npm:4.6.3"
@@ -10994,7 +11014,7 @@
   languageName: node
   linkType: hard
 
-"debug@npm:4, debug@npm:4.3.4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4":
+"debug@npm:4, debug@npm:4.3.4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4":
   version: 4.3.4
   resolution: "debug@npm:4.3.4"
   dependencies:
@@ -12977,6 +12997,7 @@
   resolution: "examples@workspace:examples"
   dependencies:
     "@getmetal/metal-sdk": ^1.0.12
+    "@opensearch-project/opensearch": ^2.2.0
     "@pinecone-database/pinecone": ^0.0.12
     "@prisma/client": ^4.11.0
     "@supabase/supabase-js": ^2.10.0
@@ -14458,6 +14479,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"hpagent@npm:^1.2.0":
+  version: 1.2.0
+  resolution: "hpagent@npm:1.2.0"
+  checksum: b029da695edae438cee4da2a437386f9db4ac27b3ceb7306d02e1b586c9c194741ed2e943c8a222e0cfefaf27ee3f863aca7ba1721b0950a2a19bf25bc0d85e2
+  languageName: node
+  linkType: hard
+
 "html-encoding-sniffer@npm:^2.0.1":
   version: 2.0.1
   resolution: "html-encoding-sniffer@npm:2.0.1"
@@ -17077,6 +17105,7 @@
     "@getmetal/metal-sdk": ^1.0.12
     "@huggingface/inference": ^1.5.1
     "@jest/globals": ^29.5.0
+    "@opensearch-project/opensearch": ^2.2.0
     "@pinecone-database/pinecone": ^0.0.12
     "@supabase/supabase-js": ^2.10.0
     "@tsconfig/recommended": ^1.0.2
@@ -17138,6 +17167,7 @@
     "@aws-sdk/client-s3": ^3.310.0
     "@getmetal/metal-sdk": "*"
     "@huggingface/inference": ^1.5.1
+    "@opensearch-project/opensearch": "*"
     "@pinecone-database/pinecone": "*"
     "@supabase/supabase-js": ^2.10.0
     "@zilliz/milvus2-sdk-node": ^2.2.0
@@ -17167,6 +17197,8 @@
       optional: true
     "@huggingface/inference":
       optional: true
+    "@opensearch-project/opensearch":
+      optional: true
     "@pinecone-database/pinecone":
       optional: true
     "@supabase/supabase-js":
@@ -18377,7 +18409,7 @@
   languageName: node
   linkType: hard
 
-"ms@npm:2.1.3, ms@npm:^2.0.0, ms@npm:^2.1.1":
+"ms@npm:2.1.3, ms@npm:^2.0.0, ms@npm:^2.1.1, ms@npm:^2.1.3":
   version: 2.1.3
   resolution: "ms@npm:2.1.3"
   checksum: aa92de608021b242401676e35cfa5aa42dd70cbdc082b916da7fb925c542173e36bce97ea3e804923fe92c0ad991434e4a38327e15a1b5b5f945d66df615ae6d
@@ -22446,6 +22478,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"secure-json-parse@npm:^2.4.0":
+  version: 2.7.0
+  resolution: "secure-json-parse@npm:2.7.0"
+  checksum: d9d7d5a01fc6db6115744ba23cf9e67ecfe8c524d771537c062ee05ad5c11b64c730bc58c7f33f60bd6877f96b86f0ceb9ea29644e4040cb757f6912d4dd6737
+  languageName: node
+  linkType: hard
+
 "selderee@npm:^0.11.0":
   version: 0.11.0
   resolution: "selderee@npm:0.11.0"