Merge pull request #62 from token-js/pate/commonjs-chalk
docs: Fix type error in docs
RPate97 authored Jul 13, 2024
2 parents 5ffe2d9 + 595533e commit c348f52
Showing 13 changed files with 22 additions and 33 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -43,21 +43,21 @@ const messages: ChatCompletionMessageParam[] = [
 ]
 
 // Call OpenAI
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   provider: 'openai',
   model: 'gpt-4o',
   messages,
 })
 
 // Call Gemini
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   provider: 'gemini',
   model: 'gemini-1.5-pro',
   messages,
 })
 
 // Call Anthropic
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   provider: 'anthropic',
   model: 'claude-2.0',
   messages,
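The hunk above removes the incorrect `ChatCompletionMessageParam[]` annotation from the awaited result of `create()`. A minimal sketch of the corrected pattern follows; the `TokenJS` import/constructor and the OpenAI-style response shape (`choices[0].message`) are assumptions taken from the surrounding docs, not something this diff itself shows.

// Sketch only: `TokenJS` and the response shape are assumed from the docs'
// OpenAI-compatible format, not confirmed by this commit.
import { TokenJS } from 'token.js'

const tokenjs = new TokenJS()

async function main() {
  // Let TypeScript infer the result type; it is a chat completion response,
  // not a ChatCompletionMessageParam[] (that was the docs' type error).
  const result = await tokenjs.chat.completions.create({
    provider: 'openai',
    model: 'gpt-4o',
    messages: [{ role: 'user', content: 'Hello, Token.js!' }],
  })

  // Read the assistant reply from the OpenAI-format response.
  console.log(result.choices[0].message.content)
}

main()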
12 changes: 6 additions & 6 deletions docs/README.md
@@ -70,7 +70,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'openai',
   model: 'gpt-4o',
@@ -100,7 +100,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'anthropic',
   model: 'claude-2.0',
@@ -129,7 +129,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'gemini',
   model: 'gemini-1.5-pro',
@@ -160,7 +160,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'bedrock',
   model: 'amazon.titan-text-express-v1',
@@ -189,7 +189,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'cohere',
   model: 'command-r',
@@ -218,7 +218,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'mistral',
   model: 'mistral-large-2402',
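Every hunk in this file (and in the provider pages below) makes the same one-line change. The reason the old annotation failed to compile: `ChatCompletionMessageParam` describes an input message (the elements of `messages`), while `create()` resolves to a completion object. A hypothetical illustration, with the type imported from the OpenAI SDK as an assumption about where the docs source it from:

import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions'

// Correct use of the type: it annotates the request messages.
const messages: ChatCompletionMessageParam[] = [
  { role: 'user', content: 'Tell me a joke.' },
]

// Incorrect use (what the docs previously showed): the awaited result of
// chat.completions.create() is not an array of input messages, so an
// annotation like the following produces a compile error:
// const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({ ... })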
2 changes: 1 addition & 1 deletion docs/providers/ai21.md
@@ -21,7 +21,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'ai21',
   model: 'jamba-instruct',
2 changes: 1 addition & 1 deletion docs/providers/anthropic.md
@@ -21,7 +21,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'anthropic',
   model: 'claude-2.0',
2 changes: 1 addition & 1 deletion docs/providers/bedrock.md
@@ -23,7 +23,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'bedrock',
   model: 'amazon.titan-text-express-v1',
2 changes: 1 addition & 1 deletion docs/providers/cohere.md
@@ -21,7 +21,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'cohere',
   model: 'command-r',
2 changes: 1 addition & 1 deletion docs/providers/gemini.md
@@ -21,7 +21,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'gemini',
   model: 'gemini-1.5-pro',
2 changes: 1 addition & 1 deletion docs/providers/groq.md
@@ -21,7 +21,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'groq',
   model: 'llama3-70b-8192',
2 changes: 1 addition & 1 deletion docs/providers/mistral.md
@@ -21,7 +21,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'mistral',
   model: 'mistral-large-2402',
2 changes: 1 addition & 1 deletion docs/providers/openai.md
@@ -21,7 +21,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'openai',
   model: 'gpt-4o',
2 changes: 1 addition & 1 deletion docs/providers/perplexity.md
@@ -21,7 +21,7 @@ const messages: ChatCompletionMessageParam = [{
 }]
 
 // Call the create function
-const result: ChatCompletionMessageParam[] = await tokenjs.chat.completions.create({
+const result = await tokenjs.chat.completions.create({
   // Specify the target model and provider
   provider: 'perplexity',
   model: 'llama-3-70b-instruct',
4 changes: 2 additions & 2 deletions package.json
@@ -1,7 +1,7 @@
 {
   "name": "token.js",
   "version": "0.0.2",
-  "description": "",
+  "description": "Integrate 9 LLM providers with a single Typescript SDK using OpenAIs format.",
   "main": "dist/index",
   "types": "dist/index",
   "files": [
@@ -29,7 +29,7 @@
     "@google/generative-ai": "^0.14.1",
     "@mistralai/mistralai": "^0.5.0",
     "axios": "^1.7.2",
-    "chalk": "^5.3.0",
+    "chalk": "^4.1.2",
     "cohere-ai": "^7.10.6",
     "groq-sdk": "^0.5.0",
     "mime-types": "^2.1.35",
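The `chalk` downgrade matches the branch name (`pate/commonjs-chalk`): chalk v5 is published as pure ESM and cannot be `require`d from a CommonJS build, while v4 still ships a CommonJS entry point. A hedged illustration of the failure mode (not code from this repo):

// In a package compiled to CommonJS, this import becomes require('chalk').
// With chalk@5 (ESM-only) that require throws ERR_REQUIRE_ESM at runtime;
// with chalk@4 it resolves normally. Illustrative only.
import chalk from 'chalk'

console.log(chalk.green('token.js loaded'))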
15 changes: 2 additions & 13 deletions pnpm-lock.yaml

Some generated files are not rendered by default.
