feat (provider/togetherai): Add TogetherAI provider. #3781

Closed

wants to merge 13 commits into from
1 change: 1 addition & 0 deletions examples/ai-core/package.json
@@ -12,6 +12,7 @@
"@ai-sdk/groq": "1.0.1",
"@ai-sdk/mistral": "1.0.2",
"@ai-sdk/openai": "1.0.2",
"@ai-sdk/togetherai": "0.0.0",
"@ai-sdk/xai": "1.0.2",
"@opentelemetry/sdk-node": "0.54.2",
"@opentelemetry/auto-instrumentations-node": "0.47.0",
15 changes: 15 additions & 0 deletions examples/ai-core/src/embed/togetherai.ts
@@ -0,0 +1,15 @@
import { togetherai } from '@ai-sdk/togetherai';
import { embed } from 'ai';
import 'dotenv/config';

async function main() {
const { embedding, usage } = await embed({
model: togetherai.textEmbeddingModel('BAAI/bge-base-en-v1.5'),
value: 'sunny day at the beach',
});

console.log(embedding);
console.log(usage);
}

main().catch(console.error);
30 changes: 30 additions & 0 deletions examples/ai-core/src/generate-object/togetherai.ts
@@ -0,0 +1,30 @@
import { togetherai } from '@ai-sdk/togetherai';
import { generateObject } from 'ai';
import 'dotenv/config';
import { z } from 'zod';

async function main() {
const result = await generateObject({
model: togetherai.chatModel('mistralai/Mistral-7B-Instruct-v0.1'),
schema: z.object({
recipe: z.object({
name: z.string(),
ingredients: z.array(
z.object({
name: z.string(),
amount: z.string(),
}),
),
steps: z.array(z.string()),
}),
}),
prompt: 'Generate a lasagna recipe.',
});

console.log(JSON.stringify(result.object.recipe, null, 2));
console.log();
console.log('Token usage:', result.usage);
console.log('Finish reason:', result.finishReason);
}

main().catch(console.error);
60 changes: 60 additions & 0 deletions examples/ai-core/src/generate-text/togetherai-tool-call.ts
@@ -0,0 +1,60 @@
import { togetherai } from '@ai-sdk/togetherai';
import { generateText, tool } from 'ai';
import 'dotenv/config';
import { z } from 'zod';
import { weatherTool } from '../tools/weather-tool';

async function main() {
const result = await generateText({
model: togetherai('meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo'),
maxTokens: 512,
tools: {
weather: weatherTool,
cityAttractions: tool({
parameters: z.object({ city: z.string() }),
}),
},
prompt:
'What is the weather in San Francisco and what attractions should I visit?',
});

// typed tool calls:
for (const toolCall of result.toolCalls) {
switch (toolCall.toolName) {
case 'cityAttractions': {
toolCall.args.city; // string
break;
}

case 'weather': {
toolCall.args.location; // string
break;
}
}
}

// typed tool results for tools with execute method:
for (const toolResult of result.toolResults) {
switch (toolResult.toolName) {
// NOT AVAILABLE (NO EXECUTE METHOD)
// case 'cityAttractions': {
// toolResult.args.city; // string
// toolResult.result;
// break;
// }

case 'weather': {
toolResult.args.location; // string
toolResult.result.location; // string
toolResult.result.temperature; // number
break;
}
}
}

console.log('Text:', result.text);
console.log('Tool Calls:', JSON.stringify(result.toolCalls, null, 2));
console.log('Tool Results:', JSON.stringify(result.toolResults, null, 2));
}

main().catch(console.error);
8 changes: 4 additions & 4 deletions examples/ai-core/src/generate-text/togetherai.ts
@@ -1,16 +1,16 @@
-import { createOpenAI } from '@ai-sdk/openai';
+import { createTogetherAI } from '@ai-sdk/togetherai';
 import { generateText } from 'ai';
 import 'dotenv/config';
 
-const togetherai = createOpenAI({
-  name: 'togetherai',
+const togetherai = createTogetherAI({
   apiKey: process.env.TOGETHER_AI_API_KEY!,
   baseURL: 'https://api.together.xyz/v1/',
 });
 
 async function main() {
   const { text, usage } = await generateText({
-    model: togetherai('google/gemma-2-9b-it'),
+    // model: togetherai('google/gemma-2-9b-it'),
+    model: togetherai('meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo'),
     prompt: 'Invent a new holiday and describe its traditions.',
   });
 
33 changes: 33 additions & 0 deletions examples/ai-core/src/stream-object/togetherai.ts
@@ -0,0 +1,33 @@
import { togetherai } from '@ai-sdk/togetherai';
import { streamObject } from 'ai';
import 'dotenv/config';
import { z } from 'zod';

async function main() {
const result = streamObject({
model: togetherai.chatModel('mistralai/Mistral-7B-Instruct-v0.1'),
schema: z.object({
characters: z.array(
z.object({
name: z.string(),
class: z
.string()
.describe('Character class, e.g. warrior, mage, or thief.'),
description: z.string(),
}),
),
}),
prompt:
'Generate 3 character descriptions for a fantasy role playing game.',
});

for await (const partialObject of result.partialObjectStream) {
console.clear();
console.log(partialObject);
}

console.log();
console.log('Token usage:', await result.usage);
}

main().catch(console.error);
70 changes: 70 additions & 0 deletions examples/ai-core/src/stream-text/togetherai-tool-call.ts
@@ -0,0 +1,70 @@
import { togetherai } from '@ai-sdk/togetherai';
import { streamText, CoreMessage, ToolCallPart, ToolResultPart } from 'ai';
import 'dotenv/config';
import { weatherTool } from '../tools/weather-tool';

const messages: CoreMessage[] = [];

async function main() {
let toolResponseAvailable = false;

const result = streamText({
model: togetherai('meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo'),
maxTokens: 512,
tools: {
weather: weatherTool,
},
toolChoice: 'required',
prompt:
'What is the weather in San Francisco and what attractions should I visit?',
});

let fullResponse = '';
const toolCalls: ToolCallPart[] = [];
const toolResponses: ToolResultPart[] = [];

for await (const delta of result.fullStream) {
switch (delta.type) {
case 'text-delta': {
fullResponse += delta.textDelta;
process.stdout.write(delta.textDelta);
break;
}

case 'tool-call': {
toolCalls.push(delta);

process.stdout.write(
`\nTool call: '${delta.toolName}' ${JSON.stringify(delta.args)}`,
);
break;
}

case 'tool-result': {
toolResponses.push(delta);

process.stdout.write(
`\nTool response: '${delta.toolName}' ${JSON.stringify(
delta.result,
)}`,
);
break;
}
}
}
process.stdout.write('\n\n');

messages.push({
role: 'assistant',
content: [{ type: 'text', text: fullResponse }, ...toolCalls],
});

if (toolResponses.length > 0) {
messages.push({ role: 'tool', content: toolResponses });
}

toolResponseAvailable = toolCalls.length > 0;
console.log('Messages:', messages[0].content);
}

main().catch(console.error);
1 change: 1 addition & 0 deletions packages/openai-compatible/CHANGELOG.md
@@ -0,0 +1 @@
# @ai-sdk/openai-compatible
7 changes: 7 additions & 0 deletions packages/openai-compatible/README.md
@@ -0,0 +1,7 @@
# AI SDK - OpenAI Compatible Provider

This package aims to speed up and simplify the implementation of new
OpenAI-compatible providers by enabling more effective code sharing across
multiple concrete provider implementations.

The primary OpenAI provider is heavier-weight than what this package offers.
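
For a sense of how the shared code is meant to be consumed, here is a minimal sketch of a provider factory built on this package. The `OpenAICompatibleChatLanguageModel` export and its constructor shape are assumptions for illustration only; the actual API surface introduced in this PR may differ.

```ts
// Minimal sketch (assumed API): a TogetherAI-style provider factory that
// delegates chat models to the shared OpenAI-compatible implementation.
import { OpenAICompatibleChatLanguageModel } from '@ai-sdk/openai-compatible';
import { loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils';

export function createTogetherAI(
  options: { apiKey?: string; baseURL?: string } = {},
) {
  const baseURL =
    withoutTrailingSlash(options.baseURL) ?? 'https://api.together.xyz/v1';

  // Resolve the API key lazily, from options or the environment.
  const getHeaders = () => ({
    Authorization: `Bearer ${loadApiKey({
      apiKey: options.apiKey,
      environmentVariableName: 'TOGETHER_AI_API_KEY',
      description: 'TogetherAI',
    })}`,
  });

  // Only the provider name, base URL, and headers are TogetherAI-specific;
  // request/response handling lives in the shared package.
  const createChatModel = (modelId: string) =>
    new OpenAICompatibleChatLanguageModel(
      modelId,
      {},
      {
        provider: 'togetherai.chat',
        url: ({ path }: { path: string }) => `${baseURL}${path}`,
        headers: getHeaders,
      },
    );

  return Object.assign((modelId: string) => createChatModel(modelId), {
    chatModel: createChatModel,
  });
}
```

With a factory along these lines, the chat-model calls in the examples above (`togetherai('meta-llama/...')`, `togetherai.chatModel(...)`) would all be served by the same shared chat implementation.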
70 changes: 70 additions & 0 deletions packages/openai-compatible/package.json
@@ -0,0 +1,70 @@
{
"name": "@ai-sdk/openai-compatible",
"version": "0.0.0",
"license": "Apache-2.0",
"sideEffects": false,
"main": "./dist/index.js",
"module": "./dist/index.mjs",
"types": "./dist/index.d.ts",
"files": [
"dist/**/*",
"internal/dist/**/*",
"CHANGELOG.md"
],
"scripts": {
"build": "tsup",
"build:watch": "tsup --watch",
"clean": "rm -rf dist && rm -rf internal/dist",
"lint": "eslint \"./**/*.ts*\"",
"type-check": "tsc --noEmit",
"prettier-check": "prettier --check \"./**/*.ts*\"",
"test": "pnpm test:node && pnpm test:edge",
"test:edge": "vitest --config vitest.edge.config.js --run",
"test:node": "vitest --config vitest.node.config.js --run"
},
"exports": {
"./package.json": "./package.json",
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.mjs",
"require": "./dist/index.js"
},
"./internal": {
"types": "./internal/dist/index.d.ts",
"import": "./internal/dist/index.mjs",
"module": "./internal/dist/index.mjs",
"require": "./internal/dist/index.js"
}
},
"dependencies": {
"@ai-sdk/provider": "1.0.0",
"@ai-sdk/provider-utils": "2.0.0"
},
"devDependencies": {
"@types/node": "^18",
"@vercel/ai-tsconfig": "workspace:*",
"tsup": "^8",
"typescript": "5.6.3",
"zod": "3.23.8"
},
"peerDependencies": {
"zod": "^3.0.0"
},
"engines": {
"node": ">=18"
},
"publishConfig": {
"access": "public"
},
"homepage": "https://sdk.vercel.ai/docs",
"repository": {
"type": "git",
"url": "git+https://github.com/vercel/ai.git"
},
"bugs": {
"url": "https://github.com/vercel/ai/issues"
},
"keywords": [
"ai"
]
}
65 changes: 65 additions & 0 deletions packages/openai-compatible/src/convert-to-openai-compatible-chat-messages.test.ts
@@ -0,0 +1,65 @@
import { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';

describe('user messages', () => {
it('should convert messages with only a text part to a string content', async () => {
const result = convertToOpenAICompatibleChatMessages([
{
role: 'user',
content: [{ type: 'text', text: 'Hello' }],
},
]);

expect(result).toEqual([{ role: 'user', content: 'Hello' }]);
});
});

describe('tool calls', () => {
it('should stringify arguments to tool calls', () => {
const result = convertToOpenAICompatibleChatMessages([
{
role: 'assistant',
content: [
{
type: 'tool-call',
args: { foo: 'bar123' },
toolCallId: 'quux',
toolName: 'thwomp',
},
],
},
{
role: 'tool',
content: [
{
type: 'tool-result',
toolCallId: 'quux',
toolName: 'thwomp',
result: { oof: '321rab' },
},
],
},
]);

expect(result).toEqual([
{
role: 'assistant',
content: '',
tool_calls: [
{
type: 'function',
id: 'quux',
function: {
name: 'thwomp',
arguments: JSON.stringify({ foo: 'bar123' }),
},
},
],
},
{
role: 'tool',
content: JSON.stringify({ oof: '321rab' }),
tool_call_id: 'quux',
},
]);
});
});