Skip to content

Commit

Permalink
Task: Add E2E tests and change docs
Browse files Browse the repository at this point in the history
  • Loading branch information
zya committed Oct 11, 2023
1 parent ae80f83 commit 645afc6
Show file tree
Hide file tree
Showing 10 changed files with 213 additions and 9 deletions.
3 changes: 3 additions & 0 deletions .example.env
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
OPENAI_API_KEY=<Your OpenAI API key>
COHERE_API_KEY=<Your Cohere API key>
ANTHROPIC_API_KEY=<Your Anthropic API key>
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -39,4 +39,6 @@ lib/*.js
test/*.js
*.map

dist
dist

.env
4 changes: 3 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -42,5 +42,7 @@
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
}
},
"jest.jestCommandLine": "npm run test -- ",
"jest.autoRun": "off"
}
41 changes: 39 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
🚅 LiteLLM.js
</h1>
<p align="center">
<p align="center">JavaScript implementation of <a href="https://github.com/BerriAI/litellm">LiteLLM</a>. Work in progress 🚧 🚧 🚧 </p>
<p align="center">JavaScript implementation of <a href="https://github.com/BerriAI/litellm">LiteLLM</a>. </p>
</p>

# Usage
Expand Down Expand Up @@ -31,6 +31,7 @@ for await (const part of stream) {
process.stdout.write(part.choices[0]?.delta?.content || "");
}
```

# Features
We aim to support all features that [LiteLLM python package](https://github.com/BerriAI/litellm) supports.

Expand Down Expand Up @@ -60,4 +61,40 @@ We aim to support all features that [LiteLLM python package](https://github.com/
| [nlp_cloud](https://docs.litellm.ai/docs/providers/nlp_cloud) |||
| [aleph alpha](https://docs.litellm.ai/docs/providers/aleph_alpha) |||
| [petals](https://docs.litellm.ai/docs/providers/petals) |||
| [deepinfra](https://docs.litellm.ai/docs/providers/deepinfra) |||
| [deepinfra](https://docs.litellm.ai/docs/providers/deepinfra) |||

# Development

## Clone the repo
```
git clone https://github.com/zya/litellmjs.git
```

## Install dependencies
```
npm install
```

## Run unit tests
```
npm t
```

## Run E2E tests
First copy the example env file.

```
cp .example.env .env
```

Then fill the variables with your API keys to be able to run the E2E tests.

```
OPENAI_API_KEY=<Your OpenAI API key>
...
```

Then run the command below to run the tests:
```
npm run test:e2e
```
2 changes: 2 additions & 0 deletions jest.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,6 @@ module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
modulePathIgnorePatterns: ['./dist'],
runner: 'groups',
setupFiles: ['dotenv/config'],
};
39 changes: 38 additions & 1 deletion package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

10 changes: 7 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
"description": "JS Implementation of LiteLLM",
"main": "dist/src/index.js",
"scripts": {
"test": "jest",
"test": "jest --group=-e2e",
"test:e2e": "jest --group=e2e",
"build": "tsc",
"prebuild": "npm run test",
"prepublish": "npm run build",
Expand All @@ -18,16 +19,19 @@
"@typescript-eslint/eslint-plugin": "^6.7.4",
"chai": "^4.3.10",
"cohere-ai": "^6.2.2",
"dotenv": "^16.3.1",
"eslint": "^8.50.0",
"eslint-config-prettier": "^9.0.0",
"eslint-config-standard-with-typescript": "^39.1.0",
"eslint-plugin-import": "^2.28.1",
"eslint-plugin-prettier": "^5.0.0",
"eslint-plugin-promise": "^6.1.1",
"jest": "^29.7.0",
"jest-runner-groups": "^2.2.0",
"prettier": "3.0.3",
"ts-jest": "^29.1.1",
"typescript": "^5.2.2"
"typescript": "^5.2.2",
"zod": "^3.22.4"
},
"dependencies": {
"js-tiktoken": "^1.0.7",
Expand All @@ -40,4 +44,4 @@
"type": "git",
"url": "https://github.com/zya/litellmjs.git"
}
}
}
17 changes: 16 additions & 1 deletion src/index.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,20 @@
import { getHandler } from './handlers/getHandler';
import { HandlerParams, Result } from './types';
import {
HandlerParams,
HandlerParamsNotStreaming,
HandlerParamsStreaming,
Result,
ResultNotStreaming,
ResultStreaming,
} from './types';

export async function completion(
params: HandlerParamsNotStreaming,
): Promise<ResultNotStreaming>;

export async function completion(
params: HandlerParamsStreaming,
): Promise<ResultStreaming>;

export async function completion(params: HandlerParams): Promise<Result> {
const handler = getHandler(params.model);
Expand Down
51 changes: 51 additions & 0 deletions tests/e2e.test copy.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import { completion } from '../src';
import { ResultStreaming } from '../src/types';

// Prompt and model list shared by all E2E cases below.
const PROMPT = 'How are you today?';
const MODELS = [
  {
    model: 'gpt-3.5-turbo',
  },
  {
    model: 'ollama/llama2',
  },
  {
    model: 'command-nightly',
  },
];

// E2E calls hit live provider APIs and can be slow; passed as the per-test
// timeout below. NOTE: calling jest.setTimeout() inside a test body does NOT
// apply to the currently running test, so the original calls were no-ops.
const TEST_TIMEOUT_MS = 10000;

/**
 * E2E completion tests — exercise the real provider APIs.
 *
 * Requires provider API keys in the environment (see .example.env / README).
 *
 * @group e2e
 */
describe('e2e', () => {
  it.each(MODELS)(
    'gets response from supported model $model',
    async ({ model }) => {
      const result = await completion({
        model,
        messages: [{ role: 'user', content: PROMPT }],
        stream: false,
      });
      // Minimal signal that the provider responded with a result object.
      expect(result).toBeTruthy();
    },
    TEST_TIMEOUT_MS,
  );

  it.each(MODELS)(
    'gets streaming response from supported model $model',
    async ({ model }) => {
      const result: ResultStreaming = await completion({
        model,
        messages: [{ role: 'user', content: PROMPT }],
        stream: true,
      });

      // Every streamed chunk must carry a (possibly empty) delta payload.
      for await (const chunk of result) {
        expect(chunk.choices[0].delta.content).not.toBeNull();
      }
    },
    TEST_TIMEOUT_MS,
  );
});
51 changes: 51 additions & 0 deletions tests/e2e.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import { completion } from '../src';
import { ResultStreaming } from '../src/types';

// Prompt and model list shared by all E2E cases below.
const PROMPT = 'How are you today?';
const MODELS = [
  {
    model: 'gpt-3.5-turbo',
  },
  {
    model: 'ollama/llama2',
  },
  {
    model: 'command-nightly',
  },
];

// E2E calls hit live provider APIs and can be slow; passed as the per-test
// timeout below. NOTE: calling jest.setTimeout() inside a test body does NOT
// apply to the currently running test, so the original calls were no-ops.
const TEST_TIMEOUT_MS = 10000;

/**
 * E2E completion tests — exercise the real provider APIs.
 *
 * Requires provider API keys in the environment (see .example.env / README).
 *
 * @group e2e
 */
describe('e2e', () => {
  it.each(MODELS)(
    'gets response from supported model $model',
    async ({ model }) => {
      const result = await completion({
        model,
        messages: [{ role: 'user', content: PROMPT }],
        stream: false,
      });
      // Minimal signal that the provider responded with a result object.
      expect(result).toBeTruthy();
    },
    TEST_TIMEOUT_MS,
  );

  it.each(MODELS)(
    'gets streaming response from supported model $model',
    async ({ model }) => {
      const result: ResultStreaming = await completion({
        model,
        messages: [{ role: 'user', content: PROMPT }],
        stream: true,
      });

      // Every streamed chunk must carry a (possibly empty) delta payload.
      for await (const chunk of result) {
        expect(chunk.choices[0].delta.content).not.toBeNull();
      }
    },
    TEST_TIMEOUT_MS,
  );
});

0 comments on commit 645afc6

Please sign in to comment.