
Commit

Merge pull request #1189 from openai/release-please--branches--master--changes--next--components--openai

release: 4.73.0
lukas-openai authored Nov 20, 2024
2 parents a92cc1d + 1e9391b commit 6c25833
Showing 22 changed files with 104 additions and 436 deletions.
2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -1,3 +1,3 @@
{
".": "4.72.0"
".": "4.73.0"
}
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1,2 +1,2 @@
configured_endpoints: 68
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-2f8ca92b9b1879fd535b685e4767338413fcd533d42f3baac13a9c41da3fce35.yml
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-aa9b01fc0c17eb0cbc200533fc20d6a49c5e764ceaf8049e08b294532be6e9ff.yml
29 changes: 29 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,34 @@
# Changelog

## 4.73.0 (2024-11-20)

Full Changelog: [v4.72.0...v4.73.0](https://github.com/openai/openai-node/compare/v4.72.0...v4.73.0)

### Features

* **api:** add gpt-4o-2024-11-20 model ([#1201](https://github.com/openai/openai-node/issues/1201)) ([0feeafd](https://github.com/openai/openai-node/commit/0feeafd21ba4b6281cc3b9dafa2919b1e2e4d1c3))
* bump model in all example snippets to gpt-4o ([6961c37](https://github.com/openai/openai-node/commit/6961c37f2e581bcc12ec2bbe77df2b9b260fe297))


### Bug Fixes

* **docs:** add missing await to pagination example ([#1190](https://github.com/openai/openai-node/issues/1190)) ([524b9e8](https://github.com/openai/openai-node/commit/524b9e82ae13a3b5093dcfbfd1169a798cf99ab4))


### Chores

* **client:** drop unused devDependency ([#1191](https://github.com/openai/openai-node/issues/1191)) ([8ee6c03](https://github.com/openai/openai-node/commit/8ee6c0335673f2ecf84ea11bdfc990adab607e20))
* **internal:** spec update ([#1195](https://github.com/openai/openai-node/issues/1195)) ([12f9334](https://github.com/openai/openai-node/commit/12f93346857196b93f94865cc3744d769e5e519c))
* **internal:** use reexports not destructuring ([#1181](https://github.com/openai/openai-node/issues/1181)) ([f555dd6](https://github.com/openai/openai-node/commit/f555dd6503bc4ccd4d13f4e1a1d36fbbfd51c369))


### Documentation

* bump models in example snippets to gpt-4o ([#1184](https://github.com/openai/openai-node/issues/1184)) ([4ec4027](https://github.com/openai/openai-node/commit/4ec402790cf3cfbccbf3ef9b61d577b0118977e8))
* change readme title ([#1198](https://github.com/openai/openai-node/issues/1198)) ([e34981c](https://github.com/openai/openai-node/commit/e34981c00f2f0360baffe870bcc38786030671bf))
* improve jsr documentation ([#1197](https://github.com/openai/openai-node/issues/1197)) ([ebdb4f7](https://github.com/openai/openai-node/commit/ebdb4f72cc01afbee649aca009fdaf413e61c507))
* **readme:** fix incorrect fileBatches.uploadAndPoll params ([#1200](https://github.com/openai/openai-node/issues/1200)) ([3968ef1](https://github.com/openai/openai-node/commit/3968ef1c4fa860ff246e0e803808752b261c18ce))

## 4.72.0 (2024-11-12)

Full Changelog: [v4.71.1...v4.72.0](https://github.com/openai/openai-node/compare/v4.71.1...v4.72.0)
45 changes: 25 additions & 20 deletions README.md
@@ -1,4 +1,4 @@
# OpenAI Node API Library
# OpenAI TypeScript and JavaScript API Library

[![NPM version](https://img.shields.io/npm/v/openai.svg)](https://npmjs.org/package/openai) ![npm bundle size](https://img.shields.io/bundlephobia/minzip/openai) [![JSR Version](https://jsr.io/badges/@openai/openai)](https://jsr.io/@openai/openai)

@@ -14,16 +14,21 @@ To learn how to use the OpenAI API, check out our [API Reference](https://platfo
npm install openai
```

You can also import from jsr:
### Installation from JSR

<!-- x-release-please-start-version -->
```sh
deno add jsr:@openai/openai
npx jsr add @openai/openai
```

These commands will make the module importable from the `@openai/openai` scope:

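For reference, once installed from JSR the module is imported from that scope; a minimal sketch (the import path is the only assumption here):

```ts
// Works after `deno add jsr:@openai/openai` or `npx jsr add @openai/openai`.
import OpenAI from '@openai/openai';
```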
You can also [import directly from JSR](https://jsr.io/docs/using-packages#importing-with-jsr-specifiers) without an install step if you're using the Deno JavaScript runtime:

```ts
import OpenAI from 'jsr:@openai/openai';
```

<!-- x-release-please-end -->

## Usage

The full API of this library can be found in [api.md file](api.md) along with many [code examples](https://github.com/openai/openai-node/tree/master/examples). The code below shows how to get started using the chat completions API.
@@ -39,7 +44,7 @@ const client = new OpenAI({
async function main() {
const chatCompletion = await client.chat.completions.create({
messages: [{ role: 'user', content: 'Say this is a test' }],
model: 'gpt-3.5-turbo',
model: 'gpt-4o',
});
}

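The hunk above only shows the model being bumped from `gpt-3.5-turbo` to `gpt-4o`; for context, a self-contained sketch of the getting-started snippet (assuming `OPENAI_API_KEY` is set in the environment, with a `console.log` added for illustration):

```ts
import OpenAI from 'openai';

const client = new OpenAI({
  apiKey: process.env['OPENAI_API_KEY'], // this is the default and can be omitted
});

async function main() {
  const chatCompletion = await client.chat.completions.create({
    messages: [{ role: 'user', content: 'Say this is a test' }],
    model: 'gpt-4o',
  });
  console.log(chatCompletion.choices[0]?.message.content);
}

main();
```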
@@ -57,7 +62,7 @@ const client = new OpenAI();

async function main() {
const stream = await client.chat.completions.create({
model: 'gpt-4',
model: 'gpt-4o',
messages: [{ role: 'user', content: 'Say this is a test' }],
stream: true,
});
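Likewise, the streaming snippet around this hunk, reconstructed as a runnable sketch:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const stream = await client.chat.completions.create({
    model: 'gpt-4o',
    messages: [{ role: 'user', content: 'Say this is a test' }],
    stream: true,
  });
  // Each chunk carries a delta; print tokens as they arrive.
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content || '');
  }
}

main();
```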
@@ -87,7 +92,7 @@ const client = new OpenAI({
async function main() {
const params: OpenAI.Chat.ChatCompletionCreateParams = {
messages: [{ role: 'user', content: 'Say this is a test' }],
model: 'gpt-3.5-turbo',
model: 'gpt-4o',
};
const chatCompletion: OpenAI.Chat.ChatCompletion = await client.chat.completions.create(params);
}
@@ -128,7 +133,7 @@ const fileList = [
...
];

const batch = await openai.vectorStores.fileBatches.uploadAndPoll(vectorStore.id, fileList);
const batch = await openai.vectorStores.fileBatches.uploadAndPoll(vectorStore.id, {files: fileList});
```
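The fix passes the file list under a `files` key instead of as a bare array. A minimal sketch of the corrected call, using the README's `openai.vectorStores` path (depending on the SDK version the helper may live under `openai.beta.vectorStores`) and a hypothetical vector store ID:

```ts
import fs from 'fs';
import OpenAI from 'openai';

const openai = new OpenAI();

// Hypothetical vector store ID and local files, purely for illustration.
const vectorStoreId = 'vs_abc123';
const fileList = [fs.createReadStream('./notes.txt'), fs.createReadStream('./report.pdf')];

// The corrected shape: an object with a `files` key rather than a bare array.
const batch = await openai.vectorStores.fileBatches.uploadAndPoll(vectorStoreId, { files: fileList });
console.log(batch.status);
```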

### Streaming Helpers
@@ -173,7 +178,7 @@ const openai = new OpenAI();

async function main() {
const stream = await openai.beta.chat.completions.stream({
model: 'gpt-4',
model: 'gpt-4o',
messages: [{ role: 'user', content: 'Say this is a test' }],
stream: true,
});
@@ -226,7 +231,7 @@ const client = new OpenAI();
async function main() {
const runner = client.beta.chat.completions
.runTools({
model: 'gpt-3.5-turbo',
model: 'gpt-4o',
messages: [{ role: 'user', content: 'How is the weather this week?' }],
tools: [
{
@@ -333,7 +338,7 @@ a subclass of `APIError` will be thrown:
```ts
async function main() {
const job = await client.fineTuning.jobs
.create({ model: 'gpt-3.5-turbo', training_file: 'file-abc123' })
.create({ model: 'gpt-4o', training_file: 'file-abc123' })
.catch(async (err) => {
if (err instanceof OpenAI.APIError) {
console.log(err.status); // 400
@@ -368,7 +373,7 @@ Error codes are as followed:
All object responses in the SDK provide a `_request_id` property which is added from the `x-request-id` response header so that you can quickly log failing requests and report them back to OpenAI.

```ts
const completion = await client.chat.completions.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-4' });
const completion = await client.chat.completions.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-4o' });
console.log(completion._request_id) // req_123
```

@@ -392,7 +397,7 @@ const azureADTokenProvider = getBearerTokenProvider(credential, scope);
const openai = new AzureOpenAI({ azureADTokenProvider });

const result = await openai.chat.completions.create({
model: 'gpt-4-1106-preview',
model: 'gpt-4o',
messages: [{ role: 'user', content: 'Say hello!' }],
});

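The diff omits the setup lines above this hunk; a self-contained sketch of the Azure flow (the `apiVersion` value and the `AZURE_OPENAI_ENDPOINT` environment variable are deployment-specific assumptions):

```ts
import { AzureOpenAI } from 'openai';
import { DefaultAzureCredential, getBearerTokenProvider } from '@azure/identity';

const credential = new DefaultAzureCredential();
const scope = 'https://cognitiveservices.azure.com/.default';
const azureADTokenProvider = getBearerTokenProvider(credential, scope);

// `apiVersion` is illustrative; the endpoint is read from AZURE_OPENAI_ENDPOINT.
const openai = new AzureOpenAI({ azureADTokenProvider, apiVersion: '2024-08-01-preview' });

const result = await openai.chat.completions.create({
  model: 'gpt-4o',
  messages: [{ role: 'user', content: 'Say hello!' }],
});

console.log(result.choices[0]?.message?.content);
```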
@@ -415,7 +420,7 @@ const client = new OpenAI({
});

// Or, configure per-request:
await client.chat.completions.create({ messages: [{ role: 'user', content: 'How can I get the name of the current day in Node.js?' }], model: 'gpt-3.5-turbo' }, {
await client.chat.completions.create({ messages: [{ role: 'user', content: 'How can I get the name of the current day in JavaScript?' }], model: 'gpt-4o' }, {
maxRetries: 5,
});
```
@@ -432,7 +437,7 @@ const client = new OpenAI({
});

// Override per-request:
await client.chat.completions.create({ messages: [{ role: 'user', content: 'How can I list all files in a directory using Python?' }], model: 'gpt-3.5-turbo' }, {
await client.chat.completions.create({ messages: [{ role: 'user', content: 'How can I list all files in a directory using Python?' }], model: 'gpt-4o' }, {
timeout: 5 * 1000,
});
```
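Both of the hunks above only show the per-request overrides; the client-level defaults they sit alongside look roughly like this sketch (the values are illustrative):

```ts
import OpenAI from 'openai';

// Client-wide defaults; illustrative values.
const client = new OpenAI({
  maxRetries: 0, // default is 2
  timeout: 20 * 1000, // 20 seconds (the default is 10 minutes)
});

// Per-request overrides, as in the updated snippets above:
await client.chat.completions.create(
  { messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-4o' },
  { maxRetries: 5, timeout: 5 * 1000 },
);
```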
@@ -467,7 +472,7 @@ for (const fineTuningJob of page.data) {

// Convenience methods are provided for manually paginating:
while (page.hasNextPage()) {
page = page.getNextPage();
page = await page.getNextPage();
// ...
}
```
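The fix adds the missing `await` on `getNextPage()`. In context, the full pagination pattern looks roughly like this sketch:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  // Auto-pagination: iterate every fine-tuning job across all pages.
  for await (const fineTuningJob of client.fineTuning.jobs.list({ limit: 20 })) {
    console.log(fineTuningJob.id);
  }

  // Manual pagination, with the corrected `await`:
  let page = await client.fineTuning.jobs.list({ limit: 20 });
  for (const fineTuningJob of page.data) {
    console.log(fineTuningJob.id);
  }
  while (page.hasNextPage()) {
    page = await page.getNextPage();
    // process page.data ...
  }
}

main();
```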
@@ -485,13 +490,13 @@ You can also use the `.withResponse()` method to get the raw `Response` along wi
const client = new OpenAI();

const response = await client.chat.completions
.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-3.5-turbo' })
.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-4o' })
.asResponse();
console.log(response.headers.get('X-My-Header'));
console.log(response.statusText); // access the underlying Response object

const { data: chatCompletion, response: raw } = await client.chat.completions
.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-3.5-turbo' })
.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-4o' })
.withResponse();
console.log(raw.headers.get('X-My-Header'));
console.log(chatCompletion);
@@ -622,7 +627,7 @@ TypeScript >= 4.5 is supported.
The following runtimes are supported:

- Node.js 18 LTS or later ([non-EOL](https://endoflife.date/nodejs)) versions.
- Deno v1.28.0 or higher, using `import OpenAI from "npm:openai"`.
- Deno v1.28.0 or higher.
- Bun 1.0 or later.
- Cloudflare Workers.
- Vercel Edge Runtime.
2 changes: 1 addition & 1 deletion jsr.json
@@ -1,6 +1,6 @@
{
"name": "@openai/openai",
"version": "4.72.0",
"version": "4.73.0",
"exports": "./index.ts",
"publish": {
"exclude": [
3 changes: 1 addition & 2 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "openai",
"version": "4.72.0",
"version": "4.73.0",
"description": "The official TypeScript library for the OpenAI API",
"author": "OpenAI <[email protected]>",
"types": "dist/index.d.ts",
@@ -47,7 +47,6 @@
"prettier": "^3.0.0",
"prettier-2": "npm:prettier@^2",
"ts-jest": "^29.1.0",
"ts-morph": "^19.0.0",
"ts-node": "^10.5.0",
"tsc-multi": "^1.1.0",
"tsconfig-paths": "^4.0.0",
2 changes: 1 addition & 1 deletion scripts/build
@@ -32,7 +32,7 @@ npm exec tsc-multi
# copy over handwritten .js/.mjs/.d.ts files
cp src/_shims/*.{d.ts,js,mjs,md} dist/_shims
cp src/_shims/auto/*.{d.ts,js,mjs} dist/_shims/auto
# we need to add exports = module.exports = OpenAI Node to index.js;
# we need to add exports = module.exports = OpenAI to index.js;
# No way to get that from index.ts because it would cause compile errors
# when building .mjs
node scripts/utils/fix-index-exports.cjs
37 changes: 17 additions & 20 deletions src/index.ts
@@ -306,25 +306,6 @@ export class OpenAI extends Core.APIClient {
static fileFromPath = Uploads.fileFromPath;
}

export {
OpenAIError,
APIError,
APIConnectionError,
APIConnectionTimeoutError,
APIUserAbortError,
NotFoundError,
ConflictError,
RateLimitError,
BadRequestError,
AuthenticationError,
InternalServerError,
PermissionDeniedError,
UnprocessableEntityError,
} from './error';

export import toFile = Uploads.toFile;
export import fileFromPath = Uploads.fileFromPath;

OpenAI.Completions = Completions;
OpenAI.Chat = Chat;
OpenAI.Embeddings = Embeddings;
@@ -340,7 +321,6 @@ OpenAI.Beta = Beta;
OpenAI.Batches = Batches;
OpenAI.BatchesPage = BatchesPage;
OpenAI.Uploads = UploadsAPIUploads;

export declare namespace OpenAI {
export type RequestOptions = Core.RequestOptions;

@@ -664,4 +644,21 @@ const API_KEY_SENTINEL = '<Missing Key>';

// ---------------------- End Azure ----------------------

export { toFile, fileFromPath } from './uploads';
export {
OpenAIError,
APIError,
APIConnectionError,
APIConnectionTimeoutError,
APIUserAbortError,
NotFoundError,
ConflictError,
RateLimitError,
BadRequestError,
AuthenticationError,
InternalServerError,
PermissionDeniedError,
UnprocessableEntityError,
} from './error';

export default OpenAI;
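Because this change only swaps destructured exports for re-exports at the end of the module, consumer imports are unaffected; a usage sketch (the file contents and `purpose` are illustrative):

```ts
// Error classes and upload helpers remain importable from the package root.
import OpenAI, { APIError, toFile } from 'openai';

const client = new OpenAI();

async function main() {
  try {
    await client.files.create({
      file: await toFile(Buffer.from('{"custom_id": "task-1"}\n'), 'input.jsonl'),
      purpose: 'batch',
    });
  } catch (err) {
    if (err instanceof APIError) {
      console.log(err.status, err.name);
    }
  }
}

main();
```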
2 changes: 1 addition & 1 deletion src/resources/batches.ts
@@ -232,7 +232,7 @@ export interface BatchCreateParams {
* Your input file must be formatted as a
* [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input),
* and must be uploaded with the purpose `batch`. The file can contain up to 50,000
* requests, and can be up to 100 MB in size.
* requests, and can be up to 200 MB in size.
*/
input_file_id: string;

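For context, the documented parameter is used when creating a batch; a minimal sketch with a placeholder file ID:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

// `file-abc123` stands in for a previously uploaded `.jsonl` file
// (purpose `batch`, up to 50,000 requests / 200 MB as documented above).
const batch = await client.batches.create({
  input_file_id: 'file-abc123',
  endpoint: '/v1/chat/completions',
  completion_window: '24h',
});

console.log(batch.id, batch.status);
```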
1 change: 1 addition & 0 deletions src/resources/chat/chat.ts
@@ -49,6 +49,7 @@ export type ChatModel =
| 'o1-mini'
| 'o1-mini-2024-09-12'
| 'gpt-4o'
| 'gpt-4o-2024-11-20'
| 'gpt-4o-2024-08-06'
| 'gpt-4o-2024-05-13'
| 'gpt-4o-realtime-preview'
5 changes: 3 additions & 2 deletions src/resources/chat/completions.ts
@@ -250,8 +250,9 @@ export interface ChatCompletionAudioParam {
format: 'wav' | 'mp3' | 'flac' | 'opus' | 'pcm16';

/**
* The voice the model uses to respond. Supported voices are `alloy`, `ash`,
* `ballad`, `coral`, `echo`, `sage`, `shimmer`, and `verse`.
* The voice the model uses to respond. Supported voices are `ash`, `ballad`,
* `coral`, `sage`, and `verse` (also supported but not recommended are `alloy`,
* `echo`, and `shimmer`; these voices are less expressive).
*/
voice: 'alloy' | 'ash' | 'ballad' | 'coral' | 'echo' | 'sage' | 'shimmer' | 'verse';
}
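As a sketch of where this parameter is used — the model name, voice, and format below are assumptions chosen for illustration:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

// `gpt-4o-audio-preview` is an assumed audio-capable model; `audio` maps to
// the `ChatCompletionAudioParam` fields documented above.
const completion = await client.chat.completions.create({
  model: 'gpt-4o-audio-preview',
  modalities: ['text', 'audio'],
  audio: { voice: 'ash', format: 'wav' },
  messages: [{ role: 'user', content: 'Say this is a test' }],
});

console.log(completion.choices[0]?.message?.audio?.id);
```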
2 changes: 1 addition & 1 deletion src/resources/files.ts
@@ -25,7 +25,7 @@ export class Files extends APIResource {
* [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input)
* models.
*
* The Batch API only supports `.jsonl` files up to 100 MB in size. The input also
* The Batch API only supports `.jsonl` files up to 200 MB in size. The input also
* has a specific required
* [format](https://platform.openai.com/docs/api-reference/batch/request-input).
*
2 changes: 1 addition & 1 deletion src/version.ts
@@ -1 +1 @@
export const VERSION = '4.72.0'; // x-release-please-version
export const VERSION = '4.73.0'; // x-release-please-version
2 changes: 1 addition & 1 deletion tests/api-resources/audio/transcriptions.test.ts
@@ -31,7 +31,7 @@ describe('resource transcriptions', () => {
prompt: 'prompt',
response_format: 'json',
temperature: 0,
timestamp_granularities: ['word', 'segment'],
timestamp_granularities: ['word'],
});
});
});
8 changes: 3 additions & 5 deletions tests/api-resources/beta/assistants.test.ts
@@ -30,15 +30,13 @@ describe('resource assistants', () => {
response_format: 'auto',
temperature: 1,
tool_resources: {
code_interpreter: { file_ids: ['string', 'string', 'string'] },
code_interpreter: { file_ids: ['string'] },
file_search: {
vector_store_ids: ['string'],
vector_stores: [
{ chunking_strategy: { type: 'auto' }, file_ids: ['string', 'string', 'string'], metadata: {} },
],
vector_stores: [{ chunking_strategy: { type: 'auto' }, file_ids: ['string'], metadata: {} }],
},
},
tools: [{ type: 'code_interpreter' }, { type: 'code_interpreter' }, { type: 'code_interpreter' }],
tools: [{ type: 'code_interpreter' }],
top_p: 1,
});
});
15 changes: 1 addition & 14 deletions tests/api-resources/beta/threads/messages.test.ts
@@ -27,20 +27,7 @@ describe('resource messages', () => {
const response = await client.beta.threads.messages.create('thread_id', {
content: 'string',
role: 'user',
attachments: [
{
file_id: 'file_id',
tools: [{ type: 'code_interpreter' }, { type: 'code_interpreter' }, { type: 'code_interpreter' }],
},
{
file_id: 'file_id',
tools: [{ type: 'code_interpreter' }, { type: 'code_interpreter' }, { type: 'code_interpreter' }],
},
{
file_id: 'file_id',
tools: [{ type: 'code_interpreter' }, { type: 'code_interpreter' }, { type: 'code_interpreter' }],
},
],
attachments: [{ file_id: 'file_id', tools: [{ type: 'code_interpreter' }] }],
metadata: {},
});
});
