Skip to content

Commit

Permalink
Merge pull request #273 from openai/release-please--branches--master--changes--next--components--openai
Browse files Browse the repository at this point in the history

chore(next => master): release 4.4.0
  • Loading branch information
schnerd authored Sep 1, 2023
2 parents 3f85654 + fca6fd9 commit b722d3f
Show file tree
Hide file tree
Showing 19 changed files with 209 additions and 95 deletions.
21 changes: 0 additions & 21 deletions .github/workflows/open-release-prs.yml

This file was deleted.

2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "4.3.1"
".": "4.4.0"
}
16 changes: 16 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,21 @@
# Changelog

## 4.4.0 (2023-09-01)

Full Changelog: [v4.3.1...v4.4.0](https://github.com/openai/openai-node/compare/v4.3.1...v4.4.0)

### Features

* **package:** add Bun export map ([#269](https://github.com/openai/openai-node/issues/269)) ([16f239c](https://github.com/openai/openai-node/commit/16f239c6b4e8526371b01c511d2e0ebba4c5c8c6))
* re-export chat completion types at the top level ([#268](https://github.com/openai/openai-node/issues/268)) ([1a71a39](https://github.com/openai/openai-node/commit/1a71a39421828fdde7b8605094363a5047d2fdc9))
* **tests:** unskip multipart form data tests ([#275](https://github.com/openai/openai-node/issues/275)) ([47d3e18](https://github.com/openai/openai-node/commit/47d3e18a3ee987d04b958dad1a51821ad5472d54))
* **types:** fix ambiguous auto-import for chat completions params ([#266](https://github.com/openai/openai-node/issues/266)) ([19c99fb](https://github.com/openai/openai-node/commit/19c99fb268d6d6c7fc7aaa66475c35f45d12b4bd))


### Bug Fixes

* revert import change which triggered circular import bug in webpack ([#274](https://github.com/openai/openai-node/issues/274)) ([6534e36](https://github.com/openai/openai-node/commit/6534e3620d7e2983e98b42cf95fa966deab1ab1d))

## 4.3.1 (2023-08-29)

Full Changelog: [v4.3.0...v4.3.1](https://github.com/openai/openai-node/compare/v4.3.0...v4.3.1)
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ const openai = new OpenAI({
});

async function main() {
const params: OpenAI.Chat.CompletionCreateParams = {
const params: OpenAI.Chat.ChatCompletionCreateParams = {
messages: [{ role: 'user', content: 'Say this is a test' }],
model: 'gpt-3.5-turbo',
};
Expand Down
1 change: 1 addition & 0 deletions api.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Types:
- <code><a href="./src/resources/chat/completions.ts">ChatCompletion</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionChunk</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionMessage</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionMessageParam</a></code>
- <code><a href="./src/resources/chat/completions.ts">CreateChatCompletionRequestMessage</a></code>

Methods:
Expand Down
100 changes: 93 additions & 7 deletions ecosystem-tests/bun/openai.test.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,26 @@
import OpenAI, { toFile } from 'openai';
import fs from 'fs';
import { distance } from 'fastest-levenshtein';
import { test, expect } from 'bun:test';

const url = 'https://audio-samples.github.io/samples/mp3/blizzard_biased/sample-1.mp3';
const filename = 'sample-1.mp3';

const correctAnswer =
'It was anxious to find him no one that expectation of a man who were giving his father enjoyment. But he was avoided in sight in the minister to which indeed,';
const model = 'whisper-1';

const client = new OpenAI();

async function typeTests() {
// @ts-expect-error this should error if the `Uploadable` type was resolved correctly
await client.audio.transcriptions.create({ file: { foo: true }, model: 'whisper-1' });
// @ts-expect-error this should error if the `Uploadable` type was resolved correctly
await client.audio.transcriptions.create({ file: null, model: 'whisper-1' });
// @ts-expect-error this should error if the `Uploadable` type was resolved correctly
await client.audio.transcriptions.create({ file: 'test', model: 'whisper-1' });
}

function expectSimilar(received: any, comparedTo: string, expectedDistance: number) {
const message = () =>
[
Expand Down Expand Up @@ -38,11 +55,80 @@ test(`streaming works`, async function () {
expectSimilar(chunks.map((c) => c.choices[0]?.delta.content || '').join(''), 'This is a test', 10);
});

test(`toFile rejects`, async function () {
try {
await toFile(new TextEncoder().encode('foo'), 'foo.txt');
throw new Error(`expected toFile to reject`);
} catch (error) {
expect((error as any).message).toEqual(`file uploads aren't supported in this environment yet`);
}
// @ts-ignore avoid DOM lib for testing purposes
if (typeof File !== 'undefined') {
test.todo('handles builtinFile', async function () {
const file = await fetch(url)
.then((x) => x.arrayBuffer())
// @ts-ignore avoid DOM lib for testing purposes
.then((x) => new File([x], filename));

const result = await client.audio.transcriptions.create({ file, model });
expectSimilar(result.text, correctAnswer, 12);
});
}

test.todo('handles Response', async function () {
const file = await fetch(url);

const result = await client.audio.transcriptions.create({ file, model });
expectSimilar(result.text, correctAnswer, 12);
});

test.todo('handles fs.ReadStream', async function () {
const result = await client.audio.transcriptions.create({
file: fs.createReadStream('sample1.mp3'),
model,
});
expectSimilar(result.text, correctAnswer, 12);
});

const fineTune = `{"prompt": "<prompt text>", "completion": "<ideal generated text>"}`;

// @ts-ignore avoid DOM lib for testing purposes
if (typeof Blob !== 'undefined') {
test.todo('toFile handles builtin Blob', async function () {
const result = await client.files.create({
file: await toFile(
// @ts-ignore avoid DOM lib for testing purposes
new Blob([new TextEncoder().encode(fineTune)]),
'finetune.jsonl',
),
purpose: 'fine-tune',
});
expect(result.status).toEqual('uploaded');
});
}
test.todo('toFile handles Uint8Array', async function () {
const result = await client.files.create({
file: await toFile(
// @ts-ignore avoid DOM lib for testing purposes
new TextEncoder().encode(fineTune),
'finetune.jsonl',
),
purpose: 'fine-tune',
});
expect(result.status).toEqual('uploaded');
});
test.todo('toFile handles ArrayBuffer', async function () {
const result = await client.files.create({
file: await toFile(
// @ts-ignore avoid DOM lib for testing purposes
new TextEncoder().encode(fineTune).buffer,
'finetune.jsonl',
),
purpose: 'fine-tune',
});
expect(result.status).toEqual('uploaded');
});
test.todo('toFile handles DataView', async function () {
const result = await client.files.create({
file: await toFile(
// @ts-ignore avoid DOM lib for testing purposes
new DataView(new TextEncoder().encode(fineTune).buffer),
'finetune.jsonl',
),
purpose: 'fine-tune',
});
expect(result.status).toEqual('uploaded');
});
6 changes: 2 additions & 4 deletions ecosystem-tests/bun/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,7 @@
},
"devDependencies": {
"fastest-levenshtein": "^1.0.16",
"bun-types": "latest"
},
"peerDependencies": {
"typescript": "^5.0.0"
"bun-types": "latest",
"typescript": "^5.1.0"
}
}
12 changes: 6 additions & 6 deletions ecosystem-tests/cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -268,7 +268,7 @@ async function main() {
console.error('\n');

try {
await withRetry(fn, project, state.retry)
await withRetry(fn, project, state.retry);
console.error(`✅ - Successfully ran ${project}`);
} catch (err) {
if (err && (err as any).shortMessage) {
Expand All @@ -294,13 +294,13 @@ async function main() {
async function withRetry(fn: () => Promise<void>, identifier: string, retryAmount: number): Promise<void> {
do {
try {
return await fn()
return await fn();
} catch (err) {
console.error(`${identifier} failed due to ${err}; retries left ${retryAmount}`)
retryAmount--;
if (retryAmount === 0) throw err;
console.error(`${identifier} failed due to ${err}; retries left ${retryAmount}`);
}

retryAmount--;
} while (retryAmount > 0)
} while (retryAmount > 0);
}

function centerPad(text: string, width = text.length, char = ' '): string {
Expand Down
22 changes: 12 additions & 10 deletions examples/chat-params-types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ const openai = new OpenAI();
async function main() {
// ---------------- Explicit non-streaming params ------------

const params: OpenAI.Chat.CompletionCreateParams = {
const params: OpenAI.Chat.ChatCompletionCreateParams = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
};
Expand All @@ -18,7 +18,7 @@ async function main() {

// ---------------- Explicit streaming params ----------------

const streamingParams: OpenAI.Chat.CompletionCreateParams = {
const streamingParams: OpenAI.Chat.ChatCompletionCreateParams = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
stream: true,
Expand All @@ -32,12 +32,12 @@ async function main() {

// ---------------- Explicit (non)streaming types ----------------

const params1: OpenAI.Chat.CompletionCreateParamsNonStreaming = {
const params1: OpenAI.Chat.ChatCompletionCreateParamsNonStreaming = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
};

const params2: OpenAI.Chat.CompletionCreateParamsStreaming = {
const params2: OpenAI.Chat.ChatCompletionCreateParamsStreaming = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
stream: true,
Expand All @@ -52,9 +52,9 @@ async function main() {
// `role: string` is not assignable.
const streamingParams2 = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
stream: true,
} as const;
messages: [{ role: 'user' as const, content: 'Say this is a test!' }],
stream: true as const,
};

// TS knows this is a Stream instance.
const stream2 = await openai.chat.completions.create(streamingParams2);
Expand Down Expand Up @@ -95,11 +95,13 @@ async function main() {
// not the response will be streamed.
export async function createCompletionParams(
stream: true,
): Promise<OpenAI.Chat.CompletionCreateParamsStreaming>;
): Promise<OpenAI.Chat.ChatCompletionCreateParamsStreaming>;
export async function createCompletionParams(
stream: false,
): Promise<OpenAI.Chat.CompletionCreateParamsNonStreaming>;
export async function createCompletionParams(stream: boolean): Promise<OpenAI.Chat.CompletionCreateParams> {
): Promise<OpenAI.Chat.ChatCompletionCreateParamsNonStreaming>;
export async function createCompletionParams(
stream: boolean,
): Promise<OpenAI.Chat.ChatCompletionCreateParams> {
const params = {
model: 'gpt-3.5-turbo',
messages: [{ role: 'user' as const, content: 'Hello!' }],
Expand Down
6 changes: 3 additions & 3 deletions examples/function-call-stream.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@ import OpenAI from 'openai';
import {
ChatCompletionMessage,
ChatCompletionChunk,
CreateChatCompletionRequestMessage,
ChatCompletionMessageParam,
} from 'openai/resources/chat';

// gets API Key from environment variable OPENAI_API_KEY
const openai = new OpenAI();

const functions: OpenAI.Chat.CompletionCreateParams.Function[] = [
const functions: OpenAI.Chat.ChatCompletionCreateParams.Function[] = [
{
name: 'list',
description: 'list queries books by genre, and returns a list of names of books',
Expand Down Expand Up @@ -63,7 +63,7 @@ async function callFunction(function_call: ChatCompletionMessage.FunctionCall):
}

async function main() {
const messages: CreateChatCompletionRequestMessage[] = [
const messages: ChatCompletionMessageParam[] = [
{
role: 'system',
content:
Expand Down
6 changes: 3 additions & 3 deletions examples/function-call.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
#!/usr/bin/env -S npm run tsn -T

import OpenAI from 'openai';
import { ChatCompletionMessage, CreateChatCompletionRequestMessage } from 'openai/resources/chat';
import { ChatCompletionMessage, ChatCompletionMessageParam } from 'openai/resources/chat';

// gets API Key from environment variable OPENAI_API_KEY
const openai = new OpenAI();

const functions: OpenAI.Chat.CompletionCreateParams.Function[] = [
const functions: OpenAI.Chat.ChatCompletionCreateParams.Function[] = [
{
name: 'list',
description: 'list queries books by genre, and returns a list of names of books',
Expand Down Expand Up @@ -58,7 +58,7 @@ async function callFunction(function_call: ChatCompletionMessage.FunctionCall):
}

async function main() {
const messages: CreateChatCompletionRequestMessage[] = [
const messages: ChatCompletionMessageParam[] = [
{
role: 'system',
content:
Expand Down
7 changes: 6 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "openai",
"version": "4.3.1",
"version": "4.4.0",
"description": "Client library for the OpenAI API",
"author": "OpenAI <[email protected]>",
"types": "dist/index.d.ts",
Expand All @@ -16,6 +16,11 @@
"require": "./dist/_shims/*.js",
"default": "./dist/_shims/*.mjs"
},
"bun": {
"types": "./dist/_shims/*.d.ts",
"require": "./dist/_shims/*.js",
"default": "./dist/_shims/*.mjs"
},
"browser": {
"types": "./dist/_shims/*.d.ts",
"require": "./dist/_shims/*.js",
Expand Down
4 changes: 4 additions & 0 deletions src/resources/chat/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,12 @@ export namespace Chat {
export import ChatCompletion = API.ChatCompletion;
export import ChatCompletionChunk = API.ChatCompletionChunk;
export import ChatCompletionMessage = API.ChatCompletionMessage;
export import ChatCompletionMessageParam = API.ChatCompletionMessageParam;
export import CreateChatCompletionRequestMessage = API.CreateChatCompletionRequestMessage;
export import ChatCompletionCreateParams = API.ChatCompletionCreateParams;
export import CompletionCreateParams = API.CompletionCreateParams;
export import ChatCompletionCreateParamsNonStreaming = API.ChatCompletionCreateParamsNonStreaming;
export import CompletionCreateParamsNonStreaming = API.CompletionCreateParamsNonStreaming;
export import ChatCompletionCreateParamsStreaming = API.ChatCompletionCreateParamsStreaming;
export import CompletionCreateParamsStreaming = API.CompletionCreateParamsStreaming;
}
Loading

0 comments on commit b722d3f

Please sign in to comment.