Skip to content

Commit

Permalink
fix: use new AI protocol format for friendly errors (#122)
Browse files Browse the repository at this point in the history
  • Loading branch information
codeincontext authored and simonrose121 committed Sep 16, 2024
1 parent d1622e3 commit f4f8c67
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 10 deletions.
22 changes: 13 additions & 9 deletions apps/nextjs/src/app/api/chat/errorHandling.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,12 @@ import { UserBannedError } from "@oakai/core/src/models/safetyViolations";
import { TracingSpan } from "@oakai/core/src/tracing/serverTracing";
import { RateLimitExceededError } from "@oakai/core/src/utils/rateLimiting/userBasedRateLimiter";
import { PrismaClientWithAccelerate } from "@oakai/db";
import invariant from "tiny-invariant";

import { consumeStream } from "@/utils/testHelpers/consumeStream";
import {
consumeStream,
extractStreamMessage,
} from "@/utils/testHelpers/consumeStream";

import { handleChatException } from "./errorHandling";

Expand Down Expand Up @@ -33,9 +37,9 @@ describe("handleChatException", () => {

expect(response.status).toBe(200);

const message = JSON.parse(
await consumeStream(response.body as ReadableStream),
);
invariant(response.body instanceof ReadableStream);
const message = extractStreamMessage(await consumeStream(response.body));

expect(message).toEqual({
type: "error",
value: "Threat detected",
Expand Down Expand Up @@ -83,9 +87,9 @@ describe("handleChatException", () => {

expect(response.status).toBe(200);

const message = JSON.parse(
await consumeStream(response.body as ReadableStream),
);
const consumed = await consumeStream(response.body as ReadableStream);
const message = extractStreamMessage(consumed);

expect(message).toEqual({
type: "error",
value: "Rate limit exceeded",
Expand All @@ -110,7 +114,7 @@ describe("handleChatException", () => {

expect(response.status).toBe(200);

const message = JSON.parse(
const message = extractStreamMessage(
await consumeStream(response.body as ReadableStream),
);
expect(message).toEqual({
Expand All @@ -119,4 +123,4 @@ describe("handleChatException", () => {
});
});
});
});
});
4 changes: 3 additions & 1 deletion apps/nextjs/src/app/api/chat/protocol.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@ import {
import { StreamingTextResponse } from "ai";

export function streamingJSON(message: ErrorDocument | ActionDocument) {
const errorMessage = JSON.stringify(message);
const jsonContent = JSON.stringify(message);
const errorMessage = `0:"${jsonContent.replace(/"/g, '\\"')}"`;

const errorEncoder = new TextEncoder();

return new StreamingTextResponse(
Expand Down
9 changes: 9 additions & 0 deletions apps/nextjs/src/utils/testHelpers/consumeStream.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,12 @@ export async function consumeStream(stream: ReadableStream): Promise<string> {

return result;
}

/**
 * Extracts and parses the message payload from a consumed response stream.
 *
 * Expects the streamed text to contain a `0:"..."` text part (the AI SDK
 * data-stream protocol's text format, as produced by `streamingJSON` in
 * apps/nextjs/src/app/api/chat/protocol.ts).
 *
 * @param streamedText - The full text consumed from the response stream.
 * @returns The parsed message (e.g. `{ type: "error", value: "..." }`) —
 *   typed `unknown` so callers narrow/validate rather than inherit `any`.
 * @throws Error when the text contains no `0:"..."` part.
 */
export function extractStreamMessage(streamedText: string): unknown {
  const content = streamedText.match(/0:"(.*)"/);
  if (!content?.[1]) {
    throw new Error("No message found in streamed text");
  }
  // Undo the quote-only escaping applied by the producer (streamingJSON
  // writes each `"` as `\"` but does not escape backslashes), so collapsing
  // every `\"` back to `"` is the exact inverse. Do NOT replace this with
  // JSON string-literal decoding — that would mis-handle payload values
  // that themselves contain quotes.
  const strippedContent = content[1].replace(/\\"/g, '"');
  return JSON.parse(strippedContent);
}

0 comments on commit f4f8c67

Please sign in to comment.