Merge pull request #1495 from Yidadaa/refactor-api
refactor: #1000 #1179 api layer for client-side only mode and local models
Yidadaa authored May 14, 2023
2 parents b5b8593 + e9335d9 commit 2170392
Showing 17 changed files with 511 additions and 594 deletions.
77 changes: 3 additions & 74 deletions app/api/openai/[...path]/route.ts
@@ -1,49 +1,8 @@
-import { createParser } from "eventsource-parser";
+import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
 import { requestOpenai } from "../../common";
 
-async function createStream(res: Response) {
-  const encoder = new TextEncoder();
-  const decoder = new TextDecoder();
-
-  const stream = new ReadableStream({
-    async start(controller) {
-      function onParse(event: any) {
-        if (event.type === "event") {
-          const data = event.data;
-          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
-          if (data === "[DONE]") {
-            controller.close();
-            return;
-          }
-          try {
-            const json = JSON.parse(data);
-            const text = json.choices[0].delta.content;
-            const queue = encoder.encode(text);
-            controller.enqueue(queue);
-          } catch (e) {
-            controller.error(e);
-          }
-        }
-      }
-
-      const parser = createParser(onParse);
-      for await (const chunk of res.body as any) {
-        parser.feed(decoder.decode(chunk, { stream: true }));
-      }
-    },
-  });
-  return stream;
-}
-
-function formatResponse(msg: any) {
-  const jsonMsg = ["```json\n", JSON.stringify(msg, null, "  "), "\n```"].join(
-    "",
-  );
-  return new Response(jsonMsg);
-}
-
 async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
@@ -58,40 +17,10 @@ async function handle(
   }
 
   try {
-    const api = await requestOpenai(req);
-
-    const contentType = api.headers.get("Content-Type") ?? "";
-
-    // streaming response
-    if (contentType.includes("stream")) {
-      const stream = await createStream(api);
-      const res = new Response(stream);
-      res.headers.set("Content-Type", contentType);
-      return res;
-    }
-
-    // try to parse error msg
-    try {
-      const mayBeErrorBody = await api.json();
-      if (mayBeErrorBody.error) {
-        console.error("[OpenAI Response] ", mayBeErrorBody);
-        return formatResponse(mayBeErrorBody);
-      } else {
-        const res = new Response(JSON.stringify(mayBeErrorBody));
-        res.headers.set("Content-Type", "application/json");
-        res.headers.set("Cache-Control", "no-cache");
-        return res;
-      }
-    } catch (e) {
-      console.error("[OpenAI Parse] ", e);
-      return formatResponse({
-        msg: "invalid response from openai server",
-        error: e,
-      });
-    }
+    return await requestOpenai(req);
   } catch (e) {
     console.error("[OpenAI] ", e);
-    return formatResponse(e);
+    return NextResponse.json(prettyObject(e));
   }
 }
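
The net effect of this hunk: the route no longer re-implements SSE parsing and error formatting on the server; it simply proxies the request and serializes failures with prettyObject. That helper lives in app/utils/format and is not part of this diff; a minimal sketch of what it plausibly does, modeled on the removed formatResponse (hypothetical, the real implementation may differ):

function prettyObject(msg: any): string {
  // Stringify non-string payloads with two-space indentation, much like
  // the formatResponse helper this commit deletes.
  if (typeof msg !== "string") {
    msg = JSON.stringify(msg, null, "  ");
  }
  // Wrap in a fenced JSON block so the chat UI renders it as code.
  return ["```json", msg, "```"].join("\n");
}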

9 changes: 0 additions & 9 deletions app/api/openai/typing.ts

This file was deleted.

83 changes: 83 additions & 0 deletions app/client/api.ts
@@ -0,0 +1,83 @@
+import { ACCESS_CODE_PREFIX } from "../constant";
+import { ModelConfig, ModelType, useAccessStore } from "../store";
+import { ChatGPTApi } from "./platforms/openai";
+
+export const ROLES = ["system", "user", "assistant"] as const;
+export type MessageRole = (typeof ROLES)[number];
+
+export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
+export type ChatModel = ModelType;
+
+export interface RequestMessage {
+  role: MessageRole;
+  content: string;
+}
+
+export interface LLMConfig {
+  model: string;
+  temperature?: number;
+  top_p?: number;
+  stream?: boolean;
+  presence_penalty?: number;
+  frequency_penalty?: number;
+}
+
+export interface ChatOptions {
+  messages: RequestMessage[];
+  config: LLMConfig;
+
+  onUpdate?: (message: string, chunk: string) => void;
+  onFinish: (message: string) => void;
+  onError?: (err: Error) => void;
+  onController?: (controller: AbortController) => void;
+}
+
+export interface LLMUsage {
+  used: number;
+  total: number;
+}
+
+export abstract class LLMApi {
+  abstract chat(options: ChatOptions): Promise<void>;
+  abstract usage(): Promise<LLMUsage>;
+}
+
+export class ClientApi {
+  public llm: LLMApi;
+
+  constructor() {
+    this.llm = new ChatGPTApi();
+  }
+
+  config() {}
+
+  prompts() {}
+
+  masks() {}
+}
+
+export const api = new ClientApi();
+
+export function getHeaders() {
+  const accessStore = useAccessStore.getState();
+  let headers: Record<string, string> = {
+    "Content-Type": "application/json",
+  };
+
+  const makeBearer = (token: string) => `Bearer ${token.trim()}`;
+  const validString = (x: string) => x && x.length > 0;
+
+  // use user's api key first
+  if (validString(accessStore.token)) {
+    headers.Authorization = makeBearer(accessStore.token);
+  } else if (
+    accessStore.enabledAccessControl() &&
+    validString(accessStore.accessCode)
+  ) {
+    headers.Authorization = makeBearer(
+      ACCESS_CODE_PREFIX + accessStore.accessCode,
+    );
+  }
+
+  return headers;
+}
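
The new ClientApi is the single entry point for UI code: it hides whether responses come from the hosted API route or a local model behind the LLMApi abstraction, and getHeaders centralizes auth (user token first, then access code). A usage sketch based on the types above (the message content and config values are illustrative):

import { api } from "./api";

void api.llm.chat({
  messages: [{ role: "user", content: "Hello!" }],
  config: { model: "gpt-3.5-turbo", temperature: 0.7, stream: true },
  onUpdate(_message, chunk) {
    // Invoked as streamed content arrives, per the onUpdate signature above.
    console.log("delta:", chunk);
  },
  onFinish(message) {
    console.log("full reply:", message);
  },
  onError(err) {
    console.error("chat failed:", err);
  },
  onController(controller) {
    // The AbortController can be kept (e.g. in the ChatControllerPool below)
    // so the user can cancel the stream mid-flight.
  },
});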
37 changes: 37 additions & 0 deletions app/client/controller.ts
@@ -0,0 +1,37 @@
+// To store message streaming controller
+export const ChatControllerPool = {
+  controllers: {} as Record<string, AbortController>,
+
+  addController(
+    sessionIndex: number,
+    messageId: number,
+    controller: AbortController,
+  ) {
+    const key = this.key(sessionIndex, messageId);
+    this.controllers[key] = controller;
+    return key;
+  },
+
+  stop(sessionIndex: number, messageId: number) {
+    const key = this.key(sessionIndex, messageId);
+    const controller = this.controllers[key];
+    controller?.abort();
+  },
+
+  stopAll() {
+    Object.values(this.controllers).forEach((v) => v.abort());
+  },
+
+  hasPending() {
+    return Object.values(this.controllers).length > 0;
+  },
+
+  remove(sessionIndex: number, messageId: number) {
+    const key = this.key(sessionIndex, messageId);
+    delete this.controllers[key];
+  },
+
+  key(sessionIndex: number, messageIndex: number) {
+    return `${sessionIndex},${messageIndex}`;
+  },
+};
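
ChatControllerPool keys each AbortController by session and message index so one streaming reply can be cancelled without touching others. A sketch of the intended flow (the index values are illustrative):

const controller = new AbortController();

// Register before the streaming request starts, keyed by session + message.
ChatControllerPool.addController(0, 42, controller);

// The request should be issued with controller.signal so aborting cancels it,
// e.g. fetch(url, { signal: controller.signal }).

// A stop button can then abort exactly this message's stream:
ChatControllerPool.stop(0, 42);

// Once a stream finishes (or is aborted), release the entry:
ChatControllerPool.remove(0, 42);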