Skip to content

Commit

Permalink
refactor: ChatGPTNextWeb#1000 ChatGPTNextWeb#1179 api layer for client-side only mode and local models
Browse files Browse the repository at this point in the history
  • Loading branch information
Yidadaa committed May 14, 2023
1 parent 58b2b63 commit edd2b59
Show file tree
Hide file tree
Showing 15 changed files with 248 additions and 594 deletions.
77 changes: 3 additions & 74 deletions app/api/openai/[...path]/route.ts
Original file line number Diff line number Diff line change
@@ -1,49 +1,8 @@
import { createParser } from "eventsource-parser";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { requestOpenai } from "../../common";

async function createStream(res: Response) {
  const textEncoder = new TextEncoder();
  const textDecoder = new TextDecoder();

  // Re-emit the SSE payload of an OpenAI streaming response as a plain
  // byte stream that carries only each event's delta content.
  return new ReadableStream({
    async start(controller) {
      const handleEvent = (event: any) => {
        if (event.type !== "event") return;

        const payload = event.data;
        // The OpenAI stream terminates with a literal "[DONE]" sentinel.
        // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
        if (payload === "[DONE]") {
          controller.close();
          return;
        }

        try {
          const parsed = JSON.parse(payload);
          const delta = parsed.choices[0].delta.content;
          controller.enqueue(textEncoder.encode(delta));
        } catch (err) {
          // Malformed event data aborts the stream with the parse error.
          controller.error(err);
        }
      };

      const parser = createParser(handleEvent);
      for await (const chunk of res.body as any) {
        parser.feed(textDecoder.decode(chunk, { stream: true }));
      }
    },
  });
}

/**
 * Wrap an arbitrary value in a Response whose body renders the value as a
 * fenced ```json markdown block, so error payloads display readably in the
 * chat UI.
 *
 * @param msg - any JSON-serializable value. NOTE: a plain Error instance
 *   serializes to "{}" via JSON.stringify (its message/stack are
 *   non-enumerable), so callers passing caught errors should convert them
 *   to plain objects if the details matter.
 * @returns a Response carrying the markdown-formatted JSON as its body
 */
function formatResponse(msg: unknown) {
  // `unknown` instead of `any`: JSON.stringify accepts unknown, and every
  // existing call site remains valid while the body stays type-checked.
  const jsonMsg = ["```json\n", JSON.stringify(msg, null, " "), "\n```"].join(
    "",
  );
  return new Response(jsonMsg);
}

async function handle(
req: NextRequest,
{ params }: { params: { path: string[] } },
Expand All @@ -58,40 +17,10 @@ async function handle(
}

try {
const api = await requestOpenai(req);

const contentType = api.headers.get("Content-Type") ?? "";

// streaming response
if (contentType.includes("stream")) {
const stream = await createStream(api);
const res = new Response(stream);
res.headers.set("Content-Type", contentType);
return res;
}

// try to parse error msg
try {
const mayBeErrorBody = await api.json();
if (mayBeErrorBody.error) {
console.error("[OpenAI Response] ", mayBeErrorBody);
return formatResponse(mayBeErrorBody);
} else {
const res = new Response(JSON.stringify(mayBeErrorBody));
res.headers.set("Content-Type", "application/json");
res.headers.set("Cache-Control", "no-cache");
return res;
}
} catch (e) {
console.error("[OpenAI Parse] ", e);
return formatResponse({
msg: "invalid response from openai server",
error: e,
});
}
return await requestOpenai(req);
} catch (e) {
console.error("[OpenAI] ", e);
return formatResponse(e);
return NextResponse.json(prettyObject(e));
}
}

Expand Down
9 changes: 0 additions & 9 deletions app/api/openai/typing.ts

This file was deleted.

50 changes: 12 additions & 38 deletions app/client/api.ts
Original file line number Diff line number Diff line change
@@ -1,39 +1,35 @@
import { fetchEventSource } from "@microsoft/fetch-event-source";
import { ACCESS_CODE_PREFIX } from "../constant";
import { ModelType, useAccessStore } from "../store";
import { ModelConfig, ModelType, useAccessStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";

export enum MessageRole {
System = "system",
User = "user",
Assistant = "assistant",
}
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;

export interface Message {
export interface RequestMessage {
role: MessageRole;
content: string;
}

export interface LLMConfig {
model: string;
temperature?: number;
topP?: number;
top_p?: number;
stream?: boolean;
presencePenalty?: number;
frequencyPenalty?: number;
presence_penalty?: number;
frequency_penalty?: number;
}

export interface ChatOptions {
messages: Message[];
model: ChatModel;
messages: RequestMessage[];
config: LLMConfig;

onUpdate: (message: string, chunk: string) => void;
onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onError: (err: Error) => void;
onUnAuth: () => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
}

export interface LLMUsage {
Expand All @@ -53,28 +49,6 @@ export class ClientApi {
this.llm = new ChatGPTApi();
}

headers() {
  const accessStore = useAccessStore.getState();
  const headers: Record<string, string> = {};

  const bearer = (token: string) => `Bearer ${token.trim()}`;
  const nonEmpty = (x: string) => x && x.length > 0;

  // A user-supplied API key always takes precedence; otherwise fall back
  // to the server access code, prefixed so the backend can tell the two
  // credential kinds apart.
  if (nonEmpty(accessStore.token)) {
    headers.Authorization = bearer(accessStore.token);
  } else if (
    accessStore.enabledAccessControl() &&
    nonEmpty(accessStore.accessCode)
  ) {
    headers.Authorization = bearer(
      ACCESS_CODE_PREFIX + accessStore.accessCode,
    );
  }

  return headers;
}

config() {}

prompts() {}
Expand Down
158 changes: 111 additions & 47 deletions app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import { REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
EventStreamContentType,
fetchEventSource,
} from "@microsoft/fetch-event-source";
import { ChatOptions, LLMApi, LLMUsage } from "../api";

import { ChatOptions, getHeaders, LLMApi, LLMUsage } from "../api";
import Locale from "../../locales";

export class ChatGPTApi implements LLMApi {
public ChatPath = "v1/chat/completions";
public UsagePath = "dashboard/billing/usage";
public SubsPath = "dashboard/billing/subscription";

path(path: string): string {
const openaiUrl = useAccessStore.getState().openaiUrl;
Expand All @@ -29,7 +29,7 @@ export class ChatGPTApi implements LLMApi {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
...{
model: options.model,
model: options.config.model,
},
};

Expand All @@ -45,80 +45,144 @@ export class ChatGPTApi implements LLMApi {

const shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);

try {
const chatPath = this.path(this.ChatPath);
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: getHeaders(),
};

// make a fetch request
const reqestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);

if (shouldStream) {
let responseText = "";

fetchEventSource(chatPath, {
...chatPayload,
async onopen(res) {
if (
res.ok &&
res.headers.get("Content-Type") === EventStreamContentType
) {
return;
}
const finish = () => {
options.onFinish(responseText);
};

if (res.status === 401) {
// TODO: Unauthorized 401
responseText += "\n\n";
} else if (res.status !== 200) {
console.error("[Request] response", res);
throw new Error("[Request] server error");
}
},
onmessage: (ev) => {
if (ev.data === "[DONE]") {
return options.onFinish(responseText);
const res = await fetch(chatPath, chatPayload);
clearTimeout(reqestTimeoutId);

if (res.status === 401) {
responseText += "\n\n" + Locale.Error.Unauthorized;
return finish();
}

if (
!res.ok ||
!res.headers.get("Content-Type")?.includes("stream") ||
!res.body
) {
return options.onError?.(new Error());
}

const reader = res.body.getReader();
const decoder = new TextDecoder("utf-8");

while (true) {
const { done, value } = await reader.read();
if (done) {
return finish();
}

const chunk = decoder.decode(value);
const lines = chunk.split("data: ");

for (const line of lines) {
const text = line.trim();
if (line.startsWith("[DONE]")) {
return finish();
}
try {
const resJson = JSON.parse(ev.data);
const message = this.extractMessage(resJson);
responseText += message;
options.onUpdate(responseText, message);
} catch (e) {
console.error("[Request] stream error", e);
options.onError(e as Error);
if (text.length === 0) continue;
const json = JSON.parse(text);
const delta = json.choices[0].delta.content;
if (delta) {
responseText += delta;
options.onUpdate?.(responseText, delta);
}
},
onclose() {
options.onError(new Error("stream closed unexpected"));
},
onerror(err) {
options.onError(err);
},
});
}
}
} else {
const res = await fetch(chatPath, chatPayload);
clearTimeout(reqestTimeoutId);

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
}

clearTimeout(reqestTimeoutId);
} catch (e) {
console.log("[Request] failed to make a chat reqeust", e);
options.onError(e as Error);
options.onError?.(e as Error);
}
}
async usage() {
const formatDate = (d: Date) =>
`${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
.getDate()
.toString()
.padStart(2, "0")}`;
const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
const now = new Date();
const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
const startDate = formatDate(startOfMonth);
const endDate = formatDate(new Date(Date.now() + ONE_DAY));

const [used, subs] = await Promise.all([
fetch(
this.path(
`${this.UsagePath}?start_date=${startDate}&end_date=${endDate}`,
),
{
method: "GET",
headers: getHeaders(),
},
),
fetch(this.path(this.SubsPath), {
method: "GET",
headers: getHeaders(),
}),
]);

if (!used.ok || !subs.ok || used.status === 401) {
throw new Error(Locale.Error.Unauthorized);
}

const response = (await used.json()) as {
total_usage?: number;
error?: {
type: string;
message: string;
};
};

const total = (await subs.json()) as {
hard_limit_usd?: number;
};

if (response.error && response.error.type) {
throw Error(response.error.message);
}

if (response.total_usage) {
response.total_usage = Math.round(response.total_usage) / 100;
}

if (total.hard_limit_usd) {
total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
}

return {
used: 0,
total: 0,
used: response.total_usage,
total: total.hard_limit_usd,
} as LLMUsage;
}
}
Loading

0 comments on commit edd2b59

Please sign in to comment.