Skip to content

Commit

Permalink
Task: Redefine consistent output (WIP)
Browse files Browse the repository at this point in the history
  • Loading branch information
zya committed Oct 9, 2023
1 parent 6dbf97b commit 90c7b76
Show file tree
Hide file tree
Showing 5 changed files with 37 additions and 19 deletions.
4 changes: 1 addition & 3 deletions src/handlers/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ function toResponse(
anthropicResponse: Anthropic.Completion,
): ResultNotStreaming {
return {
id: '',
choices: [
{
message: {
Expand All @@ -39,10 +38,9 @@ function toStreamingChunk(
anthropicResponse: Anthropic.Completion,
): StreamingChunk {
return {
id: '',
choices: [
{
delta: { content: anthropicResponse.completion },
delta: { content: anthropicResponse.completion, role: 'assistant' },
finish_reason: 'stop',
index: 0,
},
Expand Down
3 changes: 1 addition & 2 deletions src/handlers/cohere.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@ async function* toStream(
response: cohereResponse<generateResponse>,
): AsyncIterable<StreamingChunk> {
yield {
id: '',
choices: [
{
delta: {
content: response.body.generations[0].text,
role: 'assistant',
},
finish_reason: 'stop',
index: 0,
Expand Down Expand Up @@ -61,7 +61,6 @@ export async function CohereHandler(
}

return {
id: '',
choices: [
{
message: {
Expand Down
4 changes: 1 addition & 3 deletions src/handlers/ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,9 @@ interface OllamaResponseChunk {

function toStreamingChunk(ollamaResponse: OllamaResponseChunk): StreamingChunk {
return {
id: '',
choices: [
{
delta: { content: ollamaResponse.response },
delta: { content: ollamaResponse.response, role: 'assistant' },
finish_reason: 'stop',
index: 0,
},
Expand All @@ -37,7 +36,6 @@ function toResponse(content: string): ResultNotStreaming {
index: 0,
},
],
id: '',
};
}

Expand Down
4 changes: 3 additions & 1 deletion src/handlers/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,5 +24,7 @@ export async function OpenAIHandler(
params: HandlerParams,
): Promise<ResultNotStreaming | ResultStreaming> {
const openai = new OpenAI();
return openai.chat.completions.create(params);
return openai.chat.completions.create(params) as Promise<
ResultNotStreaming | ResultStreaming
>; // TODO: Undo the type casting by properly handling and converting to consistent response
}
41 changes: 31 additions & 10 deletions src/types.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,40 @@
import OpenAI from 'openai';

/**
 * A single chat message in the provider-agnostic format.
 * Mirrors the OpenAI chat-message shape so every handler can share it.
 */
export interface Message {
  // 'function' matches OpenAI's function-call role; other providers map onto these roles.
  role: 'system' | 'user' | 'assistant' | 'function';
  // content may be null — presumably for function-call messages with no text; confirm against callers.
  content: string | null;
}

export type ResultNotStreaming = Pick<
OpenAI.Chat.ChatCompletion,
'choices' | 'id'
>;
/**
 * One completion choice in the consistent (non-streaming) response shape.
 * Field names deliberately match OpenAI's ChatCompletion choice objects.
 */
export interface ConsistentResponseChoice {
  // Why generation stopped; the handlers in this commit always emit 'stop'.
  finish_reason: string;
  // Position of this choice within the response's choices array.
  index: number;
  message: {
    role: string;
    content: string;
  };
}

/**
 * Streaming variant of a choice: identical to ConsistentResponseChoice
 * except the incremental payload lives under `delta` instead of `message`,
 * matching OpenAI's streaming chunk shape.
 */
export interface ConsistentResponseStreamingChoice
  extends Omit<ConsistentResponseChoice, 'message'> {
  delta: ConsistentResponseChoice['message'];
}

/**
 * Token accounting for a completion, matching OpenAI's `usage` object
 * (prompt_tokens + completion_tokens = total_tokens).
 */
export interface ConsistentResponseUsage {
  prompt_tokens: number;
  // Fixed typo: was `completion_tokesn`, which would never line up with
  // OpenAI-style usage payloads and no consumer could sensibly rely on.
  completion_tokens: number;
  total_tokens: number;
}

/**
 * Provider-agnostic, OpenAI-flavoured completion response that every
 * handler (Anthropic, Cohere, Ollama, OpenAI) converts into.
 */
export interface ConsistentResponse {
  choices: ConsistentResponseChoice[];
  model?: string; // TODO: Make this non-optional
  // NOTE(review): OpenAI's `created` is a Unix timestamp (a number), not a
  // string — confirm the intended type before implementing this field.
  created?: string; // TODO: Make this non-optional and implement
  usage?: ConsistentResponseUsage; // TODO: Make this non-optional and implement
}

export type StreamingChunk = Pick<
OpenAI.Chat.ChatCompletionChunk,
'choices' | 'id'
>;
export type ResultNotStreaming = ConsistentResponse;

/**
 * One chunk of a streaming response: same top-level shape as
 * ConsistentResponse, but with delta-based streaming choices.
 */
export interface StreamingChunk extends Omit<ConsistentResponse, 'choices'> {
  choices: ConsistentResponseStreamingChoice[];
}

export type ResultStreaming = AsyncIterable<StreamingChunk>;

Expand Down

0 comments on commit 90c7b76

Please sign in to comment.