Skip to content

Commit

Permalink
Refactor: Extract combine function
Browse files Browse the repository at this point in the history
  • Loading branch information
zya committed Oct 6, 2023
1 parent fcc5266 commit 6dbf97b
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 15 deletions.
5 changes: 2 additions & 3 deletions src/handlers/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,10 @@ import {
StreamingChunk,
Message,
} from '../types';
import { combinePrompts } from '../utils/combinePrompts';

function toAnthropicPrompt(messages: Message[]): string {
const textsCombined = messages.reduce((acc, message) => {
return (acc += message.content);
}, '');
const textsCombined = combinePrompts(messages);
return `${Anthropic.HUMAN_PROMPT} ${textsCombined}${Anthropic.AI_PROMPT}`;
}

Expand Down
5 changes: 2 additions & 3 deletions src/handlers/cohere.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import {
StreamingChunk,
} from '../types';
import { cohereResponse, generateResponse } from 'cohere-ai/dist/models';
import { combinePrompts } from '../utils/combinePrompts';

// eslint-disable-next-line @typescript-eslint/require-await
async function* toStream(
Expand Down Expand Up @@ -44,9 +45,7 @@ export async function CohereHandler(
params: HandlerParams,
): Promise<ResultNotStreaming | ResultStreaming> {
cohere.init(process.env.COHERE_API_KEY!);
const textsCombined = params.messages.reduce((acc, message) => {
return (acc += message.content);
}, '');
const textsCombined = combinePrompts(params.messages);

const config = {
model: params.model,
Expand Down
11 changes: 2 additions & 9 deletions src/handlers/ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@ import {
HandlerParams,
HandlerParamsNotStreaming,
HandlerParamsStreaming,
Message,
ResultNotStreaming,
ResultStreaming,
StreamingChunk,
} from '../types';
import { combinePrompts } from '../utils/combinePrompts';

interface OllamaResponseChunk {
model: string;
Expand Down Expand Up @@ -60,13 +60,6 @@ async function* iterateResponse(
}
}

// Flattens an ordered list of chat messages into one prompt string by
// concatenating each message's content in order.
// TODO: Distinguish between the different role types
function combineMessagesToPromit(messages: Message[]): string {
  let prompt = '';
  for (const message of messages) {
    prompt += message.content;
  }
  return prompt;
}

async function getOllamaResponse(
model: string,
prompt: string,
Expand Down Expand Up @@ -102,7 +95,7 @@ export async function OllamaHandler(
): Promise<ResultNotStreaming | ResultStreaming> {
const baseUrl = params.baseUrl ?? 'http://127.0.0.1:11434';
const model = params.model.split('ollama/')[1];
const prompt = combineMessagesToPromit(params.messages);
const prompt = combinePrompts(params.messages);

const res = await getOllamaResponse(model, prompt, baseUrl);

Expand Down
7 changes: 7 additions & 0 deletions src/utils/combinePrompts.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import { Message } from '../types';

/**
 * Combines a list of chat messages into a single prompt string by
 * concatenating each message's `content` in order.
 *
 * Note: roles are currently ignored; every message contributes only its
 * text. Returns '' for an empty message list.
 *
 * @param messages - Ordered conversation messages to flatten.
 * @returns The concatenated content of all messages.
 */
export function combinePrompts(messages: Message[]): string {
  // map + join is the idiomatic (and allocation-friendly) form of string
  // concatenation; avoids the side-effecting `(acc += ...)` reduce pattern.
  return messages.map((message) => message.content).join('');
}

0 comments on commit 6dbf97b

Please sign in to comment.