Skip to content

Commit

Permalink
Apply suggestions from code review
Browse files Browse the repository at this point in the history
  • Loading branch information
zya authored Dec 21, 2023
1 parent 421d1dd commit 6911da3
Show file tree
Hide file tree
Showing 6 changed files with 17 additions and 17 deletions.
4 changes: 2 additions & 2 deletions src/handlers/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -87,10 +87,10 @@ export async function AnthropicHandler(
export async function AnthropicHandler(
params: HandlerParams,
): Promise<ResultNotStreaming | ResultStreaming> {
const api_key = params.api_key ?? process.env.ANTHROPIC_API_KEY;
const apiKey = params.apiKey ?? process.env.ANTHROPIC_API_KEY;

const anthropic = new Anthropic({
apiKey: api_key,
apiKey: apiKey,
});
const prompt = toAnthropicPrompt(params.messages);

Expand Down
4 changes: 2 additions & 2 deletions src/handlers/cohere.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,9 +51,9 @@ export async function CohereHandler(
export async function CohereHandler(
params: HandlerParams,
): Promise<ResultNotStreaming | ResultStreaming> {
const api_key = params.api_key ?? process.env.COHERE_API_KEY!;
const apiKey = params.apiKey ?? process.env.COHERE_API_KEY!;

cohere.init(api_key);
cohere.init(apiKey);
const textsCombined = combinePrompts(params.messages);

const config = {
Expand Down
8 changes: 4 additions & 4 deletions src/handlers/deepinfra.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,14 +36,14 @@ async function getDeepInfraResponse(
model: string,
messages: Message[],
baseUrl: string,
api_key: string,
apiKey: string,
stream: boolean,
): Promise<Response> {
return fetch(`${baseUrl}/v1/openai/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${api_key}`,
Authorization: `Bearer ${apiKey}`,
},
body: JSON.stringify({
messages,
Expand All @@ -69,14 +69,14 @@ export async function DeepInfraHandler(
params: HandlerParams,
): Promise<ResultNotStreaming | ResultStreaming> {
const baseUrl = params.baseUrl ?? 'https://api.deepinfra.com';
const api_key = params.api_key ?? process.env.DEEPINFRA_API_KEY!;
const apiKey = params.apiKey ?? process.env.DEEPINFRA_API_KEY!;
const model = params.model.split('deepinfra/')[1];

const res = await getDeepInfraResponse(
model,
params.messages,
baseUrl,
api_key,
apiKey,
params.stream ?? false,
);

Expand Down
8 changes: 4 additions & 4 deletions src/handlers/mistral.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,14 +37,14 @@ async function getMistralResponse(
model: string,
messages: Message[],
baseUrl: string,
api_key: string,
apiKey: string,
stream: boolean,
): Promise<Response> {
return fetch(`${baseUrl}/v1/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${api_key}`,
Authorization: `Bearer ${apiKey}`,
},
body: JSON.stringify({
messages,
Expand All @@ -70,14 +70,14 @@ export async function MistralHandler(
params: HandlerParams,
): Promise<ResultNotStreaming | ResultStreaming> {
const baseUrl = params.baseUrl ?? 'https://api.mistral.ai';
const api_key = params.api_key ?? process.env.MISTRAL_API_KEY!;
const apiKey = params.apiKey ?? process.env.MISTRAL_API_KEY!;
const model = params.model.split('mistral/')[1];

const res = await getMistralResponse(
model,
params.messages,
baseUrl,
api_key,
apiKey,
params.stream ?? false,
);

Expand Down
8 changes: 4 additions & 4 deletions src/handlers/mistralEmbedding.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@ async function getMistralResponse(
model: string,
input: EmbeddingParams['input'],
baseUrl: string,
api_key: string,
apiKey: string,
): Promise<Response> {
return fetch(`${baseUrl}/v1/embeddings`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${api_key}`,
Authorization: `Bearer ${apiKey}`,
},
body: JSON.stringify({
model,
Expand All @@ -25,12 +25,12 @@ export async function MistralEmbeddingHandler(
): Promise<EmbeddingResponse> {
const model = params.model.split('mistral/')[1];
const baseUrl = params.baseUrl ?? 'https://api.mistral.ai';
const api_key = params.api_key ?? process.env.MISTRAL_API_KEY!;
const apiKey = params.apiKey ?? process.env.MISTRAL_API_KEY!;
const response = await getMistralResponse(
model,
params.input,
baseUrl,
api_key,
apiKey,
);

if (!response.ok) {
Expand Down
2 changes: 1 addition & 1 deletion src/handlers/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ export async function OpenAIHandler(
export async function OpenAIHandler(
params: HandlerParams,
): Promise<ResultNotStreaming | ResultStreaming> {
const api_key = params.api_key ?? process.env.OPENAI_API_KEY;
const apiKey = params.apiKey ?? process.env.OPENAI_API_KEY;

const openai = new OpenAI({
apiKey: apiKey,
Expand Down

0 comments on commit 6911da3

Please sign in to comment.