Skip to content

Commit

Permalink
Merge branch 'main' into cs-7714-show-current-llm-model-used
Browse files Browse the repository at this point in the history
  • Loading branch information
FadhlanR committed Jan 8, 2025
2 parents 3e66444 + 2f93df0 commit b655c1b
Show file tree
Hide file tree
Showing 253 changed files with 8,457 additions and 3,619 deletions.
4 changes: 1 addition & 3 deletions QUICKSTART.md
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,5 @@ If you experience issues, you can start from scratch by running this command

```
pnpm clear-caches
rm -rf ./packages/matrix/synapse-data
docker ps -a --format '{{.Names}}' | grep -E 'boxel-smtp|boxel-synapse|synapse-admin' | xargs -r docker stop
docker ps -a --format '{{.Names}}' | grep -E 'boxel-smtp|boxel-synapse|synapse-admin' | xargs -r docker rm -v
pnpm full-reset
```
128 changes: 52 additions & 76 deletions packages/ai-bot/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,21 +8,23 @@ import type {
MatrixEvent as DiscreteMatrixEvent,
CardFragmentContent,
CommandEvent,
CommandResultEvent,
ReactionEvent,
Tool,
SkillsConfigEvent,
ActiveLLMEvent,
CommandResultEvent,
} from 'https://cardstack.com/base/matrix-event';
import { MatrixEvent, type IRoomEvent } from 'matrix-js-sdk';
import { ChatCompletionMessageToolCall } from 'openai/resources/chat/completions';
import * as Sentry from '@sentry/node';
import { logger } from '@cardstack/runtime-common';
import {
APP_BOXEL_COMMAND_RESULT_EVENT_TYPE,
APP_BOXEL_COMMAND_RESULT_WITH_OUTPUT_MSGTYPE,
} from '../runtime-common/matrix-constants';
import {
APP_BOXEL_CARDFRAGMENT_MSGTYPE,
APP_BOXEL_MESSAGE_MSGTYPE,
APP_BOXEL_COMMAND_MSGTYPE,
APP_BOXEL_COMMAND_RESULT_MSGTYPE,
APP_BOXEL_ROOM_SKILLS_EVENT_TYPE,
DEFAULT_LLM,
APP_BOXEL_ACTIVE_LLM,
Expand Down Expand Up @@ -144,6 +146,16 @@ export function constructHistory(
}
}
let event = { ...rawEvent } as DiscreteMatrixEvent;
if (
event.type === APP_BOXEL_COMMAND_RESULT_EVENT_TYPE &&
event.content.msgtype == APP_BOXEL_COMMAND_RESULT_WITH_OUTPUT_MSGTYPE
) {
let { cardEventId } = event.content.data;
event.content.data.card = serializedCardFromFragments(
cardEventId,
cardFragments,
);
}
if (event.type !== 'm.room.message') {
continue;
}
Expand Down Expand Up @@ -362,60 +374,20 @@ export function getToolChoice(
return 'auto';
}

export function isCommandResultEvent(
  event: DiscreteMatrixEvent,
): event is CommandResultEvent {
  // Command results are room messages tagged with the command-result msgtype.
  if (event.type !== 'm.room.message') {
    return false;
  }
  if (typeof event.content !== 'object') {
    return false;
  }
  return event.content.msgtype === APP_BOXEL_COMMAND_RESULT_MSGTYPE;
}

export function isReactionEvent(
  event: DiscreteMatrixEvent,
): event is ReactionEvent {
  // A reaction event must use the m.reaction type; non-reactions bail early
  // so we never touch content of unrelated events.
  if (event.type !== 'm.reaction') {
    return false;
  }
  // Only annotation-style relations count as reactions here.
  return event.content['m.relates_to'].rel_type === 'm.annotation';
}

function getReactionStatus(
  commandEvent: DiscreteMatrixEvent,
  history: DiscreteMatrixEvent[],
) {
  // Locate the reaction (if any) that annotates the given command event.
  let match = history.find(
    (e) =>
      isReactionEvent(e) &&
      e.content['m.relates_to']?.event_id === commandEvent.event_id,
  );
  // Re-narrow for the type checker before reading the reaction key.
  if (match && isReactionEvent(match)) {
    return match.content['m.relates_to'].key;
  }
  return undefined;
}

function getCommandResult(
commandEvent: CommandEvent,
history: DiscreteMatrixEvent[],
) {
let maybeCommandResultEvent = history.find((e) => {
let commandResultEvent = history.find((e) => {
if (
isCommandResultEvent(e) &&
e.content['m.relates_to']?.event_id === commandEvent.event_id
) {
return true;
}
return false;
});
return maybeCommandResultEvent &&
isCommandResultEvent(maybeCommandResultEvent)
? maybeCommandResultEvent.content.result
: undefined;
}) as CommandResultEvent | undefined;
return commandResultEvent;
}

function toToolCall(event: CommandEvent): ChatCompletionMessageToolCall {
Expand All @@ -433,21 +405,26 @@ function toPromptMessageWithToolResult(
event: CommandEvent,
history: DiscreteMatrixEvent[],
): OpenAIPromptMessage {
let commandResult = getCommandResult(event as CommandEvent, history);
let commandResult = getCommandResult(event, history);
let content = 'pending';
if (commandResult) {
return {
role: 'tool',
content: commandResult,
tool_call_id: event.content.data.toolCall.id,
};
} else {
let reactionStatus = getReactionStatus(event, history);
return {
role: 'tool',
content: reactionStatus ?? 'pending',
tool_call_id: event.content.data.toolCall.id,
};
let status = commandResult.content['m.relates_to']?.key;
if (
commandResult.content.msgtype ===
APP_BOXEL_COMMAND_RESULT_WITH_OUTPUT_MSGTYPE
) {
content = `Command ${status}, with result card: ${JSON.stringify(
commandResult.content.data.card,
)}.\n`;
} else {
content = `Command ${status}.\n`;
}
}
return {
role: 'tool',
content,
tool_call_id: event.content.data.toolCall.id,
};
}

export function getModifyPrompt(
Expand Down Expand Up @@ -574,24 +551,13 @@ export function cleanContent(content: string) {
return content.trim();
}

export const isCommandReactionEvent = (event?: MatrixEvent) => {
  // An absent event can never be a command reaction.
  if (event === undefined) {
    return false;
  }
  let content = event.getContent();
  // Only m.reaction events qualify.
  if (event.getType() !== 'm.reaction') {
    return false;
  }
  // The reaction must relate to its target via an annotation.
  return content['m.relates_to']?.rel_type === 'm.annotation';
};

export const isCommandReactionStatusApplied = (event?: MatrixEvent) => {
export const isCommandResultStatusApplied = (event?: MatrixEvent) => {
if (event === undefined) {
return false;
}
let content = event.getContent();
return (
isCommandReactionEvent(event) && content['m.relates_to']?.key === 'applied'
isCommandResultEvent(event.event as DiscreteMatrixEvent) &&
event.getContent()['m.relates_to']?.key === 'applied'
);
};

Expand All @@ -608,9 +574,7 @@ export function isCommandEvent(
);
}

function getModel(
eventlist: DiscreteMatrixEvent[]
): string {
function getModel(eventlist: DiscreteMatrixEvent[]): string {
let activeLLMEvent = eventlist.findLast(
(event) => event.type === APP_BOXEL_ACTIVE_LLM,
) as ActiveLLMEvent;
Expand All @@ -619,3 +583,15 @@ function getModel(
}
return activeLLMEvent.content.model;
}

export function isCommandResultEvent(
  event?: DiscreteMatrixEvent,
): event is CommandResultEvent {
  // Nothing to classify when no event was supplied.
  if (event === undefined) {
    return false;
  }
  // A command result is an annotation-related event of the dedicated type.
  if (event.type !== APP_BOXEL_COMMAND_RESULT_EVENT_TYPE) {
    return false;
  }
  return event.content['m.relates_to']?.rel_type === 'm.annotation';
}
23 changes: 14 additions & 9 deletions packages/ai-bot/lib/set-title.ts
Original file line number Diff line number Diff line change
@@ -1,18 +1,23 @@
import { type MatrixEvent, type IEventRelation } from 'matrix-js-sdk';
import {
type MatrixEvent,
type IEventRelation,
IRoomEvent,
} from 'matrix-js-sdk';
import OpenAI from 'openai';
import {
type OpenAIPromptMessage,
isCommandReactionStatusApplied,
isCommandResultStatusApplied,
attachedCardsToMessage,
isCommandEvent,
getRelevantCards,
} from '../helpers';
import { MatrixClient } from './matrix';
import type { MatrixEvent as DiscreteMatrixEvent } from 'https://cardstack.com/base/matrix-event';
import { ChatCompletionMessageParam } from 'openai/resources';

const SET_TITLE_SYSTEM_MESSAGE = `You are a chat titling system, you must read the conversation and return a suggested title of no more than six words.
Do NOT say talk or discussion or discussing or chat or chatting, this is implied by the context.
The user can optionally apply 'patchCard' by sending data about fields to update.
Do NOT say talk or discussion or discussing or chat or chatting, this is implied by the context.
The user can optionally apply 'patchCard' by sending data about fields to update.
Explain the general actions and user intent. If 'patchCard' was used, express the title in an active sentence. Do NOT use the word "patch" in the title.`;

export async function setTitle(
Expand All @@ -39,7 +44,7 @@ export async function setTitle(
let result = await openai.chat.completions.create(
{
model: 'gpt-4o',
messages: startOfConversation,
messages: startOfConversation as ChatCompletionMessageParam[],
stream: false,
},
{
Expand Down Expand Up @@ -120,15 +125,15 @@ export const getLatestCommandApplyMessage = (
return [];
};

export const roomTitleAlreadySet = (rawEventLog: DiscreteMatrixEvent[]) => {
export const roomTitleAlreadySet = (rawEventLog: IRoomEvent[]) => {
return (
rawEventLog.filter((event) => event.type === 'm.room.name').length > 1 ??
false
);
};

const userAlreadyHasSentNMessages = (
rawEventLog: DiscreteMatrixEvent[],
rawEventLog: IRoomEvent[],
botUserId: string,
n = 5,
) => {
Expand All @@ -140,12 +145,12 @@ const userAlreadyHasSentNMessages = (
};

export function shouldSetRoomTitle(
rawEventLog: DiscreteMatrixEvent[],
rawEventLog: IRoomEvent[],
aiBotUserId: string,
event?: MatrixEvent,
) {
return (
(isCommandReactionStatusApplied(event) ||
(isCommandResultStatusApplied(event) ||
userAlreadyHasSentNMessages(rawEventLog, aiBotUserId)) &&
!roomTitleAlreadySet(rawEventLog)
);
Expand Down
12 changes: 7 additions & 5 deletions packages/ai-bot/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import { logger, aiBotUsername } from '@cardstack/runtime-common';
import {
type PromptParts,
constructHistory,
isCommandReactionStatusApplied,
isCommandResultStatusApplied,
getPromptParts,
extractCardFragmentsFromEvents,
} from './helpers';
Expand All @@ -34,6 +34,8 @@ import * as Sentry from '@sentry/node';

import { getAvailableCredits, saveUsageCost } from './lib/ai-billing';
import { PgAdapter } from '@cardstack/postgres';
import { ChatCompletionMessageParam } from 'openai/resources';
import { OpenAIError } from 'openai/error';

let log = logger('ai-bot');

Expand Down Expand Up @@ -73,12 +75,12 @@ class Assistant {
if (prompt.tools.length === 0) {
return this.openai.beta.chat.completions.stream({
model: prompt.model,
messages: prompt.messages,
messages: prompt.messages as ChatCompletionMessageParam[],
});
} else {
return this.openai.beta.chat.completions.stream({
model: prompt.model,
messages: prompt.messages,
messages: prompt.messages as ChatCompletionMessageParam[],
tools: prompt.tools,
tool_choice: prompt.toolChoice,
});
Expand Down Expand Up @@ -261,7 +263,7 @@ Common issues are:
finalContent = await runner.finalContent();
await responder.finalize(finalContent);
} catch (error) {
await responder.onError(error);
await responder.onError(error as OpenAIError);
} finally {
if (generationId) {
assistant.trackAiUsageCost(senderMatrixUserId, generationId);
Expand Down Expand Up @@ -289,7 +291,7 @@ Common issues are:
if (!room) {
return;
}
if (!isCommandReactionStatusApplied(event)) {
if (!isCommandResultStatusApplied(event)) {
return;
}
log.info(
Expand Down
1 change: 1 addition & 0 deletions packages/ai-bot/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
},
"devDependencies": {
"@sinonjs/fake-timers": "^11.2.2",
"@types/qunit": "^2.19.12",
"@types/sinonjs__fake-timers": "^8.1.5",
"qunit": "^2.18.0"
},
Expand Down
Loading

0 comments on commit b655c1b

Please sign in to comment.