Handle room without available llm models
FadhlanR committed Jan 7, 2025
1 parent 178be47 commit 166e9f4
Showing 2 changed files with 39 additions and 14 deletions.
packages/ai-bot/main.ts: 27 changes (19 additions, 8 deletions)
@@ -15,7 +15,12 @@ import {
   getPromptParts,
   extractCardFragmentsFromEvents,
 } from './helpers';
-import { APP_BOXEL_AVAILABLE_LLM_MODELS, APP_BOXEL_CARDFRAGMENT_MSGTYPE, APP_BOXEL_SELECTED_LLM_MODEL, DEFAULT_LLM_MODEL } from '@cardstack/runtime-common/matrix-constants';
+import {
+  APP_BOXEL_AVAILABLE_LLM_MODELS,
+  APP_BOXEL_CARDFRAGMENT_MSGTYPE,
+  APP_BOXEL_SELECTED_LLM_MODEL,
+  DEFAULT_LLM_MODEL,
+} from '@cardstack/runtime-common/matrix-constants';
 
 import {
   shouldSetRoomTitle,
@@ -101,15 +106,20 @@ class Assistant {
 
   async setLLMModels(roomId: string) {
     const response = await this.openai.models.list();
-    const models = response.data.map(data => data.id);
+    const models = response.data.map((data) => data.id);
 
-    await updateStateEvent(this.client, roomId, APP_BOXEL_AVAILABLE_LLM_MODELS, {
-      models
-    });
+    await updateStateEvent(
+      this.client,
+      roomId,
+      APP_BOXEL_AVAILABLE_LLM_MODELS,
+      {
+        models,
+      },
+    );
 
     // set default selected LLM model
     await updateStateEvent(this.client, roomId, APP_BOXEL_SELECTED_LLM_MODEL, {
-      model: DEFAULT_LLM_MODEL
+      model: DEFAULT_LLM_MODEL,
     });
   }
 }
@@ -182,7 +192,7 @@ Common issues are:
     if (toStartOfTimeline) {
      return; // don't print paginated results
     }
-    if(event.getType() === APP_BOXEL_SELECTED_LLM_MODEL) {
+    if (event.getType() === APP_BOXEL_SELECTED_LLM_MODEL) {
      selectedLLMModels.set(room!.roomId, event.getContent().model);
      return;
     }
@@ -214,7 +224,8 @@ Common issues are:
      []) as DiscreteMatrixEvent[];
     try {
      promptParts = getPromptParts(eventList, aiBotUserId);
-      promptParts.model = selectedLLMModels.get(room!.roomId) ?? promptParts.model;
+      promptParts.model =
+        selectedLLMModels.get(room!.roomId) ?? promptParts.model;
     } catch (e) {
      log.error(e);
      responder.finalize(
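Taken together, the ai-bot side of this commit records each room's APP_BOXEL_SELECTED_LLM_MODEL state event in a per-room map and only overrides the prompt's model when such an event has been seen, falling back to whatever getPromptParts chose otherwise. A minimal standalone sketch of that fallback follows; the names mirror the diff, but the surrounding Matrix client and prompt-building code is assumed and not part of this commit.

// Sketch only: per-room LLM model selection with a fallback to the model
// already chosen by getPromptParts(). This is not the bot's actual module,
// just the selection logic in isolation.
const selectedLLMModels = new Map<string, string>();

// Called when an APP_BOXEL_SELECTED_LLM_MODEL state event arrives for a room.
function recordSelection(roomId: string, model: string): void {
  selectedLLMModels.set(roomId, model);
}

// Called when building the prompt for a room.
function resolveModel(roomId: string, promptPartsModel: string): string {
  return selectedLLMModels.get(roomId) ?? promptPartsModel;
}

// Usage sketch (room ids and model names are made up for illustration):
recordSelection('!roomA:example.org', 'gpt-4o');
resolveModel('!roomA:example.org', 'fallback-model'); // => 'gpt-4o'
resolveModel('!roomB:example.org', 'fallback-model'); // => 'fallback-model'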
packages/host/app/components/matrix/room.gts: 26 changes (20 additions, 6 deletions)
@@ -142,17 +142,17 @@ export default class Room extends Component<Signature> {
           @onChange={{this.selectLLMModel}}
           @options={{this.availableLLMModels}}
           @matchTriggerWidth={{false}}
-          @disabled={{this.selectLLMModelTask.isRunning}}
+          @disabled={{this.isLLMModelSelectionDisabled}}
           @dropdownClass='available-llm-models__dropdown'
           as |item|
         >
           <div class='llm-model'>
             <div class='check-mark'>
-              {{#if (this.isSelectedModel item)}}
+              {{#if (this.isSelectedLLMModel item)}}
                 <CheckMark width='12' height='12' />
               {{/if}}
             </div>
-            <span class='model'>{{item}}</span>
+            <span class='llm-model-name'>{{item}}</span>
           </div>
         </BoxelSelect>
       </div>
@@ -224,6 +224,10 @@ export default class Room extends Component<Signature> {
     .available-llm-models :deep(.boxel-trigger-content) {
       flex: 1;
     }
+    .available-llm-models[aria-disabled='true'] {
+      background-color: var(--boxel-light);
+      cursor: auto;
+    }
     .llm-model {
       display: grid;
       grid-template-columns: 12px 1fr;
@@ -236,13 +240,17 @@
     .available-llm-models :deep(.boxel-trigger-content .check-mark) {
       display: none;
     }
-    .available-llm-models :deep(.boxel-trigger-content .model) {
+    .available-llm-models[aria-disabled='true']
+      :deep(.boxel-trigger > *:nth-child(2)) {
+      display: none;
+    }
+    .available-llm-models :deep(.boxel-trigger-content .llm-model-name) {
       text-align: right;
     }
     .check-mark {
       height: 12px;
     }
-    .model {
+    .llm-model-name {
       overflow: hidden;
       text-overflow: ellipsis;
       text-wrap: nowrap;
@@ -528,10 +536,16 @@ export default class Room extends Component<Signature> {
   }
 
   @action
-  isSelectedModel(model: string) {
+  isSelectedLLMModel(model: string) {
     return this.selectedLLMModel === model;
   }
 
+  private get isLLMModelSelectionDisabled() {
+    return (
+      this.selectLLMModelTask.isRunning || this.availableLLMModels.length <= 0
+    );
+  }
+
   private get selectedLLMModel() {
     return this.roomResource.selectedLLMModel;
   }
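On the host side, the dropdown's @disabled argument now goes through isLLMModelSelectionDisabled, so the selector is inactive both while a selection is being saved and when the room has no available models published yet. A minimal sketch of that guard as a standalone function; the real getter reads selectLLMModelTask.isRunning and availableLLMModels from the component, so the parameters here are stand-ins.

// Sketch only: the disabling condition from the new getter, pulled out of the
// component so it can be exercised directly.
function isLLMModelSelectionDisabled(
  isSelectingModel: boolean,
  availableLLMModels: string[],
): boolean {
  return isSelectingModel || availableLLMModels.length <= 0;
}

// Usage sketch:
isLLMModelSelectionDisabled(false, []);               // => true: no models published yet
isLLMModelSelectionDisabled(false, ['gpt-4o-mini']);  // => false: selectable
isLLMModelSelectionDisabled(true, ['gpt-4o-mini']);   // => true: save in flight

The new aria-disabled='true' CSS rules give that disabled state a neutral background and hide the trigger's second child, presumably the dropdown affordance, so an empty selector does not invite interaction.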
