Skip to content

Commit

Permalink
feat: pass a mocked LLM Service to Aila via the new services object (#25)
Browse files Browse the repository at this point in the history
  • Loading branch information
stefl authored Aug 29, 2024
1 parent 5c5f1b3 commit b7b46a8
Show file tree
Hide file tree
Showing 5 changed files with 99 additions and 39 deletions.
62 changes: 56 additions & 6 deletions packages/aila/src/core/Aila.test.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import { Aila } from ".";
import { MockLLMService } from "../../tests/mocks/MockLLMService";
import { setupPolly } from "../../tests/mocks/setupPolly";
import { MockCategoriser } from "../features/categorisation/categorisers/MockCategoriser";
import { AilaAuthenticationError } from "./AilaError";
import { MockLLMService } from "./llm/MockLLMService";

describe("Aila", () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
Expand Down Expand Up @@ -253,10 +253,9 @@ describe("Aila", () => {
value: newTitle,
},
};
const llmService = new MockLLMService(
`${JSON.stringify(mockedResponse)}␞\n`,
);

const chatLlmService = new MockLLMService([
JSON.stringify(mockedResponse),
]);
const ailaInstance = new Aila({
lessonPlan: {
title: "Roman Britain",
Expand All @@ -267,7 +266,6 @@ describe("Aila", () => {
chat: {
id: "123",
userId: "user123",
llmService,
},
options: {
usePersistence: false,
Expand All @@ -276,6 +274,9 @@ describe("Aila", () => {
useModeration: false,
},
plugins: [],
services: {
chatLlmService,
},
});

await ailaInstance.generateSync({
Expand Down Expand Up @@ -307,6 +308,7 @@ describe("Aila", () => {
useModeration: false,
},
services: {
chatLlmService: new MockLLMService(),
chatCategoriser: mockCategoriser,
},
plugins: [],
Expand All @@ -319,4 +321,52 @@ describe("Aila", () => {
expect(ailaInstance.lesson.plan.keyStage).toBe("key-stage-3");
});
});

describe("categorisation and LLM service", () => {
it("should use both MockCategoriser and MockLLMService", async () => {
const mockedLessonPlan = {
title: "Mocked Lesson Plan",
subject: "Mocked Subject",
keyStage: "key-stage-3",
};

const mockCategoriser = new MockCategoriser({ mockedLessonPlan });

const mockLLMResponse = [
'{"type":"patch","reasoning":"Update title","value":{"op":"replace","path":"/title","value":"Updated Mocked Lesson Plan"}}␞\n',
'{"type":"patch","reasoning":"Update subject","value":{"op":"replace","path":"/subject","value":"Updated Mocked Subject"}}␞\n',
];
const mockLLMService = new MockLLMService(mockLLMResponse);

const ailaInstance = new Aila({
lessonPlan: {},
chat: { id: "123", userId: "user123" },
options: {
usePersistence: false,
useRag: false,
useAnalytics: false,
useModeration: false,
},
services: {
chatCategoriser: mockCategoriser,
chatLlmService: mockLLMService,
},
plugins: [],
});

await ailaInstance.initialise();

// Check if MockCategoriser was used
expect(ailaInstance.lesson.plan.title).toBe("Mocked Lesson Plan");
expect(ailaInstance.lesson.plan.subject).toBe("Mocked Subject");
expect(ailaInstance.lesson.plan.keyStage).toBe("key-stage-3");

// Use MockLLMService to generate a response
await ailaInstance.generateSync({ input: "Test input" });

// Check if MockLLMService updates were applied
expect(ailaInstance.lesson.plan.title).toBe("Updated Mocked Lesson Plan");
expect(ailaInstance.lesson.plan.subject).toBe("Updated Mocked Subject");
});
});
});
11 changes: 11 additions & 0 deletions packages/aila/src/core/Aila.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@ import {
} from "./AilaServices";
import { AilaChat, Message } from "./chat";
import { AilaLesson } from "./lesson";
import { LLMService } from "./llm/LLMService";
import { OpenAIService } from "./llm/OpenAIService";
import { AilaPlugin } from "./plugins/types";
import {
AilaGenerateLessonPlanOptions,
Expand All @@ -37,6 +39,7 @@ export class Aila implements AilaServices {
private _errorReporter?: AilaErrorReportingFeature;
private _isShutdown: boolean = false;
private _lesson: AilaLessonService;
private _chatLlmService: LLMService;
private _moderation?: AilaModerationFeature;
private _options: AilaOptionsWithDefaultFallbackValues;
private _persistence: AilaPersistenceFeature[] = [];
Expand All @@ -51,10 +54,14 @@ export class Aila implements AilaServices {
this._chatId = options.chat.id;
this._options = this.initialiseOptions(options.options);

this._chatLlmService =
options.services?.chatLlmService ??
new OpenAIService({ userId: this._userId, chatId: this._chatId });
this._chat = new AilaChat({
...options.chat,
aila: this,
promptBuilder: options.promptBuilder,
llmService: this._chatLlmService,
});

this._prisma = options.prisma ?? globalPrisma;
Expand Down Expand Up @@ -175,6 +182,10 @@ export class Aila implements AilaServices {
return this._plugins;
}

public get chatLlmService() {
return this._chatLlmService;
}

// Check methods
public checkUserIdPresentIfPersisting() {
if (!this._chat.userId && this._options.usePersistence) {
Expand Down
31 changes: 31 additions & 0 deletions packages/aila/src/core/llm/MockLLMService.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
import { LLMService } from "./LLMService";

/**
 * Test double for the LLM service: instead of calling a real model it
 * streams back a configurable list of string chunks, UTF-8 encoded,
 * with a short pause between chunks to mimic token-by-token streaming.
 */
export class MockLLMService implements LLMService {
  name = "MockLLM";
  // Chunks emitted (in order) by createChatCompletionStream.
  private responseChunks: string[];

  constructor(responseChunks: string[] = ["This is ", "a mock ", "response."]) {
    this.responseChunks = responseChunks;
  }

  /**
   * Returns a reader over a byte stream that enqueues every configured
   * chunk, encoded as UTF-8, pausing 4ms between chunks.
   */
  async createChatCompletionStream(): Promise<
    ReadableStreamDefaultReader<Uint8Array | undefined>
  > {
    const encoder = new TextEncoder();
    // Capture by reference, matching how the stream callback closes over
    // the instance's chunk list.
    const pending = this.responseChunks;
    const byteStream = new ReadableStream({
      async start(controller) {
        for (const piece of pending) {
          controller.enqueue(encoder.encode(piece));
          // Small delay so consumers exercise their incremental-read path.
          await new Promise((resolve) => setTimeout(resolve, 4));
        }
        controller.close();
      },
    });
    return byteStream.getReader();
  }

  /** Replace the chunks emitted by subsequent stream calls. */
  setResponse(chunks: string[]) {
    this.responseChunks = chunks;
  }
}
1 change: 1 addition & 0 deletions packages/aila/src/core/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -70,5 +70,6 @@ export type AilaInitializationOptions = {
plugins: AilaPlugin[];
services?: {
chatCategoriser?: AilaCategorisationFeature;
chatLlmService?: LLMService;
};
};
33 changes: 0 additions & 33 deletions packages/aila/tests/mocks/MockLLMService.ts

This file was deleted.

0 comments on commit b7b46a8

Please sign in to comment.