Skip to content

Commit

Permalink
feat(minor): reference common classes on the Llama instance (#360)
Browse files Browse the repository at this point in the history
  • Loading branch information
giladgd authored Oct 6, 2024
1 parent 51eab61 commit 8145c94
Show file tree
Hide file tree
Showing 4 changed files with 34 additions and 1 deletion.
2 changes: 1 addition & 1 deletion docs/guide/downloading-models.md
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ When using a URI to reference a model,
it's recommended [to add it to your `package.json` file](#cli) to ensure it's downloaded when running `npm install`,
and also resolve it using the [`resolveModelFile`](../api/functions/resolveModelFile.md) method to get the full path of the resolved model file.

Here's and example usage of the [`resolveModelFile`](../api/functions/resolveModelFile.md) method:
Here's an example usage of the [`resolveModelFile`](../api/functions/resolveModelFile.md) method:
```typescript
import {fileURLToPath} from "url";
import path from "path";
Expand Down
9 changes: 9 additions & 0 deletions src/bindings/Llama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import {GbnfJsonSchema} from "../utils/gbnfJson/types.js";
import {LlamaJsonSchemaGrammar} from "../evaluator/LlamaJsonSchemaGrammar.js";
import {LlamaGrammar, LlamaGrammarOptions} from "../evaluator/LlamaGrammar.js";
import {ThreadsSplitter} from "../utils/ThreadsSplitter.js";
import {getLlamaClasses, LlamaClasses} from "../utils/getLlamaClasses.js";
import {BindingModule} from "./AddonTypes.js";
import {BuildGpu, BuildMetadataFile, LlamaGpuType, LlamaLocks, LlamaLogLevel} from "./types.js";
import {MemoryOrchestrator, MemoryReservation} from "./utils/MemoryOrchestrator.js";
Expand Down Expand Up @@ -56,6 +57,7 @@ export class Llama {
/** @internal */ private _nextLogNeedNewLine: boolean = false;
/** @internal */ private _disposed: boolean = false;

private _classes?: LlamaClasses;
public readonly onDispose = new EventRelay<void>();

private constructor({
Expand Down Expand Up @@ -137,6 +139,13 @@ export class Llama {
return this._disposed;
}

/**
 * Lazily-built registry of the common evaluator classes
 * (`LlamaChatSession`, `LlamaChat`, `LlamaCompletion`).
 *
 * The registry is created on first access and the same object
 * is returned on every subsequent read.
 */
public get classes() {
    if (this._classes != null)
        return this._classes;

    this._classes = getLlamaClasses();
    return this._classes;
}

/** The GPU backend this instance was loaded with. Read-only view of the private `_gpu` field. */
public get gpu() {
    return this._gpu;
}
Expand Down
2 changes: 2 additions & 0 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ import {
import {GgmlType, type GgufTensorInfo} from "./gguf/types/GgufTensorInfoTypes.js";
import {type ModelFileAccessTokens} from "./utils/modelFileAccesTokens.js";
import {type OverridesObject} from "./utils/OverridesObject.js";
import type {LlamaClasses} from "./utils/getLlamaClasses.js";


export {
Expand All @@ -108,6 +109,7 @@ export {
type LlamaOptions,
type LastBuildOptions,
type LlamaGpuType,
type LlamaClasses,
LlamaLogLevel,
NoBinaryFoundError,
resolveModelFile,
Expand Down
22 changes: 22 additions & 0 deletions src/utils/getLlamaClasses.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import {LlamaChatSession} from "../evaluator/LlamaChatSession/LlamaChatSession.js";
import {LlamaChat} from "../evaluator/LlamaChat/LlamaChat.js";
import {LlamaCompletion} from "../evaluator/LlamaCompletion.js";

/**
 * Shape of the registry of common evaluator classes returned by
 * `getLlamaClasses()`. Each property holds the class constructor itself
 * (hence `typeof`), and all properties are `readonly`.
 */
export type LlamaClasses = {
    readonly LlamaChatSession: typeof LlamaChatSession,
    readonly LlamaChat: typeof LlamaChat,
    readonly LlamaCompletion: typeof LlamaCompletion
};

// Module-level cache: the registry is built once and shared by all callers.
let cachedClasses: LlamaClasses | undefined = undefined;

/**
 * Returns a lazily-created, cached registry of the common evaluator classes.
 *
 * Uses `Object.freeze` (rather than `Object.seal`) so the returned object
 * enforces at runtime the `readonly` contract declared on `LlamaClasses`:
 * `seal` only prevents adding/removing properties, while existing properties
 * would remain writable, letting callers silently reassign the class
 * references on the shared cached object.
 *
 * @returns the shared, frozen `LlamaClasses` registry
 */
export function getLlamaClasses(): LlamaClasses {
    if (cachedClasses == null)
        cachedClasses = Object.freeze({
            LlamaChatSession,
            LlamaChat,
            LlamaCompletion
        });

    return cachedClasses;
}

0 comments on commit 8145c94

Please sign in to comment.