feat: split gguf files support (#214)
* feat: split gguf files support
* feat: `pull` command
* feat: `stopOnAbortSignal` and `customStopTriggers` on `LlamaChat` and `LlamaChatSession`
* feat: `checkTensors` parameter on `loadModel`
* feat: improve Electron support
* fix: more efficient max context size finding algorithm
* fix: make embedding-only models work correctly
* fix: perform context shift on the correct token index on generation
* fix: make context loading work for all models on Electron
* refactor: simplify `LlamaText` implementation
* docs: update simple usage
giladgd authored May 8, 2024
1 parent ef501f9 commit 453c162
Showing 60 changed files with 9,161 additions and 6,508 deletions.
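
Before the diffs, a minimal sketch of how the features listed in the commit message might fit together. The option names come from the commit message and the updated README example below; treating `stopOnAbortSignal` and `customStopTriggers` as per-prompt options, and the example values passed to them, are assumptions rather than something confirmed by this commit.

```typescript
import {fileURLToPath} from "url";
import path from "path";
import {getLlama, LlamaChatSession} from "node-llama-cpp";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

const llama = await getLlama();
const model = await llama.loadModel({
    // for a split gguf model, the path of the first part
    // (e.g. "model-00001-of-00002.gguf") would presumably be used here
    modelPath: path.join(__dirname, "models", "dolphin-2.1-mistral-7b.Q4_K_M.gguf"),
    checkTensors: true // validate tensor data while the model is loaded
});
const context = await model.createContext();
const session = new LlamaChatSession({
    contextSequence: context.getSequence()
});

const abortController = new AbortController();
const answer = await session.prompt("Hi there, how are you?", {
    signal: abortController.signal,
    stopOnAbortSignal: true, // assumed: return the partial response on abort instead of throwing
    customStopTriggers: ["\nUser:"] // assumed: extra text triggers that end generation early
});
console.log(answer);
```

The `checkTensors` flag corresponds to the `check_tensors` model parameter wired up in the `llama/addon.cpp` diff below.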
92 changes: 75 additions & 17 deletions .vitepress/config.ts
@@ -1,7 +1,9 @@
import {defineConfig, DefaultTheme} from "vitepress";
import {DefaultTheme, defineConfig} from "vitepress";
import path from "path";
import fs from "fs-extra";
import {fileURLToPath} from "url";
import {transformerTwoslash} from "@shikijs/vitepress-twoslash";
import ts from "typescript";
import typedocSidebar from "../docs/api/typedoc-sidebar.json"; // if this import fails, run `npm run docs:generateTypedoc`
import envVar from "env-var";
import process from "process";
@@ -126,6 +128,29 @@ export default defineConfig({
{rel: "canonical", href: canonicalUrl}
])
},
markdown: {
codeTransformers: [
transformerTwoslash({
// explicitTrigger: false,
twoslashOptions: {
compilerOptions: {
...(await fs.readJSON(path.join(__dirname, "..", "tsconfig.json"))).compilerOptions,
moduleResolution: undefined,
paths: {
"node-llama-cpp": [path.resolve(__dirname, "..", "src", "index.ts")]
},
typeRoots: [
path.resolve(__dirname, "..", "node_modules"),
path.resolve(__dirname, "..", "node_modules", "@types")
],
module: ts.ModuleKind.ES2022,
target: ts.ScriptTarget.ES2022
},
tsModule: ts
}
})
]
},
themeConfig: {
editLink: {
pattern: "https://github.com/withcatai/node-llama-cpp/edit/master/docs/:path"
@@ -196,6 +221,7 @@ export default defineConfig({
collapsed: true,
link: "/",
items: [
{text: "Pull", link: "/pull"},
{text: "Chat", link: "/chat"},
{text: "Download", link: "/download"},
{text: "Complete", link: "/complete"},
@@ -216,6 +242,7 @@
}]
},
socialLinks: [
{icon: "npm", link: "https://www.npmjs.com/package/node-llama-cpp"},
{icon: "github", link: "https://github.com/withcatai/node-llama-cpp"}
]
}
@@ -257,8 +284,6 @@ function getApiReferenceSidebar(): typeof typedocSidebar {
return item;

case "Variables":
item.text = "Enums";

if (item.collapsed)
item.collapsed = false;

@@ -271,6 +296,7 @@ function getApiReferenceSidebar(): typeof typedocSidebar {
}

function orderApiReferenceSidebar(sidebar: typeof typedocSidebar): typeof typedocSidebar {
applyOverrides(sidebar);
orderClasses(sidebar);
orderTypes(sidebar);
orderFunctions(sidebar);
@@ -280,6 +306,23 @@ function orderApiReferenceSidebar(sidebar: typeof typedocSidebar): typeof typedocSidebar {
return sidebar;
}

function applyOverrides(sidebar: typeof typedocSidebar) {
const functions = sidebar.find((item) => item.text === "Functions");

const llamaTextFunction = functions?.items?.find((item) => item.text === "LlamaText");
if (llamaTextFunction != null) {
delete (llamaTextFunction as {link?: string}).link;
}

const classes = sidebar.find((item) => item.text === "Classes");
if (classes != null && classes.items instanceof Array && !classes.items.some((item) => item.text === "LlamaText")) {
classes.items.push({
text: "LlamaText",
link: "/api/classes/LlamaText.md"
});
}
}

function orderClasses(sidebar: typeof typedocSidebar) {
const baseChatWrapper = "ChatWrapper";
const chatWrapperItems: DefaultTheme.SidebarItem[] = [];
@@ -322,21 +365,36 @@ function orderClasses(sidebar: typeof typedocSidebar) {
{moveToEndIfGrouped: false}
)

const LlamaTextGroup = {
text: "LlamaText",
collapsed: true,
items: []
};
(classes.items as DefaultTheme.SidebarItem[]).push(LlamaTextGroup);
const LlamaTextGroupItemsOrder = ["SpecialTokensText", "SpecialToken"];
let LlamaTextGroup = classes.items.find((item) => item.text === "LlamaText") as {
text: string,
collapsed?: boolean,
items?: []
} | undefined;
if (LlamaTextGroup == null) {
LlamaTextGroup = {
text: "LlamaText",
collapsed: true,
items: []
};
(classes.items as DefaultTheme.SidebarItem[]).push(LlamaTextGroup);
}

groupItems(
classes.items,
(item) => item === LlamaTextGroup,
(item) => item.text != null && LlamaTextGroupItemsOrder.includes(item.text),
{moveToEndIfGrouped: false}
)
sortItemsInOrder(LlamaTextGroup.items, LlamaTextGroupItemsOrder);
if (LlamaTextGroup != null) {
LlamaTextGroup.collapsed = true;

if (LlamaTextGroup.items == null)
LlamaTextGroup.items = [];

const LlamaTextGroupItemsOrder = ["SpecialTokensText", "SpecialToken"];

groupItems(
classes.items,
(item) => item === LlamaTextGroup,
(item) => item.text != null && LlamaTextGroupItemsOrder.includes(item.text),
{moveToEndIfGrouped: false}
)
sortItemsInOrder(LlamaTextGroup.items, LlamaTextGroupItemsOrder);
}

sortItemsInOrder(chatWrapperItems, chatWrappersOrder);
}
9 changes: 8 additions & 1 deletion .vitepress/theme/index.ts
@@ -1,11 +1,18 @@
import {h} from "vue";
import Theme from "vitepress/theme";
import TwoslashFloatingVue from "@shikijs/vitepress-twoslash/client";
import "@shikijs/vitepress-twoslash/style.css";
import "./style.css";

import type {EnhanceAppContext} from "vitepress";

export default {
extends: Theme,
Layout: () => {
return h(Theme.Layout, null, {});
},
enhanceApp({app, router, siteData}) {}
enhanceApp({app, router, siteData}: EnhanceAppContext) {
// @ts-ignore
app.use(TwoslashFloatingVue);
}
};
46 changes: 46 additions & 0 deletions .vitepress/theme/style.css
@@ -40,6 +40,52 @@
}
}

.VPNavBar .divider-line:after {
display: block;
position: absolute;
width: 100%;
height: 32px;
background: linear-gradient(var(--vp-c-bg), transparent 70%);
content: "";
transition: opacity 0.5s;
opacity: 0;
pointer-events: none;
}

.VPNavBar:not(.home) .divider-line[class] {
background-color: transparent;
}
.VPNavBar:not(.home) .divider-line:after {
opacity: 1;
}

@media (min-width: 960px) {
.VPNavBar:not(.home.top) .divider-line[class] {
background-color: transparent;
}
.VPNavBar:not(.home.top) .divider-line:after {
opacity: 1;
}

.VPNavBar:not(.has-sidebar):not(.home.top) .divider[class] {
background-color: transparent;
}
}

.VPLocalNav[class] {
border-bottom: none;
}
.VPLocalNav[class]:after {
display: block;
position: absolute;
width: 100%;
height: 32px;
background: linear-gradient(var(--vp-c-bg), transparent 70%);
content: "";
transition: opacity 0.5s;
pointer-events: none;
}

.main-badges>p {
display: flex;
flex-direction: row;
2 changes: 1 addition & 1 deletion .vitepress/utils/buildHtmlTable.ts
@@ -14,7 +14,7 @@ export function buildHtmlTable(header: string[], rows: string[][]) {
}

if (rows.length > 0) {
res += "" + "<tbody>\n";
res += "" + '<tbody style="white-space: pre-wrap">\n';

for (const row of rows) {
res += "" + "" + "<tr>\n";
2 changes: 1 addition & 1 deletion .vitepress/utils/getCommandHtmlDoc.ts
@@ -213,7 +213,7 @@ function renderOptionsGroupOptionsTable(options: {name: string, option: Options}
const hasDefaultDescription = option.defaultDescription != null && option.defaultDescription.trim().length > 0;
if (option.default != null || hasDefaultDescription) {
if (hasDefaultDescription && option.defaultDescription != null)
optionDescription.push(`<span style="opacity: 0.72">(${htmlEscape("default: ")}${htmlEscape(option.defaultDescription.trim())})</span>`);
optionDescription.push(`<span style="opacity: 0.72">(${htmlEscape("default: ")}${htmlEscapeWithCodeMarkdown(option.defaultDescription.trim())})</span>`);
else
optionDescription.push(`<span style="opacity: 0.72">(${htmlEscape("default: ")}<code>${htmlEscape(option.default)}</code>)</span>`);
}
13 changes: 8 additions & 5 deletions README.md
@@ -47,15 +47,18 @@ To disable this behavior set the environment variable `NODE_LLAMA_CPP_SKIP_DOWNLOAD`
```typescript
import {fileURLToPath} from "url";
import path from "path";
import {LlamaModel, LlamaContext, LlamaChatSession} from "node-llama-cpp";
import {getLlama, LlamaChatSession} from "node-llama-cpp";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

const model = new LlamaModel({
modelPath: path.join(__dirname, "models", "codellama-13b.Q3_K_M.gguf")
const llama = await getLlama();
const model = await llama.loadModel({
modelPath: path.join(__dirname, "models", "dolphin-2.1-mistral-7b.Q4_K_M.gguf")
});
const context = await model.createContext();
const session = new LlamaChatSession({
contextSequence: context.getSequence()
});
const context = new LlamaContext({model});
const session = new LlamaChatSession({context});


const q1 = "Hi there, how are you?";
3 changes: 3 additions & 0 deletions docs/guide/cli/cli.data.ts
@@ -1,5 +1,6 @@
import {CommandModule} from "yargs";
import {getCommandHtmlDoc} from "../../../.vitepress/utils/getCommandHtmlDoc.js";
import {PullCommand} from "../../../src/cli/commands/PullCommand.js";
import {BuildCommand} from "../../../src/cli/commands/BuildCommand.js";
import {ChatCommand} from "../../../src/cli/commands/ChatCommand.js";
import {CompleteCommand} from "../../../src/cli/commands/CompleteCommand.js";
@@ -23,6 +24,7 @@ export default {

return {
index: buildIndexTable([
["pull", PullCommand],
["chat", ChatCommand],
["complete", CompleteCommand],
["infill", InfillCommand],
@@ -32,6 +34,7 @@ export default {
["clear", ClearCommand]
]),

pull: await getCommandHtmlDoc(PullCommand),
chat: await getCommandHtmlDoc(ChatCommand),
complete: await getCommandHtmlDoc(CompleteCommand),
infill: await getCommandHtmlDoc(InfillCommand),
17 changes: 17 additions & 0 deletions docs/guide/cli/pull.md
@@ -0,0 +1,17 @@
---
outline: deep
---
# `pull` command

<script setup lang="ts">
import {data as docs} from "./cli.data.js";
const commandDoc = docs.pull;
</script>

{{commandDoc.description}}

## Usage
```shell-vue
{{commandDoc.usage}}
```
<div v-html="commandDoc.options"></div>
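
For orientation, a hypothetical invocation of the new command; the generated usage text and options table rendered above are the authoritative reference, and the argument shape shown here is an assumption:

```shell
# download a model file into the project; for a model split into multiple
# gguf parts, pulling the first part should presumably fetch the rest as well
npx --no node-llama-cpp pull <model-file-url>
```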
15 changes: 9 additions & 6 deletions docs/guide/index.md
@@ -1,7 +1,7 @@
---
outline: deep
---
# Getting started
# Getting started

## Installation
Inside of your node.js project directory, run this command:
@@ -53,15 +53,18 @@ npx --no node-llama-cpp chat --wrapper llamaChat --model <path-to-a-model-file-o
```typescript
import {fileURLToPath} from "url";
import path from "path";
import {LlamaModel, LlamaContext, LlamaChatSession} from "node-llama-cpp";
import {getLlama, LlamaChatSession} from "node-llama-cpp";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

const model = new LlamaModel({
modelPath: path.join(__dirname, "models", "codellama-13b.Q3_K_M.gguf")
const llama = await getLlama();
const model = await llama.loadModel({
modelPath: path.join(__dirname, "models", "dolphin-2.1-mistral-7b.Q4_K_M.gguf")
});
const context = await model.createContext();
const session = new LlamaChatSession({
contextSequence: context.getSequence()
});
const context = new LlamaContext({model});
const session = new LlamaChatSession({context});


const q1 = "Hi there, how are you?";
25 changes: 18 additions & 7 deletions llama/addon.cpp
@@ -289,6 +289,10 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
model_params.use_mlock = options.Get("useMlock").As<Napi::Boolean>().Value();
}

if (options.Has("checkTensors")) {
model_params.check_tensors = options.Get("checkTensors").As<Napi::Boolean>().Value();
}

if (options.Has("onLoadProgress")) {
auto onLoadProgressJSCallback = options.Get("onLoadProgress").As<Napi::Function>();
if (onLoadProgressJSCallback.IsFunction()) {
@@ -1483,6 +1487,11 @@ class AddonContextSampleTokenWorker : public Napi::AsyncWorker {
llama_token new_token_id = 0;

// Select the best prediction.
if (llama_get_logits(ctx->ctx) == nullptr) {
SetError("This model does not support token generation");
return;
}

auto logits = llama_get_logits_ith(ctx->ctx, batchLogitIndex);
auto n_vocab = llama_n_vocab(ctx->model->model);

@@ -1701,13 +1710,15 @@ static void addonLlamaCppLogCallback(ggml_log_level level, const char* text, voi
}
}

if (level == 2) {
fputs(text, stderr);
fflush(stderr);
} else {
fputs(text, stdout);
fflush(stdout);
}
if (text != nullptr) {
if (level == 2) {
fputs(text, stderr);
fflush(stderr);
} else {
fputs(text, stdout);
fflush(stdout);
}
}
}

Napi::Value setLogger(const Napi::CallbackInfo& info) {
(The diffs for the remaining changed files are not shown here.)
