From 3095a06dc223f97f17ae426c4ab2b46e13a648e6 Mon Sep 17 00:00:00 2001
From: buhe
Date: Tue, 23 Jan 2024 12:04:37 +0800
Subject: [PATCH] Add llmfarm_core.swift as a remote source control dependency
 with version 0.8.1

---
 Package.resolved                   |  9 +++++++++
 Package.swift                      |  2 +-
 Sources/LangChain/llms/Local.swift | 28 +---------------------------
 3 files changed, 11 insertions(+), 28 deletions(-)

diff --git a/Package.resolved b/Package.resolved
index b5382dd..d353cb0 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -63,6 +63,15 @@
         "version" : "4.2.2"
       }
     },
+    {
+      "identity" : "llmfarm_core.swift",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/guinmoon/llmfarm_core.swift",
+      "state" : {
+        "revision" : "7226a84a69361c312c997453827c609e613585fd",
+        "version" : "0.8.1"
+      }
+    },
     {
       "identity" : "openai-kit",
       "kind" : "remoteSourceControl",
diff --git a/Package.swift b/Package.swift
index ae72d87..2883fb5 100644
--- a/Package.swift
+++ b/Package.swift
@@ -26,7 +26,7 @@ let package = Package(
         .package(url: "https://github.com/juyan/swift-filestore", .upToNextMajor(from: "0.5.0")),
         .package(url: "https://github.com/ZachNagengast/similarity-search-kit.git", from: "0.0.11"),
         .package(url: "https://github.com/google/generative-ai-swift", .upToNextMajor(from: "0.4.4")),
-        .package(path: "/Users/guyanhua/code/github/LLMFarm/llmfarm_core.swift/")
+        .package(url: "https://github.com/guinmoon/llmfarm_core.swift", .upToNextMajor(from: "0.8.1")),
     ],
     targets: [
         // Targets are the basic building blocks of a package, defining a module or a test suite.
diff --git a/Sources/LangChain/llms/Local.swift b/Sources/LangChain/llms/Local.swift
index 44b6aaf..13f8965 100644
--- a/Sources/LangChain/llms/Local.swift
+++ b/Sources/LangChain/llms/Local.swift
@@ -6,39 +6,13 @@
 //
 
 import Foundation
-import llmfarm_core
-import llmfarm_core_cpp
-
 public class Local {
     public init() {
         
     }
     
     public func _send(text: String, stops: [String] = []) async throws -> LLMResult {
-        print("Hello.")
-        var input_text = "State the meaning of life."
-        var modelInference:ModelInference
-        var ai = AI(_modelPath: "/Users/guyanhua/llama-2-7b-chat.Q3_K_S.gguf",_chatName: "chat")
-        modelInference = ModelInference.LLama_gguf
-        var params:ModelAndContextParams = .default
-        params.context = 4095
-        params.n_threads = 14
-        //
-        params.use_metal = false
-
-        do{
-            try ai.loadModel(modelInference,contextParams: params)
-            var output=""
-            try ExceptionCather.catchException {
-                output = try! ai.model.predict(input_text, mainCallback)
-            }
-            // llama_save_session_file(ai.model.context,"/Users/guinmoon/dev/alpaca_llama_etc/dump_state.bin",ai.model.session_tokens, ai.model.session_tokens.count)
-            // llama_save_state(ai.model.context,"/Users/guinmoon/dev/alpaca_llama_etc/dump_state_.bin")
-            //
-            print(output)
-        }catch {
-            print (error)
-        }
+        return LLMResult()
     }
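
Note on the Local.swift change: the previous _send body hard-coded a developer-local model path, prompt and thread count, so this patch strips it back to a stub that returns an empty LLMResult. For reference, the same llmfarm_core calls can be driven by a caller-supplied model path instead. The sketch below is illustrative only and not part of the patch: AI, ModelInference.LLama_gguf, ModelAndContextParams, loadModel and predict are the symbols used by the removed code, while the helper name runLocalModel, the callback shape and the chosen parameter values are assumptions.

import Foundation
import llmfarm_core

// Illustrative helper (not part of this patch): load a GGUF model from a
// caller-supplied path and run a single prompt.
func runLocalModel(modelPath: String, prompt: String) throws -> String {
    // AI, ModelInference and ModelAndContextParams come from llmfarm_core,
    // used here the same way as in the code removed by this patch.
    var ai = AI(_modelPath: modelPath, _chatName: "chat")

    var params: ModelAndContextParams = .default
    params.context = 4096      // context window; tune for the model in use
    params.n_threads = 8       // CPU threads; adjust per device
    params.use_metal = false   // enable on Apple GPUs if supported

    try ai.loadModel(ModelInference.LLama_gguf, contextParams: params)

    // Assumed callback shape: (generated chunk, elapsed time) -> Bool; the
    // full completion is returned by predict itself.
    let output = try ai.model.predict(prompt) { _, _ in
        false   // assumed convention: returning false keeps generation going
    }
    return output
}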
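
Hypothetical call site for the sketch above (the model path is a placeholder, not a file referenced by this repository):

let reply = try runLocalModel(
    modelPath: "/path/to/llama-model.gguf",   // placeholder path, supply your own
    prompt: "State the meaning of life."
)
print(reply)

Passing the path and inference settings in as parameters is what keeps the hard-coded /Users/... values removed above out of the library source.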