diff --git a/.gitignore b/.gitignore index 663d0bcb..e0123285 100644 --- a/.gitignore +++ b/.gitignore @@ -19,4 +19,5 @@ bin/ llama.cpp/ whisper.cpp/ -*/.hugo_build.lock \ No newline at end of file +*/.hugo_build.lock +docs/public/ \ No newline at end of file diff --git a/assistant/assistant.go b/assistant/assistant.go index 87adb93d..e591ebd1 100644 --- a/assistant/assistant.go +++ b/assistant/assistant.go @@ -2,6 +2,7 @@ package assistant import ( "context" + "fmt" "strings" obs "github.com/henomis/lingoose/observer" @@ -22,11 +23,16 @@ type observer interface { SpanEnd(s *obs.Span) (*obs.Span, error) } +const ( + DefaultMaxIterations = 3 +) + type Assistant struct { - llm LLM - rag RAG - thread *thread.Thread - parameters Parameters + llm LLM + rag RAG + thread *thread.Thread + parameters Parameters + maxIterations uint } type LLM interface { @@ -48,6 +54,7 @@ func New(llm LLM) *Assistant { CompanyName: defaultCompanyName, CompanyDescription: defaultCompanyDescription, }, + maxIterations: DefaultMaxIterations, } return assistant @@ -83,6 +90,33 @@ func (a *Assistant) Run(ctx context.Context) error { if errGenerate != nil { return errGenerate } + } else { + a.injectSystemMessage() + } + + for i := 0; i < int(a.maxIterations); i++ { + err = a.runIteration(ctx, i) + if err != nil { + return err + } + + if a.thread.LastMessage().Role != thread.RoleTool { + break + } + } + + err = a.stopObserveSpan(ctx, spanAssistant) + if err != nil { + return err + } + + return nil +} + +func (a *Assistant) runIteration(ctx context.Context, iteration int) error { + ctx, spanIteration, err := a.startObserveSpan(ctx, fmt.Sprintf("iteration-%d", iteration+1)) + if err != nil { + return err } err = a.llm.Generate(ctx, a.thread) @@ -90,7 +124,7 @@ func (a *Assistant) Run(ctx context.Context) error { return err } - err = a.stopObserveSpan(ctx, spanAssistant) + err = a.stopObserveSpan(ctx, spanIteration) if err != nil { return err } @@ -123,7 +157,7 @@ func (a *Assistant) 
generateRAGMessage(ctx context.Context) error { a.thread.AddMessage(thread.NewSystemMessage().AddContent( thread.NewTextContent( - systemRAGPrompt, + systemPrompt, ).Format( types.M{ "assistantName": a.parameters.AssistantName, @@ -147,6 +181,11 @@ func (a *Assistant) generateRAGMessage(ctx context.Context) error { return nil } +func (a *Assistant) WithMaxIterations(maxIterations uint) *Assistant { + a.maxIterations = maxIterations + return a +} + func (a *Assistant) startObserveSpan(ctx context.Context, name string) (context.Context, *obs.Span, error) { o, ok := obs.ContextValueObserverInstance(ctx).(observer) if o == nil || !ok { @@ -183,3 +222,27 @@ func (a *Assistant) stopObserveSpan(ctx context.Context, span *obs.Span) error { _, err := o.SpanEnd(span) return err } + +func (a *Assistant) injectSystemMessage() { + for _, message := range a.thread.Messages { + if message.Role == thread.RoleSystem { + return + } + } + + systemMessage := thread.NewSystemMessage().AddContent( + thread.NewTextContent( + systemPrompt, + ).Format( + types.M{ + "assistantName": a.parameters.AssistantName, + "assistantIdentity": a.parameters.AssistantIdentity, + "assistantScope": a.parameters.AssistantScope, + "companyName": a.parameters.CompanyName, + "companyDescription": a.parameters.CompanyDescription, + }, + ), + ) + + a.thread.Messages = append([]*thread.Message{systemMessage}, a.thread.Messages...) +} diff --git a/assistant/prompt.go b/assistant/prompt.go index 18a4f8fe..c6f14b08 100644 --- a/assistant/prompt.go +++ b/assistant/prompt.go @@ -4,7 +4,7 @@ const ( //nolint:lll baseRAGPrompt = "Use the following pieces of retrieved context to answer the question.\n\nQuestion: {{.question}}\nContext:\n{{range .results}}{{.}}\n\n{{end}}" //nolint:lll - systemRAGPrompt = "You name is {{.assistantName}}, and you are {{.assistantIdentity}} {{if ne .companyName \"\" }}at {{.companyName}}{{end}}{{if ne .companyDescription \"\" }}, {{.companyDescription}}{{end}}. 
Your task is to assist humans {{.assistantScope}}." + systemPrompt = "{{if ne .assistantName \"\"}}Your name is {{.assistantName}}, {{end}}{{if ne .assistantIdentity \"\"}}you are {{.assistantIdentity}}.{{end}} {{if ne .companyName \"\" }}at {{.companyName}}{{end}}{{if ne .companyDescription \"\" }}, {{.companyDescription}}.{{end}} Your task is to assist humans {{.assistantScope}}." defaultAssistantName = "AI assistant" defaultAssistantIdentity = "a helpful and polite assistant" diff --git a/docs/content/reference/assistant.md b/docs/content/reference/assistant.md index a7f95357..f66b1c31 100644 --- a/docs/content/reference/assistant.md +++ b/docs/content/reference/assistant.md @@ -31,4 +31,37 @@ if err != nil { fmt.Println(myAssistant.Thread()) ``` -We can define the LinGoose `Assistant` as a `Thread` runner with an optional `RAG` component that will help to produce the response. \ No newline at end of file +We can define the LinGoose `Assistant` as a `Thread` runner with an optional `RAG` component that will help to produce the response. + +## Assistant as Agent + +The `Assistant` can be used as an agent in a conversation. It can be used to automate tasks, answer questions, and provide information. 
+ +```go +auto := "auto" +myAgent := assistant.New( + openai.New().WithModel(openai.GPT4o).WithToolChoice(&auto).WithTools( + pythontool.New(), + serpapitool.New(), + ), +).WithParameters( + assistant.Parameters{ + AssistantName: "AI Assistant", + AssistantIdentity: "an helpful assistant", + AssistantScope: "with their questions.", + CompanyName: "", + CompanyDescription: "", + }, +).WithThread( + thread.New().AddMessages( + thread.NewUserMessage().AddContent( + thread.NewTextContent("calculate the average temperature in celsius degrees of New York, Rome, and Tokyo."), + ), + ), +).WithMaxIterations(10) + +err := myAgent.Run(context.Background()) +if err != nil { + panic(err) +} +``` \ No newline at end of file diff --git a/docs/content/reference/examples.md b/docs/content/reference/examples.md index 7a48602c..18a600cc 100644 --- a/docs/content/reference/examples.md +++ b/docs/content/reference/examples.md @@ -2,7 +2,7 @@ title: "LinGoose Examples" description: linkTitle: "Examples" -menu: { main: { parent: 'reference', weight: -88 } } +menu: { main: { parent: 'reference', weight: -87 } } --- LinGoose provides a number of examples to help you get started with building your own AI app. You can use these examples as a reference to understand how to build your own assistant. diff --git a/docs/content/reference/linglet.md b/docs/content/reference/linglet.md index f113e0be..16f7c90f 100644 --- a/docs/content/reference/linglet.md +++ b/docs/content/reference/linglet.md @@ -2,7 +2,7 @@ title: "LinGoose Linglets" description: linkTitle: "Linglets" -menu: { main: { parent: 'reference', weight: -89 } } +menu: { main: { parent: 'reference', weight: -88 } } --- Linglets are pre-built LinGoose Assistants with a specific purpose. They are designed to be used as a starting point for building your own AI app. You can use them as a reference to understand how to build your own assistant. 
diff --git a/docs/content/reference/observer.md b/docs/content/reference/observer.md index 2202f3a4..0c6d937e 100644 --- a/docs/content/reference/observer.md +++ b/docs/content/reference/observer.md @@ -1,5 +1,5 @@ --- -title: "Observer" +title: "Observe and Analyze LLM Applications" description: linkTitle: "Observer" menu: { main: { parent: 'reference', weight: -92 } } diff --git a/docs/content/reference/tool.md b/docs/content/reference/tool.md new file mode 100644 index 00000000..279968dc --- /dev/null +++ b/docs/content/reference/tool.md @@ -0,0 +1,52 @@ +--- +title: "Performing tasks with Tools" +description: +linkTitle: "Tool" +menu: { main: { parent: 'reference', weight: -89 } } +--- + +Tools are components that can be used to perform specific tasks. They can be used to automate, answer questions, and provide information. LinGoose offers a variety of tools that can be used to perform different actions. + +## Available Tools + +- *Python*: It can be used to run Python code and get the output. +- *SerpApi*: It can be used to get search results from Google and other search engines. +- *Dall-e*: It can be used to generate images based on text descriptions. +- *DuckDuckGo*: It can be used to get search results from DuckDuckGo. +- *RAG*: It can be used to retrieve relevant documents based on a query. +- *LLM*: It can be used to generate text based on a prompt. +- *Shell*: It can be used to run shell commands and get the output. + + +## Using Tools + +LinGoose tools can be used to perform specific tasks. Here is an example of using the `Python` and `serpapi` tools to get information and run Python code and get the output. 
+ +```go +auto := "auto" +myAgent := assistant.New( + openai.New().WithModel(openai.GPT4o).WithToolChoice(&auto).WithTools( + pythontool.New(), + serpapitool.New(), + ), +).WithParameters( + assistant.Parameters{ + AssistantName: "AI Assistant", + AssistantIdentity: "an helpful assistant", + AssistantScope: "with their questions.", + CompanyName: "", + CompanyDescription: "", + }, +).WithThread( + thread.New().AddMessages( + thread.NewUserMessage().AddContent( + thread.NewTextContent("calculate the average temperature in celsius degrees of New York, Rome, and Tokyo."), + ), + ), +).WithMaxIterations(10) + +err := myAgent.Run(context.Background()) +if err != nil { + panic(err) +} +``` \ No newline at end of file diff --git a/embedder/llamacpp/llamacpp.go b/embedder/llamacpp/llamacpp.go index c7d7868a..e8744576 100644 --- a/embedder/llamacpp/llamacpp.go +++ b/embedder/llamacpp/llamacpp.go @@ -2,10 +2,10 @@ package llamacppembedder import ( "context" + "encoding/json" + "errors" "os" "os/exec" - "strconv" - "strings" "github.com/henomis/lingoose/embedder" ) @@ -16,6 +16,16 @@ type LlamaCppEmbedder struct { modelPath string } +type output struct { + Object string `json:"object"` + Data []data `json:"data"` +} +type data struct { + Object string `json:"object"` + Index int `json:"index"` + Embedding []float64 `json:"embedding"` +} + func New() *LlamaCppEmbedder { return &LlamaCppEmbedder{ llamacppPath: "./llama.cpp/embedding", @@ -61,7 +71,7 @@ func (l *LlamaCppEmbedder) embed(ctx context.Context, text string) (embedder.Emb return nil, err } - llamacppArgs := []string{"-m", l.modelPath, "-p", text} + llamacppArgs := []string{"-m", l.modelPath, "--embd-output-format", "json", "-p", text} llamacppArgs = append(llamacppArgs, l.llamacppArgs...) 
//nolint:gosec @@ -74,14 +84,15 @@ func (l *LlamaCppEmbedder) embed(ctx context.Context, text string) (embedder.Emb } func parseEmbeddings(str string) (embedder.Embedding, error) { - strSlice := strings.Split(strings.TrimSpace(str), " ") - floatSlice := make([]float64, len(strSlice)) - for i, s := range strSlice { - f, err := strconv.ParseFloat(s, 64) - if err != nil { - return nil, err - } - floatSlice[i] = f + var out output + err := json.Unmarshal([]byte(str), &out) + if err != nil { + return nil, err } - return floatSlice, nil + + if len(out.Data) != 1 { + return nil, errors.New("no embeddings found") + } + + return out.Data[0].Embedding, nil } diff --git a/embedder/ollama/api.go b/embedder/ollama/api.go index b0233112..5e6be62d 100644 --- a/embedder/ollama/api.go +++ b/embedder/ollama/api.go @@ -34,6 +34,7 @@ func (r *request) ContentType() string { type response struct { HTTPStatusCode int `json:"-"` acceptContentType string `json:"-"` + RawBody []byte `json:"-"` Embedding []float64 `json:"embedding"` CreatedAt string `json:"created_at"` } @@ -46,7 +47,13 @@ func (r *response) Decode(body io.Reader) error { return json.NewDecoder(body).Decode(r) } -func (r *response) SetBody(_ io.Reader) error { +func (r *response) SetBody(body io.Reader) error { + rawBody, err := io.ReadAll(body) + if err != nil { + return err + } + + r.RawBody = rawBody return nil } diff --git a/embedder/ollama/ollama.go b/embedder/ollama/ollama.go index 54379303..b15766e4 100644 --- a/embedder/ollama/ollama.go +++ b/embedder/ollama/ollama.go @@ -2,6 +2,9 @@ package ollamaembedder import ( "context" + "errors" + "fmt" + "net/http" "github.com/henomis/restclientgo" @@ -14,6 +17,14 @@ const ( defaultEndpoint = "http://localhost:11434/api" ) +type OllamaEmbedError struct { + Err error +} + +func (e *OllamaEmbedError) Error() string { + return fmt.Sprintf("Error embedding text: %v", e.Err) +} + type Embedder struct { model string restClient *restclientgo.RestClient @@ -88,5 +99,11 @@ func (e 
*Embedder) embed(ctx context.Context, text string) (embedder.Embedding, return nil, err } + if resp.HTTPStatusCode >= http.StatusBadRequest { + return nil, &OllamaEmbedError{ + Err: errors.New(string(resp.RawBody)), + } + } + return resp.Embedding, nil } diff --git a/examples/assistant/agent/main.go b/examples/assistant/agent/main.go new file mode 100644 index 00000000..96441d56 --- /dev/null +++ b/examples/assistant/agent/main.go @@ -0,0 +1,59 @@ +package main + +import ( + "context" + "fmt" + + "github.com/henomis/lingoose/assistant" + "github.com/henomis/lingoose/llm/openai" + "github.com/henomis/lingoose/observer" + "github.com/henomis/lingoose/observer/langfuse" + "github.com/henomis/lingoose/thread" + + humantool "github.com/henomis/lingoose/tool/human" + pythontool "github.com/henomis/lingoose/tool/python" + serpapitool "github.com/henomis/lingoose/tool/serpapi" +) + +func main() { + ctx := context.Background() + + langfuseObserver := langfuse.New(ctx) + trace, err := langfuseObserver.Trace(&observer.Trace{Name: "Italian guests calculator"}) + if err != nil { + panic(err) + } + + ctx = observer.ContextWithObserverInstance(ctx, langfuseObserver) + ctx = observer.ContextWithTraceID(ctx, trace.ID) + + auto := "auto" + myAssistant := assistant.New( + openai.New().WithModel(openai.GPT4o).WithToolChoice(&auto).WithTools( + pythontool.New(), + serpapitool.New(), + humantool.New(), + ), + ).WithParameters( + assistant.Parameters{ + AssistantName: "AI Assistant", + AssistantIdentity: "a helpful assistant", + AssistantScope: "answering questions", + }, + ).WithThread( + thread.New().AddMessages( + thread.NewUserMessage().AddContent( + thread.NewTextContent("search the top 3 italian dishes and then their costs, then ask the user's budget in euros and calculate how many guests can be invited for each dish"), + ), + ), + ).WithMaxIterations(10) + + err = myAssistant.Run(ctx) + if err != nil { + panic(err) + } + + fmt.Println(myAssistant.Thread()) + + 
langfuseObserver.Flush(ctx) +} diff --git a/examples/assistant/main.go b/examples/assistant/rag/main.go similarity index 100% rename from examples/assistant/main.go rename to examples/assistant/rag/main.go diff --git a/examples/llm/openai/thread/main.go b/examples/llm/openai/thread/main.go index 5180349c..7bc462f0 100644 --- a/examples/llm/openai/thread/main.go +++ b/examples/llm/openai/thread/main.go @@ -2,11 +2,12 @@ package main import ( "context" + "encoding/json" "fmt" - "strings" "github.com/henomis/lingoose/llm/openai" "github.com/henomis/lingoose/thread" + "github.com/henomis/lingoose/tool/dalle" "github.com/henomis/lingoose/transformer" ) @@ -32,15 +33,7 @@ func newStr(str string) *string { func main() { openaillm := openai.New().WithModel(openai.GPT4o) - openaillm.WithToolChoice(newStr("auto")) - err := openaillm.BindFunction( - crateImage, - "createImage", - "use this function to create an image from a description", - ) - if err != nil { - panic(err) - } + openaillm.WithToolChoice(newStr("auto")).WithTools(dalle.New()) t := thread.New().AddMessage( thread.NewUserMessage().AddContent( @@ -48,15 +41,22 @@ func main() { ), ) - err = openaillm.Generate(context.Background(), t) + err := openaillm.Generate(context.Background(), t) if err != nil { panic(err) } if t.LastMessage().Role == thread.RoleTool { + var output dalle.Output + + err = json.Unmarshal([]byte(t.LastMessage().Contents[0].AsToolResponseData().Result), &output) + if err != nil { + panic(err) + } + t.AddMessage(thread.NewUserMessage().AddContent( thread.NewImageContentFromURL( - strings.ReplaceAll(t.LastMessage().Contents[0].AsToolResponseData().Result, `"`, ""), + output.ImageURL, ), ).AddContent( thread.NewTextContent("can you describe the image?"), diff --git a/examples/llm/openai/tools/python/main.go b/examples/llm/openai/tools/python/main.go new file mode 100644 index 00000000..71a8d580 --- /dev/null +++ b/examples/llm/openai/tools/python/main.go @@ -0,0 +1,32 @@ +package main + +import ( + 
"context" + "fmt" + + "github.com/henomis/lingoose/llm/openai" + "github.com/henomis/lingoose/thread" + "github.com/henomis/lingoose/tool/python" +) + +func main() { + newStr := func(str string) *string { + return &str + } + llm := openai.New().WithModel(openai.GPT3Dot5Turbo0613).WithToolChoice(newStr("auto")).WithTools( + python.New(), + ) + + t := thread.New().AddMessage( + thread.NewUserMessage().AddContent( + thread.NewTextContent("calculate reverse string of 'ailatiditalia', don't try to guess, let's use appropriate tool"), + ), + ) + + llm.Generate(context.Background(), t) + if t.LastMessage().Role == thread.RoleTool { + llm.Generate(context.Background(), t) + } + + fmt.Println(t) +} diff --git a/examples/llm/openai/tools/rag/main.go b/examples/llm/openai/tools/rag/main.go new file mode 100644 index 00000000..30010b5b --- /dev/null +++ b/examples/llm/openai/tools/rag/main.go @@ -0,0 +1,66 @@ +package main + +import ( + "context" + "fmt" + "os" + + openaiembedder "github.com/henomis/lingoose/embedder/openai" + "github.com/henomis/lingoose/index" + "github.com/henomis/lingoose/index/vectordb/jsondb" + "github.com/henomis/lingoose/llm/openai" + "github.com/henomis/lingoose/rag" + "github.com/henomis/lingoose/thread" + ragtool "github.com/henomis/lingoose/tool/rag" + "github.com/henomis/lingoose/tool/serpapi" + "github.com/henomis/lingoose/tool/shell" +) + +func main() { + + rag := rag.New( + index.New( + jsondb.New().WithPersist("index.json"), + openaiembedder.New(openaiembedder.AdaEmbeddingV2), + ), + ).WithChunkSize(1000).WithChunkOverlap(0) + + _, err := os.Stat("index.json") + if os.IsNotExist(err) { + err = rag.AddSources(context.Background(), "state_of_the_union.txt") + if err != nil { + panic(err) + } + } + + newStr := func(str string) *string { + return &str + } + llm := openai.New().WithModel(openai.GPT4o).WithToolChoice(newStr("auto")).WithTools( + ragtool.New(rag, "US covid vaccines"), + serpapi.New(), + shell.New(), + ) + + topics := []string{ + "how 
many covid vaccine doses US has donated to other countries.", + "who's the author of LinGoose github project.", + "which process is consuming the most memory.", + } + + for _, topic := range topics { + t := thread.New().AddMessage( + thread.NewUserMessage().AddContent( + thread.NewTextContent("Please tell me " + topic), + ), + ) + + llm.Generate(context.Background(), t) + if t.LastMessage().Role == thread.RoleTool { + llm.Generate(context.Background(), t) + } + + fmt.Println(t) + } + +} diff --git a/examples/observer/assistant/main.go b/examples/observer/assistant/main.go index 9e744f9d..f5880c09 100644 --- a/examples/observer/assistant/main.go +++ b/examples/observer/assistant/main.go @@ -71,4 +71,6 @@ func main() { fmt.Println("----") fmt.Println(a.Thread()) fmt.Println("----") + + o.Flush(ctx) } diff --git a/examples/observer/langfuse/main.go b/examples/observer/langfuse/main.go index 6de4022c..80c46d49 100644 --- a/examples/observer/langfuse/main.go +++ b/examples/observer/langfuse/main.go @@ -84,12 +84,14 @@ func main() { panic(err) } - generation.Output = &thread.Message{ - Role: thread.RoleAssistant, - Contents: []*thread.Content{ - { - Type: thread.ContentTypeText, - Data: "The Q3 OKRs contain goals for multiple teams...", + generation.Output = []*thread.Message{ + { + Role: thread.RoleAssistant, + Contents: []*thread.Content{ + { + Type: thread.ContentTypeText, + Data: "The Q3 OKRs contain goals for multiple teams...", + }, }, }, } diff --git a/examples/tools/duckduckgo/main.go b/examples/tools/duckduckgo/main.go new file mode 100644 index 00000000..fbcfd0a8 --- /dev/null +++ b/examples/tools/duckduckgo/main.go @@ -0,0 +1,15 @@ +package main + +import ( + "fmt" + + "github.com/henomis/lingoose/tool/duckduckgo" +) + +func main() { + + t := duckduckgo.New().WithMaxResults(5) + f := t.Fn().(duckduckgo.FnPrototype) + + fmt.Println(f(duckduckgo.Input{Query: "Simone Vellei"})) +} diff --git a/examples/tools/python/main.go b/examples/tools/python/main.go new file 
mode 100644 index 00000000..3eee0b30 --- /dev/null +++ b/examples/tools/python/main.go @@ -0,0 +1,16 @@ +package main + +import ( + "fmt" + + "github.com/henomis/lingoose/tool/python" +) + +func main() { + t := python.New().WithPythonPath("python3") + + pythonScript := `print("Hello from Python!")` + f := t.Fn().(python.FnPrototype) + + fmt.Println(f(python.Input{PythonCode: pythonScript})) +} diff --git a/examples/tools/serpapi/main.go b/examples/tools/serpapi/main.go new file mode 100644 index 00000000..55ca5163 --- /dev/null +++ b/examples/tools/serpapi/main.go @@ -0,0 +1,15 @@ +package main + +import ( + "fmt" + + "github.com/henomis/lingoose/tool/serpapi" +) + +func main() { + + t := serpapi.New() + f := t.Fn().(serpapi.FnPrototype) + + fmt.Println(f(serpapi.Input{Query: "Simone Vellei"})) +} diff --git a/examples/tools/shell/main.go b/examples/tools/shell/main.go new file mode 100644 index 00000000..86cb39a9 --- /dev/null +++ b/examples/tools/shell/main.go @@ -0,0 +1,16 @@ +package main + +import ( + "fmt" + + "github.com/henomis/lingoose/tool/shell" +) + +func main() { + t := shell.New() + + bashScript := `echo "Hello from $SHELL!"` + f := t.Fn().(shell.FnPrototype) + + fmt.Println(f(shell.Input{BashScript: bashScript})) +} diff --git a/go.mod b/go.mod index 25c30a82..4131f84e 100644 --- a/go.mod +++ b/go.mod @@ -15,6 +15,7 @@ require ( github.com/henomis/restclientgo v1.2.0 github.com/invopop/jsonschema v0.7.0 github.com/sashabaranov/go-openai v1.24.0 + golang.org/x/net v0.25.0 ) require ( diff --git a/go.sum b/go.sum index e2d3391c..a79137ce 100644 --- a/go.sum +++ b/go.sum @@ -38,6 +38,8 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +golang.org/x/net v0.25.0 
h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/llm/antropic/antropic.go b/llm/antropic/antropic.go index 648bf521..a4a81e70 100644 --- a/llm/antropic/antropic.go +++ b/llm/antropic/antropic.go @@ -171,7 +171,7 @@ func (o *Antropic) Generate(ctx context.Context, t *thread.Thread) error { return err } - err = o.stopObserveGeneration(ctx, generation, t) + err = o.stopObserveGeneration(ctx, generation, []*thread.Message{t.LastMessage()}) if err != nil { return fmt.Errorf("%w: %w", ErrAnthropicChat, err) } @@ -281,11 +281,11 @@ func (o *Antropic) startObserveGeneration(ctx context.Context, t *thread.Thread) func (o *Antropic) stopObserveGeneration( ctx context.Context, generation *observer.Generation, - t *thread.Thread, + messagges []*thread.Message, ) error { return llmobserver.StopObserveGeneration( ctx, generation, - t, + messagges, ) } diff --git a/llm/cohere/cohere.go b/llm/cohere/cohere.go index 4966f9af..1c53a04e 100644 --- a/llm/cohere/cohere.go +++ b/llm/cohere/cohere.go @@ -233,7 +233,7 @@ func (c *Cohere) Generate(ctx context.Context, t *thread.Thread) error { return err } - err = c.stopObserveGeneration(ctx, generation, t) + err = c.stopObserveGeneration(ctx, generation, []*thread.Message{t.LastMessage()}) if err != nil { return fmt.Errorf("%w: %w", ErrCohereChat, err) } @@ -309,11 +309,11 @@ func (c *Cohere) startObserveGeneration(ctx context.Context, t *thread.Thread) ( func (c *Cohere) stopObserveGeneration( ctx context.Context, generation *observer.Generation, - t *thread.Thread, + messages []*thread.Message, ) error { return llmobserver.StopObserveGeneration( ctx, generation, - 
t, + messages, ) } diff --git a/llm/observer/observer.go b/llm/observer/observer.go index b5ddfbcb..b54ad7a9 100644 --- a/llm/observer/observer.go +++ b/llm/observer/observer.go @@ -47,7 +47,7 @@ func StartObserveGeneration( func StopObserveGeneration( ctx context.Context, generation *observer.Generation, - t *thread.Thread, + messages []*thread.Message, ) error { o, ok := observer.ContextValueObserverInstance(ctx).(LLMObserver) if o == nil || !ok { @@ -55,7 +55,7 @@ func StopObserveGeneration( return nil } - generation.Output = t.LastMessage() + generation.Output = messages _, err := o.GenerationEnd(generation) return err } diff --git a/llm/ollama/ollama.go b/llm/ollama/ollama.go index 51e9c61f..329446cb 100644 --- a/llm/ollama/ollama.go +++ b/llm/ollama/ollama.go @@ -150,7 +150,7 @@ func (o *Ollama) Generate(ctx context.Context, t *thread.Thread) error { return err } - err = o.stopObserveGeneration(ctx, generation, t) + err = o.stopObserveGeneration(ctx, generation, []*thread.Message{t.LastMessage()}) if err != nil { return fmt.Errorf("%w: %w", ErrOllamaChat, err) } @@ -248,11 +248,11 @@ func (o *Ollama) startObserveGeneration(ctx context.Context, t *thread.Thread) ( func (o *Ollama) stopObserveGeneration( ctx context.Context, generation *observer.Generation, - t *thread.Thread, + messages []*thread.Message, ) error { return llmobserver.StopObserveGeneration( ctx, generation, - t, + messages, ) } diff --git a/llm/openai/function.go b/llm/openai/function.go index 4b9c77bb..deaa12a5 100644 --- a/llm/openai/function.go +++ b/llm/openai/function.go @@ -79,6 +79,25 @@ func (o *OpenAI) BindFunction( return nil } +type Tool interface { + Description() string + Name() string + Fn() any +} + +func (o *OpenAI) WithTools(tools ...Tool) *OpenAI { + for _, tool := range tools { + function, err := bindFunction(tool.Fn(), tool.Name(), tool.Description()) + if err != nil { + fmt.Println(err) + } + + o.functions[tool.Name()] = *function + } + + return o +} + func (o *Legacy) 
getFunctions() []openai.FunctionDefinition { var functions []openai.FunctionDefinition diff --git a/llm/openai/openai.go b/llm/openai/openai.go index bf6deba0..f0da97b8 100644 --- a/llm/openai/openai.go +++ b/llm/openai/openai.go @@ -202,6 +202,8 @@ func (o *OpenAI) Generate(ctx context.Context, t *thread.Thread) error { return fmt.Errorf("%w: %w", ErrOpenAIChat, err) } + nMessageBeforeGeneration := len(t.Messages) + if o.streamCallbackFn != nil { err = o.stream(ctx, t, chatCompletionRequest) } else { @@ -211,7 +213,7 @@ func (o *OpenAI) Generate(ctx context.Context, t *thread.Thread) error { return err } - err = o.stopObserveGeneration(ctx, generation, t) + err = o.stopObserveGeneration(ctx, generation, t.Messages[nMessageBeforeGeneration:]) if err != nil { return fmt.Errorf("%w: %w", ErrOpenAIChat, err) } @@ -454,11 +456,11 @@ func (o *OpenAI) startObserveGeneration(ctx context.Context, t *thread.Thread) ( func (o *OpenAI) stopObserveGeneration( ctx context.Context, generation *observer.Generation, - t *thread.Thread, + messages []*thread.Message, ) error { return llmobserver.StopObserveGeneration( ctx, generation, - t, + messages, ) } diff --git a/observer/langfuse/formatter.go b/observer/langfuse/formatter.go index 9409178a..8d77a5e0 100644 --- a/observer/langfuse/formatter.go +++ b/observer/langfuse/formatter.go @@ -54,22 +54,77 @@ func threadMessagesToLangfuseMSlice(messages []*thread.Message) []model.M { return mSlice } +func threadOutputMessagesToLangfuseOutput(messages []*thread.Message) any { + if len(messages) == 1 && + messages[0].Role == thread.RoleAssistant && + len(messages[0].Contents) == 1 && + messages[0].Contents[0].Type == thread.ContentTypeText { + return threadMessageToLangfuseM(messages[0]) + } + + toolCalls := model.M{} + toolMessages := []*thread.Message{} + + for _, message := range messages { + if message.Role == thread.RoleAssistant && + message.Contents[0].Type == thread.ContentTypeToolCall { + toolCalls = 
threadMessageToLangfuseM(message) + } else if message.Role == thread.RoleTool && + message.Contents[0].Type == thread.ContentTypeToolResponse { + toolMessages = append(toolMessages, message) + } + } + + return append([]model.M{toolCalls}, threadMessagesToLangfuseMSlice(toolMessages)...) +} + func threadMessageToLangfuseM(message *thread.Message) model.M { if message == nil { return nil } - messageContent := "" role := message.Role + if message.Role == thread.RoleTool { + data := message.Contents[0].AsToolResponseData() + m := model.M{ + "type": message.Contents[0].Type, + "id": data.ID, + "name": data.Name, + "results": data.Result, + } + + return model.M{ + "role": role, + "content": m, + } + } + + messageContent := "" + m := make([]model.M, 0) for _, content := range message.Contents { if content.Type == thread.ContentTypeText { messageContent += content.AsString() + } else if content.Type == thread.ContentTypeToolCall { + for _, data := range content.AsToolCallData() { + m = append(m, model.M{ + "type": content.Type, + "id": data.ID, + "name": data.Name, + "arguments": data.Arguments, + }) + } } } - return model.M{ + output := model.M{ "role": role, "content": messageContent, } + + if len(m) > 0 { + output["content"] = m + } + + return output } func observerGenerationToLangfuseGeneration(g *observer.Generation) *model.Generation { @@ -81,7 +136,7 @@ func observerGenerationToLangfuseGeneration(g *observer.Generation) *model.Gener Model: g.Model, ModelParameters: g.ModelParameters, Input: threadMessagesToLangfuseMSlice(g.Input), - Output: threadMessageToLangfuseM(g.Output), + Output: threadOutputMessagesToLangfuseOutput(g.Output), Metadata: g.Metadata, } } diff --git a/observer/observer.go b/observer/observer.go index 89fb8680..b3912350 100644 --- a/observer/observer.go +++ b/observer/observer.go @@ -38,7 +38,7 @@ type Generation struct { Model string ModelParameters types.M Input []*thread.Message - Output *thread.Message + Output []*thread.Message Metadata 
types.M } diff --git a/tool/dalle/dalle.go b/tool/dalle/dalle.go new file mode 100644 index 00000000..03e3cb5e --- /dev/null +++ b/tool/dalle/dalle.go @@ -0,0 +1,56 @@ +package dalle + +import ( + "context" + "fmt" + "time" + + "github.com/henomis/lingoose/transformer" +) + +const ( + defaultTimeoutInSeconds = 60 +) + +type Tool struct { +} + +type Input struct { + Description string `json:"description" jsonschema:"description=the description of the image that should be created"` +} + +type Output struct { + Error string `json:"error,omitempty"` + ImageURL string `json:"imageURL,omitempty"` +} + +type FnPrototype func(Input) Output + +func New() *Tool { + return &Tool{} +} + +func (t *Tool) Name() string { + return "dalle" +} + +func (t *Tool) Description() string { + return "A tool that creates an image from a description." +} + +func (t *Tool) Fn() any { + return t.fn +} + +func (t *Tool) fn(i Input) Output { + ctx, cancel := context.WithTimeout(context.Background(), defaultTimeoutInSeconds*time.Second) + defer cancel() + + d := transformer.NewDallE().WithImageSize(transformer.DallEImageSize512x512) + imageURL, err := d.Transform(ctx, i.Description) + if err != nil { + return Output{Error: fmt.Sprintf("error creating image: %v", err)} + } + + return Output{ImageURL: imageURL.(string)} +} diff --git a/tool/duckduckgo/api.go b/tool/duckduckgo/api.go new file mode 100644 index 00000000..a301c5e7 --- /dev/null +++ b/tool/duckduckgo/api.go @@ -0,0 +1,168 @@ +package duckduckgo + +import ( + "bytes" + "io" + "regexp" + "strings" + + "github.com/henomis/restclientgo" + "golang.org/x/net/html" +) + +const ( + class = "class" +) + +type request struct { + Query string +} + +type response struct { + MaxResults uint + HTTPStatusCode int + RawBody []byte + Results []result +} + +type result struct { + Title string + Info string + URL string +} + +func (r *request) Path() (string, error) { + return "/html/?q=" + r.Query, nil +} + +func (r *request) Encode() (io.Reader, error) 
{ + return nil, nil +} + +func (r *request) ContentType() string { + return "" +} + +func (r *response) Decode(body io.Reader) error { + results, err := r.parseBody(body) + if err != nil { + return err + } + + r.Results = results + return nil +} + +func (r *response) SetBody(body io.Reader) error { + r.RawBody, _ = io.ReadAll(body) + return nil +} + +func (r *response) AcceptContentType() string { + return "text/html" +} + +func (r *response) SetStatusCode(code int) error { + r.HTTPStatusCode = code + return nil +} + +func (r *response) SetHeaders(_ restclientgo.Headers) error { return nil } + +func (r *response) parseBody(body io.Reader) ([]result, error) { + doc, err := html.Parse(body) + if err != nil { + return nil, err + } + ch := make(chan result) + go r.findWebResults(ch, doc) + + results := []result{} + for n := range ch { + results = append(results, n) + } + + return results, nil +} + +func (r *response) findWebResults(ch chan result, doc *html.Node) { + var results uint + var f func(*html.Node) + f = func(n *html.Node) { + if results >= r.MaxResults { + return + } + if n.Type == html.ElementNode && n.Data == "div" { + for _, div := range n.Attr { + if div.Key == class && strings.Contains(div.Val, "web-result") { + info, href := r.findInfo(n) + ch <- result{ + Title: r.findTitle(n), + Info: info, + URL: href, + } + results++ + break + } + } + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + f(c) + } + } + f(doc) + close(ch) +} + +func (r *response) findTitle(n *html.Node) string { + var title string + var f func(*html.Node) + f = func(n *html.Node) { + if n.Type == html.ElementNode && n.Data == "a" { + for _, a := range n.Attr { + if a.Key == class && strings.Contains(a.Val, "result__a") { + title = n.FirstChild.Data + break + } + } + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + f(c) + } + } + f(n) + return title +} + +//nolint:gocognit +func (r *response) findInfo(n *html.Node) (string, string) { + var info string + var link string 
+ var f func(*html.Node) + f = func(n *html.Node) { + if n.Type == html.ElementNode && n.Data == "a" { + for _, a := range n.Attr { + if a.Key == class && strings.Contains(a.Val, "result__snippet") { + var b bytes.Buffer + _ = html.Render(&b, n) + + re := regexp.MustCompile("<.*?>") + info = html.UnescapeString(re.ReplaceAllString(b.String(), "")) + + for _, h := range n.Attr { + if h.Key == "href" { + link = "https:" + h.Val + break + } + } + break + } + } + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + f(c) + } + } + f(n) + return info, link +} diff --git a/tool/duckduckgo/duckduckgo.go b/tool/duckduckgo/duckduckgo.go new file mode 100644 index 00000000..ec374942 --- /dev/null +++ b/tool/duckduckgo/duckduckgo.go @@ -0,0 +1,85 @@ +package duckduckgo + +import ( + "context" + "fmt" + "net/http" + "time" + + "github.com/henomis/restclientgo" +) + +const ( + defaultTimeoutInSeconds = 60 +) + +type Tool struct { + maxResults uint + userAgent string + restClient *restclientgo.RestClient +} + +type Input struct { + Query string `json:"query" jsonschema:"description=the query to search for"` +} + +type Output struct { + Error string `json:"error,omitempty"` + Results []result `json:"results,omitempty"` +} + +type FnPrototype func(Input) Output + +func New() *Tool { + t := &Tool{ + maxResults: 1, + } + + restClient := restclientgo.New("https://html.duckduckgo.com"). + WithRequestModifier( + func(r *http.Request) *http.Request { + r.Header.Add("User-Agent", t.userAgent) + return r + }, + ) + + t.restClient = restClient + return t +} + +func (t *Tool) WithUserAgent(userAgent string) *Tool { + t.userAgent = userAgent + return t +} + +func (t *Tool) WithMaxResults(maxResults uint) *Tool { + t.maxResults = maxResults + return t +} + +func (t *Tool) Name() string { + return "duckduckgo" +} + +func (t *Tool) Description() string { + return "A tool that uses the DuckDuckGo internet search engine for a query." 
+} + +func (t *Tool) Fn() any { + return t.fn +} + +func (t *Tool) fn(i Input) Output { + ctx, cancel := context.WithTimeout(context.Background(), defaultTimeoutInSeconds*time.Second) + defer cancel() + + req := &request{Query: i.Query} + res := &response{MaxResults: t.maxResults} + + err := t.restClient.Get(ctx, req, res) + if err != nil { + return Output{Error: fmt.Sprintf("failed to search DuckDuckGo: %v", err)} + } + + return Output{Results: res.Results} +} diff --git a/tool/human/human.go b/tool/human/human.go new file mode 100644 index 00000000..9e6f6e55 --- /dev/null +++ b/tool/human/human.go @@ -0,0 +1,44 @@ +package human + +import ( + "fmt" +) + +type Tool struct { +} + +func New() *Tool { + return &Tool{} +} + +type Input struct { + Question string `json:"question" jsonschema:"description=the question to ask the human"` +} + +type Output struct { + Error string `json:"error,omitempty"` + Result string `json:"result,omitempty"` +} + +type FnPrototype = func(Input) Output + +func (t *Tool) Name() string { + return "human" +} + +func (t *Tool) Description() string { + return "A tool that asks a question to a human and returns the answer. Use it to interact with a human." 
+} + +func (t *Tool) Fn() any { + return t.fn +} + +func (t *Tool) fn(i Input) Output { + var answer string + + fmt.Printf("\n\n%s > ", i.Question) + fmt.Scanln(&answer) + + return Output{Result: answer} +} diff --git a/tool/llm/llm.go b/tool/llm/llm.go new file mode 100644 index 00000000..4f6190cb --- /dev/null +++ b/tool/llm/llm.go @@ -0,0 +1,68 @@ +package llm + +import ( + "context" + "time" + + "github.com/henomis/lingoose/thread" +) + +const ( + defaultTimeoutInMinutes = 6 +) + +type LLM interface { + Generate(context.Context, *thread.Thread) error +} + +type Tool struct { + llm LLM +} + +func New(llm LLM) *Tool { + return &Tool{ + llm: llm, + } +} + +type Input struct { + Query string `json:"query" jsonschema:"description=user query"` +} + +type Output struct { + Error string `json:"error,omitempty"` + Result string `json:"result,omitempty"` +} + +type FnPrototype func(Input) Output + +func (t *Tool) Name() string { + return "llm" +} + +func (t *Tool) Description() string { + return "A tool that uses a language model to generate a response to a user query." 
+} + +func (t *Tool) Fn() any { + return t.fn +} + +//nolint:gosec +func (t *Tool) fn(i Input) Output { + ctx, cancel := context.WithTimeout(context.Background(), defaultTimeoutInMinutes*time.Minute) + defer cancel() + + th := thread.New().AddMessage( + thread.NewUserMessage().AddContent( + thread.NewTextContent(i.Query), + ), + ) + + err := t.llm.Generate(ctx, th) + if err != nil { + return Output{Error: err.Error()} + } + + return Output{Result: th.LastMessage().Contents[0].AsString()} +} diff --git a/tool/python/python.go b/tool/python/python.go new file mode 100644 index 00000000..c495a852 --- /dev/null +++ b/tool/python/python.go @@ -0,0 +1,77 @@ +package python + +import ( + "bytes" + "fmt" + "os/exec" +) + +type Tool struct { + pythonPath string +} + +func New() *Tool { + return &Tool{ + pythonPath: "python3", + } +} + +func (t *Tool) WithPythonPath(pythonPath string) *Tool { + t.pythonPath = pythonPath + return t +} + +type Input struct { + // nolint:lll + PythonCode string `json:"python_code" jsonschema:"description=python code that uses print() to print the final result to stdout."` +} + +type Output struct { + Error string `json:"error,omitempty"` + Result string `json:"result,omitempty"` +} + +type FnPrototype = func(Input) Output + +func (t *Tool) Name() string { + return "python" +} + +//nolint:lll +func (t *Tool) Description() string { + // nolint:lll + return "Use this tool to solve calculations, manipulate data, or perform any other Python-related tasks. The code should use print() to print the final result to stdout." +} + +func (t *Tool) Fn() any { + return t.fn +} + +//nolint:gosec +func (t *Tool) fn(i Input) Output { + // Create a command to run the Python interpreter with the script. + cmd := exec.Command(t.pythonPath, "-c", i.PythonCode) + + // Create a buffer to capture the output. + var out bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &stderr + + // Run the command. 
+ err := cmd.Run() + if err != nil { + return Output{ + Error: fmt.Sprintf("failed to run script: %v, stderr: %v", err, stderr.String()), + } + } + + if out.String() == "" { + return Output{ + Error: "no output from script, script must print the final result to stdout", + } + } + + // Return the output as a string. + return Output{Result: out.String()} +} diff --git a/tool/rag/rag.go b/tool/rag/rag.go new file mode 100644 index 00000000..c46788cf --- /dev/null +++ b/tool/rag/rag.go @@ -0,0 +1,62 @@ +package rag + +import ( + "context" + "strings" + "time" + + "github.com/henomis/lingoose/rag" +) + +const ( + defaultTimeoutInMinutes = 6 +) + +type Tool struct { + rag *rag.RAG + topic string +} + +func New(rag *rag.RAG, topic string) *Tool { + return &Tool{ + rag: rag, + topic: topic, + } +} + +type Input struct { + Query string `json:"rag_query" jsonschema:"description=search query"` +} + +type Output struct { + Error string `json:"error,omitempty"` + Result string `json:"result,omitempty"` +} + +type FnPrototype = func(Input) Output + +func (t *Tool) Name() string { + return "rag" +} + +func (t *Tool) Description() string { + return "A tool that searches information ONLY for this topic: " + t.topic + ". DO NOT use this tool for other topics." +} + +func (t *Tool) Fn() any { + return t.fn +} + +//nolint:gosec +func (t *Tool) fn(i Input) Output { + ctx, cancel := context.WithTimeout(context.Background(), defaultTimeoutInMinutes*time.Minute) + defer cancel() + + results, err := t.rag.Retrieve(ctx, i.Query) + if err != nil { + return Output{Error: err.Error()} + } + + // Return the output as a string. 
+ return Output{Result: strings.Join(results, "\n")} +} diff --git a/tool/serpapi/api.go b/tool/serpapi/api.go new file mode 100644 index 00000000..aa3f4c6f --- /dev/null +++ b/tool/serpapi/api.go @@ -0,0 +1,122 @@ +package serpapi + +import ( + "encoding/json" + "io" + + "github.com/henomis/restclientgo" +) + +type request struct { + Query string + GoogleDomain string + CountryCode string + LanguageCode string + APIKey string +} + +type response struct { + HTTPStatusCode int + Map map[string]interface{} + RawBody []byte + apiResponse apiResponse + Results []result +} + +type apiResponse struct { + AnswerBox map[string]interface{} `json:"answer_box,omitempty"` + SportsResults map[string]interface{} `json:"sports_results,omitempty"` + KnowledgeGraph map[string]interface{} `json:"knowledge_graph,omitempty"` + OrganicResults []OrganicResults `json:"organic_results"` +} + +type Top struct { + Extensions []string `json:"extensions"` +} + +type RichSnippet struct { + Top Top `json:"top"` +} + +type OrganicResults struct { + Position int `json:"position"` + Title string `json:"title"` + Link string `json:"link"` + RedirectLink string `json:"redirect_link"` + DisplayedLink string `json:"displayed_link"` + Thumbnail string `json:"thumbnail,omitempty"` + Favicon string `json:"favicon"` + Snippet string `json:"snippet"` + Source string `json:"source"` + RichSnippet RichSnippet `json:"rich_snippet,omitempty"` + SnippetHighlightedWords []string `json:"snippet_highlighted_words,omitempty"` +} + +type result struct { + Title string + Info string + URL string +} + +func (r *request) Path() (string, error) { + urlValues := restclientgo.NewURLValues() + urlValues.Add("q", &r.Query) + urlValues.Add("api_key", &r.APIKey) + + if r.GoogleDomain != "" { + urlValues.Add("google_domain", &r.GoogleDomain) + } + + if r.CountryCode != "" { + urlValues.Add("gl", &r.CountryCode) + } + + if r.LanguageCode != "" { + urlValues.Add("hl", &r.LanguageCode) + } + + params := urlValues.Encode() + + 
return "/search?" + params, nil +} + +func (r *request) Encode() (io.Reader, error) { + return nil, nil +} + +func (r *request) ContentType() string { + return "" +} + +func (r *response) Decode(body io.Reader) error { + err := json.NewDecoder(body).Decode(&r.apiResponse) + if err != nil { + return err + } + + for _, res := range r.apiResponse.OrganicResults { + r.Results = append(r.Results, result{ + Title: res.Title, + Info: res.Snippet, + URL: res.Link, + }) + } + + return nil +} + +func (r *response) SetBody(body io.Reader) error { + r.RawBody, _ = io.ReadAll(body) + return nil +} + +func (r *response) AcceptContentType() string { + return "application/json" +} + +func (r *response) SetStatusCode(code int) error { + r.HTTPStatusCode = code + return nil +} + +func (r *response) SetHeaders(_ restclientgo.Headers) error { return nil } diff --git a/tool/serpapi/serpapi.go b/tool/serpapi/serpapi.go new file mode 100644 index 00000000..9637e691 --- /dev/null +++ b/tool/serpapi/serpapi.go @@ -0,0 +1,98 @@ +package serpapi + +import ( + "context" + "fmt" + "os" + "time" + + "github.com/henomis/restclientgo" +) + +const ( + defaultTimeoutInSeconds = 60 +) + +type Tool struct { + restClient *restclientgo.RestClient + googleDomain string + countryCode string + languageCode string + apiKey string +} + +type Input struct { + Query string `json:"query" jsonschema:"description=the query to search for"` +} + +type Output struct { + Error string `json:"error,omitempty"` + Results []result `json:"results,omitempty"` +} + +type FnPrototype = func(Input) Output + +func New() *Tool { + t := &Tool{ + apiKey: os.Getenv("SERPAPI_API_KEY"), + restClient: restclientgo.New("https://serpapi.com"), + googleDomain: "google.com", + countryCode: "us", + languageCode: "en", + } + + return t +} + +func (t *Tool) WithGoogleDomain(googleDomain string) *Tool { + t.googleDomain = googleDomain + return t +} + +func (t *Tool) WithCountryCode(countryCode string) *Tool { + t.countryCode = countryCode + 
return t +} + +func (t *Tool) WithLanguageCode(languageCode string) *Tool { + t.languageCode = languageCode + return t +} + +func (t *Tool) WithAPIKey(apiKey string) *Tool { + t.apiKey = apiKey + return t +} + +func (t *Tool) Name() string { + return "google" +} + +func (t *Tool) Description() string { + return "A tool that uses the Google internet search engine for a query." +} + +func (t *Tool) Fn() any { + return t.fn +} + +func (t *Tool) fn(i Input) Output { + ctx, cancel := context.WithTimeout(context.Background(), defaultTimeoutInSeconds*time.Second) + defer cancel() + + req := &request{ + Query: i.Query, + GoogleDomain: t.googleDomain, + CountryCode: t.countryCode, + LanguageCode: t.languageCode, + APIKey: t.apiKey, + } + res := &response{} + + err := t.restClient.Get(ctx, req, res) + if err != nil { + return Output{Error: fmt.Sprintf("failed to search serpapi: %v", err)} + } + + return Output{Results: res.Results} +} diff --git a/tool/shell/shell.go b/tool/shell/shell.go new file mode 100644 index 00000000..c5c5e308 --- /dev/null +++ b/tool/shell/shell.go @@ -0,0 +1,91 @@ +package shell + +import ( + "bytes" + "fmt" + "os/exec" +) + +type Tool struct { + shell string + askForConfirm bool +} + +func New() *Tool { + return &Tool{ + shell: "bash", + askForConfirm: true, + } +} + +func (t *Tool) WithShell(shell string) *Tool { + t.shell = shell + return t +} + +func (t *Tool) WithAskForConfirm(askForConfirm bool) *Tool { + t.askForConfirm = askForConfirm + return t +} + +type Input struct { + BashScript string `json:"bash_code" jsonschema:"description=shell script"` +} + +type Output struct { + Error string `json:"error,omitempty"` + Result string `json:"result,omitempty"` +} + +type FnPrototype = func(Input) Output + +func (t *Tool) Name() string { + return "bash" +} + +func (t *Tool) Description() string { + return "A tool that runs a shell script using the " + t.shell + " interpreter. Use it to interact with the OS." 
+} + +func (t *Tool) Fn() any { + return t.fn +} + +//nolint:gosec +func (t *Tool) fn(i Input) Output { + // Ask for confirmation if the flag is set. + if t.askForConfirm { + fmt.Println("Are you sure you want to run the following script?") + fmt.Println("-------------------------------------------------") + fmt.Println(i.BashScript) + fmt.Println("-------------------------------------------------") + fmt.Print("Type 'yes' to confirm > ") + var confirm string + fmt.Scanln(&confirm) + if confirm != "yes" { + return Output{ + Error: "script execution aborted", + } + } + } + + // Create a command to run the Bash interpreter with the script. + cmd := exec.Command(t.shell, "-c", i.BashScript) + + // Create a buffer to capture the output. + var out bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &stderr + + // Run the command. + err := cmd.Run() + if err != nil { + return Output{ + Error: fmt.Sprintf("failed to run script: %v, stderr: %v", err, stderr.String()), + } + } + + // Return the output as a string. 
+ return Output{Result: out.String()} +} diff --git a/tool/tool_router/tool_router.go b/tool/tool_router/tool_router.go new file mode 100644 index 00000000..17d35bcd --- /dev/null +++ b/tool/tool_router/tool_router.go @@ -0,0 +1,84 @@ +package toolrouter + +import ( + "context" + "time" + + "github.com/henomis/lingoose/thread" +) + +const ( + defaultTimeoutInMinutes = 6 +) + +type TTool interface { + Description() string + Name() string + Fn() any +} + +type Tool struct { + llm LLM + tools []TTool +} + +type LLM interface { + Generate(context.Context, *thread.Thread) error +} + +func New(llm LLM, tools ...TTool) *Tool { + return &Tool{ + tools: tools, + llm: llm, + } +} + +type Input struct { + Query string `json:"query" jsonschema:"description=user query"` +} + +type Output struct { + Error string `json:"error,omitempty"` + Result any `json:"result,omitempty"` +} + +type FnPrototype func(Input) Output + +func (t *Tool) Name() string { + return "query_router" +} + +func (t *Tool) Description() string { + return "A tool that select the right tool to answer to user queries." +} + +func (t *Tool) Fn() any { + return t.fn +} + +//nolint:gosec +func (t *Tool) fn(i Input) Output { + ctx, cancel := context.WithTimeout(context.Background(), defaultTimeoutInMinutes*time.Minute) + defer cancel() + + query := "Here's a list of available tools:\n\n" + for _, tool := range t.tools { + query += "Name: " + tool.Name() + "\nDescription: " + tool.Description() + "\n\n" + } + + query += "\nPlease select the right tool that can better answer the query '" + i.Query + + "'. Give me only the name of the tool, nothing else." + + th := thread.New().AddMessage( + thread.NewUserMessage().AddContent( + thread.NewTextContent(query), + ), + ) + + err := t.llm.Generate(ctx, th) + if err != nil { + return Output{Error: err.Error()} + } + + return Output{Result: th.LastMessage().Contents[0].AsString()} +}