From a309b30b726bc905f886ee0d02439114273d541b Mon Sep 17 00:00:00 2001
From: Simon Klinkert
Date: Tue, 5 Nov 2024 12:25:00 +0100
Subject: [PATCH] Implement perplexity agent tool

This PR enables AI agents to use Perplexity AI to retrieve data from the web.
---
 tools/perplexity/README.md     | 62 ++++++++++++++++++++++++++++++
 tools/perplexity/perplexity.go | 70 ++++++++++++++++++++++++++++++++++
 2 files changed, 132 insertions(+)
 create mode 100644 tools/perplexity/README.md
 create mode 100644 tools/perplexity/perplexity.go

diff --git a/tools/perplexity/README.md b/tools/perplexity/README.md
new file mode 100644
index 000000000..59628f175
--- /dev/null
+++ b/tools/perplexity/README.md
@@ -0,0 +1,62 @@
+
+# Perplexity Tool Integration for Agents
+
+Use Perplexity in your AI agent to enrich it with data from the web.
+
+Full code example:
+
+```go
+package main
+
+import (
+	"context"
+	"fmt"
+	"os"
+
+	"github.com/tmc/langchaingo/agents"
+	"github.com/tmc/langchaingo/callbacks"
+	"github.com/tmc/langchaingo/chains"
+	"github.com/tmc/langchaingo/llms/openai"
+	"github.com/tmc/langchaingo/tools"
+	"github.com/tmc/langchaingo/tools/perplexity"
+)
+
+func main() {
+	if err := run(); err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+}
+
+func run() error {
+	llm, err := openai.New(
+		openai.WithModel("gpt-4o-mini"),
+		openai.WithCallback(callbacks.LogHandler{}),
+	)
+	if err != nil {
+		return err
+	}
+
+	perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall)
+	if err != nil {
+		return err
+	}
+
+	agentTools := []tools.Tool{
+		perpl,
+	}
+
+	agent := agents.NewOneShotAgent(llm,
+		agentTools,
+		agents.WithMaxIterations(2),
+	)
+	executor := agents.NewExecutor(agent)
+
+	question := "what's the latest and best LLM on the market at the moment?"
+	answer, err := chains.Run(context.Background(), executor, question)
+
+	fmt.Println(answer)
+
+	return err
+}
+```
\ No newline at end of file
diff --git a/tools/perplexity/perplexity.go b/tools/perplexity/perplexity.go
new file mode 100644
index 000000000..826c25d69
--- /dev/null
+++ b/tools/perplexity/perplexity.go
@@ -0,0 +1,70 @@
+package perplexity
+
+import (
+	"context"
+	"fmt"
+	"os"
+
+	"github.com/tmc/langchaingo/llms"
+	"github.com/tmc/langchaingo/llms/openai"
+)
+
+type Model string
+
+// Model pricing overview: https://docs.perplexity.ai/guides/pricing
+const (
+	ModelLlamaSonarSmall Model = "llama-3.1-sonar-small-128k-online"
+	ModelLlamaSonarLarge Model = "llama-3.1-sonar-large-128k-online"
+	ModelLlamaSonarHuge  Model = "llama-3.1-sonar-huge-128k-online"
+)
+
+type Perplexity struct {
+	llm *openai.LLM
+}
+
+func NewPerplexity(model Model) (*Perplexity, error) {
+	perplexity := &Perplexity{}
+	var err error
+
+	apiKey := os.Getenv("PERPLEXITY_API_KEY")
+	if apiKey == "" {
+		return nil, fmt.Errorf("PERPLEXITY_API_KEY not set")
+	}
+
+	perplexity.llm, err = openai.New(
+		openai.WithModel(string(model)),
+		openai.WithBaseURL("https://api.perplexity.ai"),
+		openai.WithToken(apiKey),
+	)
+	if err != nil {
+		return nil, err
+	}
+
+	return perplexity, nil
+}
+
+func (p *Perplexity) Name() string {
+	return "PerplexityAI"
+}
+
+func (p *Perplexity) Description() string {
+	return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet."
+}
+
+func (p *Perplexity) Call(ctx context.Context, input string) (string, error) {
+	content := []llms.MessageContent{
+		llms.TextParts(llms.ChatMessageTypeHuman, input),
+	}
+
+	var generatedText string
+	_, err := p.llm.GenerateContent(ctx, content,
+		llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error {
+			generatedText += string(chunk)
+			return nil
+		}))
+	if err != nil {
+		return "", err
+	}
+
+	return generatedText, nil
+}
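
The new tool can also be exercised directly, without wiring up an agent. Below is a minimal sketch that uses only the functions added in this patch (`NewPerplexity` and `Call`); it assumes `PERPLEXITY_API_KEY` is set in the environment, as `NewPerplexity` requires, and the prompt string is just an illustrative placeholder:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/tools/perplexity"
)

func main() {
	// NewPerplexity reads PERPLEXITY_API_KEY from the environment and
	// returns an error if the variable is missing.
	tool, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall)
	if err != nil {
		log.Fatal(err)
	}

	// Call forwards the prompt to the Perplexity API through the
	// OpenAI-compatible client and returns the streamed response
	// accumulated into a single string.
	answer, err := tool.Call(context.Background(), "What happened in AI research this week?")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(answer)
}
```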