tools: add perplexity AI #1061

Merged · 9 commits · Jan 6, 2025
62 changes: 62 additions & 0 deletions tools/perplexity/README.md
@@ -0,0 +1,62 @@

# Perplexity Tool Integration for Agents

Use Perplexity in your AI agent to enrich it with data from the web.

Full code example:

```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/tmc/langchaingo/agents"
	"github.com/tmc/langchaingo/callbacks"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms/openai"
	"github.com/tmc/langchaingo/tools"
	"github.com/tmc/langchaingo/tools/perplexity"
)

func main() {
	if err := run(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

func run() error {
	llm, err := openai.New(
		openai.WithModel("gpt-4o-mini"),
		openai.WithCallback(callbacks.LogHandler{}),
	)
	if err != nil {
		return err
	}

	perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall)
	if err != nil {
		return err
	}

	agentTools := []tools.Tool{
		perpl,
	}

	agent := agents.NewOneShotAgent(llm,
		agentTools,
		agents.WithMaxIterations(2),
	)
	executor := agents.NewExecutor(agent)

	question := "what's the latest and best LLM on the market at the moment?"
	answer, err := chains.Run(context.Background(), executor, question)
	if err != nil {
		return err
	}

	fmt.Println(answer)
	return nil
}
```
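
The tool can also be invoked directly, without wiring up an agent. A minimal sketch, assuming `PERPLEXITY_API_KEY` is set in the environment (the query string below is only a placeholder):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/tools/perplexity"
)

func main() {
	// NewPerplexity reads PERPLEXITY_API_KEY from the environment.
	perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall)
	if err != nil {
		log.Fatal(err)
	}

	// Call sends the query to Perplexity and returns the generated answer.
	answer, err := perpl.Call(context.Background(), "What is the capital of France?")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(answer)
}
```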
70 changes: 70 additions & 0 deletions tools/perplexity/perplexity.go
@@ -0,0 +1,70 @@
package perplexity

import (
	"context"
	"fmt"
	"os"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

type Model string

// Model pricing overview: https://docs.perplexity.ai/guides/pricing
const (
	ModelLlamaSonarSmall Model = "llama-3.1-sonar-small-128k-online"
	ModelLlamaSonarLarge Model = "llama-3.1-sonar-large-128k-online"
	ModelLlamaSonarHuge  Model = "llama-3.1-sonar-huge-128k-online"
)

// Perplexity is a tool that queries the Perplexity API through its
// OpenAI-compatible endpoint.
type Perplexity struct {
	llm *openai.LLM
}

// NewPerplexity creates a Perplexity tool for the given model.
// It reads the API key from the PERPLEXITY_API_KEY environment variable.
func NewPerplexity(model Model) (*Perplexity, error) {
	perplexity := &Perplexity{}
	var err error

	apiKey := os.Getenv("PERPLEXITY_API_KEY")
	if apiKey == "" {
		return nil, fmt.Errorf("PERPLEXITY_API_KEY not set")
	}

	perplexity.llm, err = openai.New(
		openai.WithModel(string(model)),
		openai.WithBaseURL("https://api.perplexity.ai"),
		openai.WithToken(apiKey),
	)
	if err != nil {
		return nil, err
	}

	return perplexity, nil
}

func (p *Perplexity) Name() string {
	return "PerplexityAI"
}

func (p *Perplexity) Description() string {
	return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet."
}

// Call sends the input to Perplexity and accumulates the streamed response
// into a single string.
func (p *Perplexity) Call(ctx context.Context, input string) (string, error) {
	content := []llms.MessageContent{
		llms.TextParts(llms.ChatMessageTypeHuman, input),
	}

	var generatedText string
	_, err := p.llm.GenerateContent(ctx, content,
		llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error {
			generatedText += string(chunk)
			return nil
		}))
	if err != nil {
		return "", err
	}

	return generatedText, nil
}
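
Because the type implements Name, Description, and Call, it satisfies langchaingo's tools.Tool interface. A compile-time assertion, not part of this diff and shown only as an illustrative sketch, would look like:

```go
package perplexity

import "github.com/tmc/langchaingo/tools"

// Hypothetical assertion (not included in the PR): it fails to compile if
// *Perplexity stops satisfying the tools.Tool interface.
var _ tools.Tool = (*Perplexity)(nil)
```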
55 changes: 55 additions & 0 deletions tools/perplexity/perplexity_test.go
@@ -0,0 +1,55 @@
package perplexity

import (
	"context"
	"os"
	"strings"
	"testing"

	"github.com/tmc/langchaingo/agents"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms/openai"
	"github.com/tmc/langchaingo/tools"
)

func TestRun(t *testing.T) {
	t.Parallel()

	if os.Getenv("PERPLEXITY_API_KEY") == "" {
		t.Skip("PERPLEXITY_API_KEY not set")
	}
	if os.Getenv("OPENAI_API_KEY") == "" {
		t.Skip("OPENAI_API_KEY not set")
	}

	llm, err := openai.New()
	if err != nil {
		t.Fatalf("failed to create LLM: %v", err)
	}

	perpl, err := NewPerplexity(ModelLlamaSonarSmall)
	if err != nil {
		t.Fatalf("failed to create Perplexity tool: %v", err)
	}

	agentTools := []tools.Tool{
		perpl,
	}

	agent := agents.NewOneShotAgent(llm,
		agentTools,
		agents.WithMaxIterations(1),
	)
	executor := agents.NewExecutor(agent)

	question := "what is the largest country in the world by total area?"
	answer, err := chains.Run(context.Background(), executor, question)
	if err != nil {
		t.Fatalf("failed to run chains: %v", err)
	}

	const expectedAnswer = "Russia"
	if !strings.Contains(answer, expectedAnswer) {
		t.Errorf("expected answer to contain %q, got %q", expectedAnswer, answer)
	}
}