Implement perplexity agent tool
This PR enables AI agents to use Perplexity AI to retrieve data from the web.
sklinkert committed Nov 5, 2024
1 parent 238d1c7 commit a309b30
Showing 2 changed files with 132 additions and 0 deletions.
62 changes: 62 additions & 0 deletions tools/perplexity/README.md
@@ -0,0 +1,62 @@

# Perplexity Tool Integration for Agents

Use Perplexity in your AI agent to enrich it with data from the web.

Full code example:

```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/tmc/langchaingo/agents"
	"github.com/tmc/langchaingo/callbacks"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms/openai"
	"github.com/tmc/langchaingo/tools"
	"github.com/tmc/langchaingo/tools/perplexity"
)

func main() {
	if err := run(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

func run() error {
	llm, err := openai.New(
		openai.WithModel("gpt-4o-mini"),
		openai.WithCallback(callbacks.LogHandler{}),
	)
	if err != nil {
		return err
	}

	perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall)
	if err != nil {
		return err
	}

	agentTools := []tools.Tool{
		perpl,
	}

	agent := agents.NewOneShotAgent(llm,
		agentTools,
		agents.WithMaxIterations(2),
	)
	executor := agents.NewExecutor(agent)

	question := "what's the latest and best LLM on the market at the moment?"
	answer, err := chains.Run(context.Background(), executor, question)

	fmt.Println(answer)

	return err
}
```
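
If you only need a one-off web lookup without the full agent loop, the tool can also be invoked directly through its `Call` method. A minimal sketch (the question string is just an illustration; `PERPLEXITY_API_KEY` must be set, since `NewPerplexity` reads the key from the environment):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/tools/perplexity"
)

func main() {
	// NewPerplexity reads the API key from the PERPLEXITY_API_KEY environment variable.
	perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall)
	if err != nil {
		log.Fatal(err)
	}

	// Call sends a single question to Perplexity and returns the accumulated answer.
	answer, err := perpl.Call(context.Background(), "What happened in tech news today?")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(answer)
}
```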
70 changes: 70 additions & 0 deletions tools/perplexity/perplexity.go
@@ -0,0 +1,70 @@
package perplexity

import (
	"context"
	"fmt"
	"os"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

// Model identifies a Perplexity model used for online search.
type Model string

// Model pricing overview: https://docs.perplexity.ai/guides/pricing
const (
	ModelLlamaSonarSmall Model = "llama-3.1-sonar-small-128k-online"
	ModelLlamaSonarLarge Model = "llama-3.1-sonar-large-128k-online"
	ModelLlamaSonarHuge  Model = "llama-3.1-sonar-huge-128k-online"
)

// Perplexity is an agent tool that answers questions via the
// Perplexity AI API, accessed through its OpenAI-compatible endpoint.
type Perplexity struct {
	llm *openai.LLM
}

// NewPerplexity creates a Perplexity tool for the given model.
// It requires the PERPLEXITY_API_KEY environment variable to be set.
func NewPerplexity(model Model) (*Perplexity, error) {
	perplexity := &Perplexity{}
	var err error

	apiKey := os.Getenv("PERPLEXITY_API_KEY")
	if apiKey == "" {
		return nil, fmt.Errorf("PERPLEXITY_API_KEY not set")
	}

	perplexity.llm, err = openai.New(
		openai.WithModel(string(model)),
		openai.WithBaseURL("https://api.perplexity.ai"),
		openai.WithToken(apiKey),
	)
	if err != nil {
		return nil, err
	}

	return perplexity, nil
}

// Name returns the tool name presented to the agent.
func (p *Perplexity) Name() string {
	return "PerplexityAI"
}

// Description tells the agent what the tool can be used for.
func (p *Perplexity) Description() string {
	return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet."
}

// Call sends the input to Perplexity and returns the generated answer,
// accumulating the streamed response chunks into a single string.
func (p *Perplexity) Call(ctx context.Context, input string) (string, error) {
	content := []llms.MessageContent{
		llms.TextParts(llms.ChatMessageTypeHuman, input),
	}

	var generatedText string
	_, err := p.llm.GenerateContent(ctx, content,
		llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error {
			generatedText += string(chunk)
			return nil
		}))
	if err != nil {
		return "", err
	}

	return generatedText, nil
}
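
As a side note, the agent executor accepts any value implementing the `tools.Tool` interface (`Name`, `Description`, `Call`). A compile-time assertion inside the package could make that explicit; this is an illustrative sketch, not part of the commit:

```go
import "github.com/tmc/langchaingo/tools"

// Illustrative compile-time assertion (an assumption, not in this commit):
// *Perplexity must satisfy the tools.Tool interface the agent executor expects.
var _ tools.Tool = (*Perplexity)(nil)
```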
