-
-
Notifications
You must be signed in to change notification settings - Fork 670
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
This PR enables AI agents to use Perplexity AI to retrieve data from the web.
- Loading branch information
Showing
2 changed files
with
132 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,62 @@ | ||
|
||
# Perplexity Tool Integration for Agents | ||
|
||
Use perplexity in your AI Agent to enrich it with data from the web. | ||
|
||
Full code example: | ||
|
||
```go | ||
package main | ||
|
||
import ( | ||
"context" | ||
"fmt" | ||
"os" | ||
|
||
"github.com/tmc/langchaingo/agents" | ||
"github.com/tmc/langchaingo/callbacks" | ||
"github.com/tmc/langchaingo/chains" | ||
"github.com/tmc/langchaingo/llms/openai" | ||
"github.com/tmc/langchaingo/tools" | ||
"github.com/tmc/langchaingo/tools/perplexity" | ||
) | ||
|
||
func main() { | ||
if err := run(); err != nil { | ||
fmt.Fprintln(os.Stderr, err) | ||
os.Exit(1) | ||
} | ||
} | ||
|
||
func run() error { | ||
llm, err := openai.New( | ||
openai.WithModel("gpt-4o-mini"), | ||
openai.WithCallback(callbacks.LogHandler{}), | ||
) | ||
if err != nil { | ||
return err | ||
} | ||
|
||
perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall) | ||
if err != nil { | ||
return err | ||
} | ||
|
||
agentTools := []tools.Tool{ | ||
perpl, | ||
} | ||
|
||
agent := agents.NewOneShotAgent(llm, | ||
agentTools, | ||
agents.WithMaxIterations(2), | ||
) | ||
executor := agents.NewExecutor(agent) | ||
|
||
question := "what's the latest and best LLM on the market at the moment?" | ||
	answer, err := chains.Run(context.Background(), executor, question)
	if err != nil {
		return err
	}

	fmt.Println(answer)
	return nil
} | ||
``` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
package perplexity | ||
|
||
import (
	"context"
	"fmt"
	"os"
	"strings"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)
|
||
// Model identifies a Perplexity model name sent with each request.
type Model string

// Supported Perplexity online models.
// Model pricing overview: https://docs.perplexity.ai/guides/pricing
const (
	ModelLlamaSonarSmall Model = "llama-3.1-sonar-small-128k-online"
	ModelLlamaSonarLarge Model = "llama-3.1-sonar-large-128k-online"
	ModelLlamaSonarHuge  Model = "llama-3.1-sonar-huge-128k-online"
)
|
||
// Perplexity is an agent tool that answers questions with live web data
// by calling the Perplexity AI API through an OpenAI-compatible client.
type Perplexity struct {
	llm *openai.LLM // client configured with the Perplexity base URL
}
|
||
func NewPerplexity(model Model) (*Perplexity, error) { | ||
perplexity := &Perplexity{} | ||
var err error | ||
|
||
apiKey := os.Getenv("PERPLEXITY_API_KEY") | ||
if apiKey == "" { | ||
return nil, fmt.Errorf("PERPLEXITY_API_KEY not set") | ||
} | ||
|
||
perplexity.llm, err = openai.New( | ||
openai.WithModel(string(model)), | ||
openai.WithBaseURL("https://api.perplexity.ai"), | ||
openai.WithToken(apiKey), | ||
) | ||
if err != nil { | ||
return nil, err | ||
} | ||
|
||
return perplexity, nil | ||
} | ||
|
||
// Name returns the identifier the agent uses to refer to this tool.
func (p *Perplexity) Name() string {
	return "PerplexityAI"
}
|
||
// Description returns a natural-language summary of the tool's
// capabilities; agents use it to decide when to invoke the tool.
func (p *Perplexity) Description() string {
	return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet."
}
|
||
func (p *Perplexity) Call(ctx context.Context, input string) (string, error) { | ||
content := []llms.MessageContent{ | ||
llms.TextParts(llms.ChatMessageTypeHuman, input), | ||
} | ||
|
||
var generatedText string | ||
_, err := p.llm.GenerateContent(ctx, content, | ||
llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error { | ||
generatedText += string(chunk) | ||
return nil | ||
})) | ||
if err != nil { | ||
return "", err | ||
} | ||
|
||
return generatedText, nil | ||
} |