Skip to content

Commit

Permalink
Cleaned up langsmith tracing inputs and outputs
Browse files Browse the repository at this point in the history
  • Loading branch information
jubeless committed Nov 22, 2024
1 parent ae03443 commit e1b2581
Show file tree
Hide file tree
Showing 4 changed files with 67 additions and 20 deletions.
32 changes: 13 additions & 19 deletions langsmith/tracer.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,26 +63,11 @@ func (t *LangChainTracer) HandleText(_ context.Context, _ string) {
func (t *LangChainTracer) HandleLLMGenerateContentStart(ctx context.Context, ms []llms.MessageContent) {
childTree := t.activeTree.CreateChild()

inputs := []struct {
Role string `json:"role"`
Content []llms.ContentPart `json:"content"`
}{}

for _, prompt := range ms {
inputs = append(inputs, struct {
Role string `json:"role"`
Content []llms.ContentPart `json:"content"`
}{
Role: string(prompt.Role),
Content: prompt.Parts,
})
}

childTree.
SetName("LLMGenerateContent").
SetRunType("llm").
SetInputs(KVMap{
"messages": inputs,
"messages": inputsFromMessages(ms),
})

t.activeTree.AppendChild(childTree)
Expand All @@ -97,9 +82,18 @@ func (t *LangChainTracer) HandleLLMGenerateContentStart(ctx context.Context, ms
func (t *LangChainTracer) HandleLLMGenerateContentEnd(ctx context.Context, res *llms.ContentResponse) {
childTree := t.activeTree.GetChild("LLMGenerateContent")

childTree.SetName("LLMGenerateContent").SetRunType("llm").SetOutputs(KVMap{
"res_content": res,
})
childTree.
SetName("LLMGenerateContent").
SetRunType("llm").
SetOutputs(KVMap{
"choices": res.Choices,
})

if tracingOutput := res.GetTracingOutput(); tracingOutput != nil {
childTree.
SetName(tracingOutput.Name).
SetOutputs(tracingOutput.Output)
}

// Close the run
if err := childTree.patchRun(ctx); err != nil {
Expand Down
21 changes: 20 additions & 1 deletion langsmith/types.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
package langsmith

import "time"
import (
"time"

"github.com/tmc/langchaingo/llms"
)

type KVMap map[string]any

Expand All @@ -24,3 +28,18 @@ func timeToMillisecondsPtr(t time.Time) *int64 {
func ptr[T any](v T) *T {
return &v
}

// inputs is the ordered list of traced chat messages recorded as the
// LangSmith run input.
type inputs []input

// input is the JSON shape of one traced chat message: the speaker's
// role plus its content parts.
type input struct {
	Role    string             `json:"role"`
	Content []llms.ContentPart `json:"content"`
}

// inputsFromMessages converts the model prompt messages into the
// role/content pairs that LangSmith expects as run inputs, preserving
// message order.
func inputsFromMessages(ms []llms.MessageContent) inputs {
	// Named "out" rather than "inputs" so the local does not shadow
	// the inputs type.
	out := make(inputs, len(ms))
	for i, msg := range ms {
		out[i] = input{Role: string(msg.Role), Content: msg.Parts}
	}
	return out
}
15 changes: 15 additions & 0 deletions llms/generatecontent.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,21 @@ func (ToolCallResponse) isPart() {}
// It can potentially return multiple content choices.
type ContentResponse struct {
	Choices []*ContentChoice
	// tracingOutput is arbitrary information the model can send to the
	// tracer; nil when the backend attached none. Access it via
	// SetTracingOutput/GetTracingOutput.
	tracingOutput *TracingOutput
}

// TracingOutput is an optional, model-specific payload a backend can
// attach to a ContentResponse for tracing callbacks: Name labels the
// traced run and Output holds its output key/value map.
type TracingOutput struct {
	Name string
	Output map[string]any
}

// SetTracingOutput attaches model-specific tracing information to the
// response so tracing callbacks can report it; pass nil to clear it.
func (cr *ContentResponse) SetTracingOutput(v *TracingOutput) {
	cr.tracingOutput = v
}

// GetTracingOutput returns the tracing payload previously attached via
// SetTracingOutput, or nil if none was set.
// NOTE(review): Go convention would name this TracingOutput (no Get
// prefix), but renaming would break existing callers.
func (cr *ContentResponse) GetTracingOutput() *TracingOutput {
	return cr.tracingOutput
}

// ContentChoice is one of the response choices returned by GenerateContent
Expand Down
19 changes: 19 additions & 0 deletions llms/openai/openaillm.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package openai

import (
"context"
"encoding/json"
"fmt"

"github.com/tmc/langchaingo/callbacks"
Expand Down Expand Up @@ -195,11 +196,29 @@ func (o *LLM) GenerateContent(ctx context.Context, messages []llms.MessageConten
response := &llms.ContentResponse{Choices: choices}

if callbacksHandler := o.getCallbackHandler(ctx); callbacksHandler != nil {
tracingOutput, err := o.getTracingOutput(result)
if err != nil {
return nil, fmt.Errorf("failed to get tracing output: %w", err)
}
response.SetTracingOutput(tracingOutput)
callbacksHandler.HandleLLMGenerateContentEnd(ctx, response)
}
return response, nil
}

// getTracingOutput converts the raw OpenAI chat completion response
// into a generic tracing payload by round-tripping it through JSON,
// so the tracer receives the full provider response as a plain map.
func (o *LLM) getTracingOutput(resp *openaiclient.ChatCompletionResponse) (*llms.TracingOutput, error) {
	jsonBytes, err := json.Marshal(resp)
	if err != nil {
		return nil, fmt.Errorf("marshal chat completion response: %w", err)
	}
	outputs := map[string]any{}
	if err := json.Unmarshal(jsonBytes, &outputs); err != nil {
		return nil, fmt.Errorf("unmarshal chat completion response: %w", err)
	}

	// NOTE(review): "ChatOpenAI" presumably mirrors the run name used by
	// LangSmith's own OpenAI integration — confirm against the tracer.
	return &llms.TracingOutput{Name: "ChatOpenAI", Output: outputs}, nil
}

// CreateEmbedding creates embeddings for the given input texts.
func (o *LLM) CreateEmbedding(ctx context.Context, inputTexts []string) ([][]float32, error) {
embeddings, err := o.client.CreateEmbedding(ctx, &openaiclient.EmbeddingRequest{
Expand Down

0 comments on commit e1b2581

Please sign in to comment.