From 669914d3623d39d7fae9db1959cb99ff4bb38148 Mon Sep 17 00:00:00 2001
From: abrook
Date: Wed, 3 Jul 2024 13:41:51 -0400
Subject: [PATCH 1/2] Move logging to ensure correct context for span

---
 js/ai/src/generate.ts  | 11 -----------
 js/ai/src/model.ts     |  3 +++
 js/ai/src/telemetry.ts | 13 +++++--------
 3 files changed, 8 insertions(+), 19 deletions(-)

diff --git a/js/ai/src/generate.ts b/js/ai/src/generate.ts
index 9f61eea3c..d4718772b 100755
--- a/js/ai/src/generate.ts
+++ b/js/ai/src/generate.ts
@@ -42,7 +42,6 @@ import {
   ToolRequestPart,
   ToolResponsePart,
 } from './model.js';
-import * as telemetry from './telemetry.js';
 import {
   resolveTools,
   ToolAction,
@@ -609,11 +608,6 @@ export async function generate<
   }
 
   const request = await toGenerateRequest(resolvedOptions);
-  telemetry.recordGenerateActionInputLogs(
-    model.__action.name,
-    resolvedOptions,
-    request
-  );
   const response = await runWithStreamingCallback(
     resolvedOptions.streamingCallback
       ? (chunk: GenerateResponseChunkData) =>
@@ -678,11 +672,6 @@ export async function generate<
     (part) => !!part.toolRequest
   );
   if (resolvedOptions.returnToolRequests || toolCalls.length === 0) {
-    telemetry.recordGenerateActionOutputLogs(
-      model.__action.name,
-      resolvedOptions,
-      response
-    );
     return response;
   }
   const toolResponses: ToolResponsePart[] = await Promise.all(
diff --git a/js/ai/src/model.ts b/js/ai/src/model.ts
index 45cf8be05..897eb5ecb 100644
--- a/js/ai/src/model.ts
+++ b/js/ai/src/model.ts
@@ -304,12 +304,15 @@ export function defineModel<
     },
     (input) => {
       const startTimeMs = performance.now();
+      telemetry.recordGenerateActionInputLogs(options.name, input);
+
       return runner(input, getStreamingCallback())
         .then((response) => {
           const timedResponse = {
             ...response,
             latencyMs: performance.now() - startTimeMs,
           };
+          telemetry.recordGenerateActionOutputLogs(options.name, response);
           telemetry.recordGenerateActionMetrics(options.name, input, {
             response: timedResponse,
           });
diff --git a/js/ai/src/telemetry.ts b/js/ai/src/telemetry.ts
index 4a19893a3..0f12895e1 100644
--- a/js/ai/src/telemetry.ts
+++ b/js/ai/src/telemetry.ts
@@ -28,7 +28,6 @@ import {
 } from '@genkit-ai/core/tracing';
 import { ValueType } from '@opentelemetry/api';
 import { createHash } from 'crypto';
-import { GenerateOptions } from './generate.js';
 import {
   GenerateRequest,
   GenerateResponseData,
@@ -171,7 +170,6 @@ export function recordGenerateActionMetrics(
 
 export function recordGenerateActionInputLogs(
   model: string,
-  options: GenerateOptions,
   input: GenerateRequest
 ) {
   const flowName = traceMetadataAls?.getStore()?.flowName;
@@ -180,11 +178,11 @@ export function recordGenerateActionInputLogs(
   const sharedMetadata = { model, path, qualifiedPath, flowName };
   logger.logStructured(`Config[${path}, ${model}]`, {
     ...sharedMetadata,
-    temperature: options.config?.temperature,
-    topK: options.config?.topK,
-    topP: options.config?.topP,
-    maxOutputTokens: options.config?.maxOutputTokens,
-    stopSequences: options.config?.stopSequences,
+    temperature: input.config?.temperature,
+    topK: input.config?.topK,
+    topP: input.config?.topP,
+    maxOutputTokens: input.config?.maxOutputTokens,
+    stopSequences: input.config?.stopSequences,
     source: 'ts',
     sourceVersion: GENKIT_VERSION,
   });
@@ -208,7 +206,6 @@ export function recordGenerateActionInputLogs(
 
 export function recordGenerateActionOutputLogs(
   model: string,
-  options: GenerateOptions,
   output: GenerateResponseData
 ) {
   const flowName = traceMetadataAls?.getStore()?.flowName;

From 93e27a04a12fef8b440aeb668edc69c27946b9be Mon Sep 17 00:00:00 2001
From: abrook
Date: Thu, 4 Jul 2024 11:11:49 -0400
Subject: [PATCH 2/2] Log before timer

---
 js/ai/src/model.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/js/ai/src/model.ts b/js/ai/src/model.ts
index 897eb5ecb..15b2de02e 100644
--- a/js/ai/src/model.ts
+++ b/js/ai/src/model.ts
@@ -303,8 +303,8 @@ export function defineModel<
       use: middleware,
     },
     (input) => {
-      const startTimeMs = performance.now();
       telemetry.recordGenerateActionInputLogs(options.name, input);
+      const startTimeMs = performance.now();
 
       return runner(input, getStreamingCallback())
         .then((response) => {
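Note: taken together, the two patches move the structured input/output logging out of generate() in generate.ts and into the model action runner in model.ts, so the log records are emitted while the model action's span is the active context, and the latency timer is started only after the input log is recorded. The following is a minimal, self-contained TypeScript sketch of that ordering, not the Genkit source: Telemetry, wrapRunner, and the record* names are hypothetical stand-ins for telemetry.recordGenerateActionInputLogs and its siblings.

// Minimal sketch of the ordering the patches establish (hypothetical stand-ins,
// not the Genkit API): log the input inside the action wrapper so it lands in
// the action's active span context, start the latency timer only after logging,
// then log the output and metrics once the runner resolves.
interface Telemetry {
  recordInput(name: string, input: unknown): void;
  recordOutput(name: string, output: unknown): void;
  recordMetrics(name: string, input: unknown, data: { response: unknown }): void;
}

export function wrapRunner<I, O extends object>(
  name: string,
  telemetry: Telemetry,
  runner: (input: I) => Promise<O>
): (input: I) => Promise<O & { latencyMs: number }> {
  return (input) => {
    // Input log first (patch 2), so the timer does not include logging overhead.
    telemetry.recordInput(name, input);
    const startTimeMs = performance.now();

    return runner(input).then((response) => {
      const timedResponse = {
        ...response,
        latencyMs: performance.now() - startTimeMs,
      };
      // Output log and metrics emitted in the same context as the runner (patch 1).
      telemetry.recordOutput(name, response);
      telemetry.recordMetrics(name, input, { response: timedResponse });
      return timedResponse;
    });
  };
}

Logging before starting the timer keeps latencyMs a measure of the runner alone, and doing all logging inside the wrapper keeps the log records attached to the span that is active there rather than to the caller's context.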