From 5c5c96119990e1671b3410c6254ecfae4a1a88ba Mon Sep 17 00:00:00 2001
From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
Date: Tue, 22 Aug 2023 15:17:01 +0100
Subject: [PATCH 1/4] Added metering and logging for token usage
---
.../AzureSdk/ClientBase.cs | 35 +++++++++++++++++--
1 file changed, 33 insertions(+), 2 deletions(-)
diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
index 7db116a7b8f4..5a791227753c 100644
--- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
+++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
@@ -2,6 +2,7 @@
using System;
using System.Collections.Generic;
+using System.Diagnostics.Metrics;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
@@ -46,6 +47,19 @@ private protected ClientBase(ILoggerFactory? loggerFactory = null)
///
private protected ILogger Logger { get; set; }
+    /// <summary>
+    /// Instance of <see cref="Meter"/> for metrics.
+    /// </summary>
+    private static Meter s_meter = new(typeof(ClientBase).Assembly.GetName().Name);
+
+    /// <summary>
+    /// Instance of <see cref="Counter{T}"/> to keep track of the number of tokens used.
+    /// </summary>
+    private static Counter<int> s_tokenAmountCounter =
+        s_meter.CreateCounter<int>(
+            name: "SK.Connectors.OpenAI.TokenAmount",
+            description: "Number of tokens used");
+
///
/// Creates completions for the prompt and settings.
///
@@ -78,6 +92,8 @@ private protected async Task<IReadOnlyList<ITextResult>> InternalGetTextResultsA
throw new SKException("Text completions not found");
}
+ this.CaptureUsageDetails(responseData.Usage);
+
return responseData.Choices.Select(choice => new TextResult(responseData, choice)).ToList();
}
@@ -168,12 +184,16 @@ private protected async Task<IReadOnlyList<IChatResult>> InternalGetChatResultsA
throw new SKException("Chat completions null response");
}
- if (response.Value.Choices.Count == 0)
+ var responseData = response.Value;
+
+ if (responseData.Choices.Count == 0)
{
throw new SKException("Chat completions not found");
}
- return response.Value.Choices.Select(chatChoice => new ChatResult(response.Value, chatChoice)).ToList();
+ this.CaptureUsageDetails(responseData.Usage);
+
+ return responseData.Choices.Select(chatChoice => new ChatResult(responseData, chatChoice)).ToList();
}
///
@@ -446,4 +466,15 @@ private static async Task<T> RunRequestAsync(Func<Task<T>> request)
$"Something went wrong: {e.Message}", e);
}
}
+
+    /// <summary>
+    /// Captures usage details, including token information.
+    /// </summary>
+    /// <param name="usage">Instance of <see cref="CompletionsUsage"/> with usage details.</param>
+    private void CaptureUsageDetails(CompletionsUsage usage)
+ {
+ this.Logger.LogInformation("Token amount: {TokenAmount}", usage.TotalTokens);
+
+ s_tokenAmountCounter.Add(usage.TotalTokens);
+ }
}
From 48ec88f5cdba2e84d5f70eb9f0e26767c4cc2d33 Mon Sep 17 00:00:00 2001
From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
Date: Tue, 22 Aug 2023 15:17:08 +0100
Subject: [PATCH 2/4] Fixed examples
---
dotnet/samples/ApplicationInsightsExample/Program.cs | 8 ++++----
.../KernelSyntaxExamples/Example43_GetModelResult.cs | 4 ++--
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/dotnet/samples/ApplicationInsightsExample/Program.cs b/dotnet/samples/ApplicationInsightsExample/Program.cs
index 1c52a8c95734..88316d60afdd 100644
--- a/dotnet/samples/ApplicationInsightsExample/Program.cs
+++ b/dotnet/samples/ApplicationInsightsExample/Program.cs
@@ -40,7 +40,7 @@ public static async Task Main()
var serviceProvider = GetServiceProvider();
     var telemetryClient = serviceProvider.GetRequiredService<TelemetryClient>();
-    var logger = serviceProvider.GetRequiredService<ILogger>();
+    var loggerFactory = serviceProvider.GetRequiredService<ILoggerFactory>();
using var meterListener = new MeterListener();
using var activityListener = new ActivityListener();
@@ -48,8 +48,8 @@ public static async Task Main()
ConfigureMetering(meterListener, telemetryClient);
ConfigureTracing(activityListener, telemetryClient);
- var kernel = GetKernel(logger);
- var planner = GetSequentialPlanner(kernel, logger);
+ var kernel = GetKernel(loggerFactory);
+ var planner = GetSequentialPlanner(kernel, loggerFactory);
try
{
@@ -92,7 +92,7 @@ private static void ConfigureApplicationInsightsTelemetry(ServiceCollection serv
services.AddLogging(loggingBuilder =>
{
- loggingBuilder.AddFilter(typeof(Program).FullName, LogLevel);
+ loggingBuilder.AddFilter(logLevel => logLevel == LogLevel);
loggingBuilder.SetMinimumLevel(LogLevel);
});
diff --git a/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs b/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs
index 9e37cb38def5..50e6b865e19a 100644
--- a/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs
+++ b/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs
@@ -35,13 +35,13 @@ public static async Task RunAsync()
var textResult = await myFunction.InvokeAsync("Sci-fi",
settings: new CompleteRequestSettings { ResultsPerPrompt = 3, MaxTokens = 500, Temperature = 1, TopP = 0.5 });
Console.WriteLine(textResult);
- Console.WriteLine(textResult.ModelResults.Select(result => result.GetOpenAITextResult()).AsJson());
+ Console.WriteLine(textResult.ModelResults.Select(result => result.GetOpenAIChatResult()).AsJson());
Console.WriteLine();
// Using the Kernel RunAsync
textResult = await kernel.RunAsync("sorry I forgot your birthday", myFunction);
Console.WriteLine(textResult);
- Console.WriteLine(textResult.ModelResults.LastOrDefault()?.GetOpenAITextResult()?.Usage.AsJson());
+ Console.WriteLine(textResult.ModelResults.LastOrDefault()?.GetOpenAIChatResult()?.Usage.AsJson());
Console.WriteLine();
// Using Chat Completion directly
From 3d0766edc9beb3222d22b67ebdbd2f3af740b7a9 Mon Sep 17 00:00:00 2001
From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
Date: Tue, 22 Aug 2023 15:37:53 +0100
Subject: [PATCH 3/4] Added metrics for prompt, completion and total tokens
---
dotnet/docs/TELEMETRY.md | 4 +++
.../AzureSdk/ClientBase.cs | 32 +++++++++++++++----
2 files changed, 30 insertions(+), 6 deletions(-)
diff --git a/dotnet/docs/TELEMETRY.md b/dotnet/docs/TELEMETRY.md
index f9c290878ede..12c7be3adec8 100644
--- a/dotnet/docs/TELEMETRY.md
+++ b/dotnet/docs/TELEMETRY.md
@@ -64,6 +64,10 @@ Available meters:
- `SK..ExecutionTotal` - total number of function executions
- `SK..ExecutionSuccess` - number of successful function executions
- `SK..ExecutionFailure` - number of failed function executions
+- _Microsoft.SemanticKernel.Connectors.AI.OpenAI_ - captures metrics for OpenAI functionality. List of metrics:
+ - `SK.Connectors.OpenAI.PromptTokens` - number of prompt tokens used.
+ - `SK.Connectors.OpenAI.CompletionTokens` - number of completion tokens used.
+ - `SK.Connectors.OpenAI.TotalTokens` - total number of tokens used.
### Examples
diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
index 5a791227753c..fd9d075f9a88 100644
--- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
+++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
@@ -53,12 +53,28 @@ private protected ClientBase(ILoggerFactory? loggerFactory = null)
private static Meter s_meter = new(typeof(ClientBase).Assembly.GetName().Name);
     /// <summary>
-    /// Instance of <see cref="Counter{T}"/> to keep track of the number of tokens used.
+    /// Instance of <see cref="Counter{T}"/> to keep track of the number of prompt tokens used.
     /// </summary>
-    private static Counter<int> s_tokenAmountCounter =
+    private static Counter<int> s_promptTokensCounter =
         s_meter.CreateCounter<int>(
- name: "SK.Connectors.OpenAI.TokenAmount",
- description: "Number of tokens used");
+ name: "SK.Connectors.OpenAI.PromptTokens",
+ description: "Number of prompt tokens used");
+
+    /// <summary>
+    /// Instance of <see cref="Counter{T}"/> to keep track of the number of completion tokens used.
+    /// </summary>
+    private static Counter<int> s_completionTokensCounter =
+        s_meter.CreateCounter<int>(
+            name: "SK.Connectors.OpenAI.CompletionTokens",
+            description: "Number of completion tokens used");
+
+    /// <summary>
+    /// Instance of <see cref="Counter{T}"/> to keep track of the total number of tokens used.
+    /// </summary>
+    private static Counter<int> s_totalTokensCounter =
+        s_meter.CreateCounter<int>(
+            name: "SK.Connectors.OpenAI.TotalTokens",
+            description: "Total number of tokens used");
///
/// Creates completions for the prompt and settings.
@@ -473,8 +489,12 @@ private static async Task<T> RunRequestAsync(Func<Task<T>> request)
     /// <param name="usage">Instance of <see cref="CompletionsUsage"/> with usage details.</param>
private void CaptureUsageDetails(CompletionsUsage usage)
{
- this.Logger.LogInformation("Token amount: {TokenAmount}", usage.TotalTokens);
+ this.Logger.LogInformation(
+ "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. Total tokens: {TotalTokens}",
+ usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens);
- s_tokenAmountCounter.Add(usage.TotalTokens);
+ s_promptTokensCounter.Add(usage.PromptTokens);
+ s_completionTokensCounter.Add(usage.CompletionTokens);
+ s_totalTokensCounter.Add(usage.TotalTokens);
}
}
From d5800ff2ee5aa7706855f3d0ee20d0e2669e8e60 Mon Sep 17 00:00:00 2001
From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
Date: Tue, 22 Aug 2023 15:47:08 +0100
Subject: [PATCH 4/4] Small fix
---
.../src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
index fd9d075f9a88..d7e5e9c31608 100644
--- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
+++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs
@@ -490,7 +490,7 @@ private static async Task<T> RunRequestAsync(Func<Task<T>> request)
private void CaptureUsageDetails(CompletionsUsage usage)
{
this.Logger.LogInformation(
- "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. Total tokens: {TotalTokens}",
+ "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. Total tokens: {TotalTokens}.",
usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens);
s_promptTokensCounter.Add(usage.PromptTokens);