.Net: {Azure}OpenAI Connectors Update to 2.1.0-beta.1 (#9078)
### Motivation and Context

Update to the latest pre-release versions of the OpenAI and Azure OpenAI
packages, following the new guidelines described in the ADR.

Moving forward, connectors that target pre-release SDKs will themselves be
tagged as pre-release, as will the `Microsoft.SemanticKernel` meta package
(which targets the pre-release versions of the OpenAI and Azure OpenAI connectors).

This update enables usage of the Realtime APIs provided by the underlying
SDKs, allowing them to be used in breaking-glass scenarios.
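
As a rough illustration of the breaking-glass path, the sketch below shows how the same `OpenAIClient` handed to the kernel builder could also be used directly against the SDK's Realtime surface. This is a minimal, hypothetical sketch, not part of this change set: it assumes the preview SDK's `GetRealtimeConversationClient`/`RealtimeConversationClient` API (gated behind the experimental `OPENAI002` diagnostic) and placeholder model ids; exact names may differ between pre-releases.

```csharp
// Minimal breaking-glass sketch (assumes OpenAI 2.1.0-beta.1 preview APIs).
#pragma warning disable OPENAI002 // Realtime API is experimental in this SDK pre-release.

using System.ClientModel;
using Microsoft.SemanticKernel;
using OpenAI;
using OpenAI.RealtimeConversation;

// One client instance shared by Semantic Kernel and direct SDK usage.
OpenAIClient openAIClient = new(new ApiKeyCredential("<api-key>"));

Kernel kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion("gpt-4o-mini", openAIClient) // regular connector usage
    .Build();

// Breaking glass: drop down to the SDK for features the connectors do not wrap yet.
RealtimeConversationClient realtimeClient =
    openAIClient.GetRealtimeConversationClient("gpt-4o-realtime-preview"); // placeholder model id

using RealtimeConversationSession session =
    await realtimeClient.StartConversationSessionAsync();
```
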
RogerBarreto authored Oct 3, 2024
1 parent d7d27ee commit 6d5aa6e
Showing 42 changed files with 139 additions and 96 deletions.
4 changes: 2 additions & 2 deletions dotnet/Directory.Packages.props
@@ -7,9 +7,9 @@
<ItemGroup>
<PackageVersion Include="Azure.AI.Inference" Version="1.0.0-beta.1" />
<PackageVersion Include="Microsoft.VisualStudio.Threading" Version="17.11.20" />
<PackageVersion Include="OpenAI" Version="[2.0.0-beta.12]" />
<PackageVersion Include="OpenAI" Version="[2.1.0-beta.1]" />
<PackageVersion Include="Azure.AI.ContentSafety" Version="1.0.0" />
<PackageVersion Include="Azure.AI.OpenAI" Version="[2.0.0-beta.6]" />
<PackageVersion Include="Azure.AI.OpenAI" Version="[2.1.0-beta.1]" />
<PackageVersion Include="Azure.Identity" Version="1.12.0" />
<PackageVersion Include="Azure.Monitor.OpenTelemetry.Exporter" Version="1.3.0" />
<PackageVersion Include="Azure.Search.Documents" Version="11.6.0" />
3 changes: 2 additions & 1 deletion dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs
@@ -4,9 +4,10 @@
using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using OpenAI.Chat;
using Resources;

using ChatResponseFormat = OpenAI.Chat.ChatResponseFormat;

namespace Agents;

/// <summary>
4 changes: 2 additions & 2 deletions dotnet/samples/Concepts/Agents/MixedChat_Files.cs
@@ -21,9 +21,9 @@ public async Task AnalyzeFileAndGenerateReportAsync()
{
OpenAIClientProvider provider = this.GetClientProvider();

FileClient fileClient = provider.Client.GetFileClient();
OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();

OpenAIFileInfo uploadFile =
OpenAIFile uploadFile =
await fileClient.UploadFileAsync(
new BinaryData(await EmbeddedResource.ReadAllAsync("30-user-context.txt")),
"30-user-context.txt",
2 changes: 1 addition & 1 deletion dotnet/samples/Concepts/Agents/MixedChat_Images.cs
@@ -24,7 +24,7 @@ public async Task AnalyzeDataAndGenerateChartAsync()
{
OpenAIClientProvider provider = this.GetClientProvider();

FileClient fileClient = provider.Client.GetFileClient();
OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();

// Define the agents
OpenAIAssistantAgent analystAgent =
@@ -21,7 +21,7 @@ public async Task GenerateChartWithOpenAIAssistantAgentAsync()
{
OpenAIClientProvider provider = this.GetClientProvider();

FileClient fileClient = provider.Client.GetFileClient();
OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();

// Define the agent
OpenAIAssistantAgent agent =
@@ -18,9 +18,9 @@ public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync()
{
OpenAIClientProvider provider = this.GetClientProvider();

FileClient fileClient = provider.Client.GetFileClient();
OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();

OpenAIFileInfo uploadFile =
OpenAIFile uploadFile =
await fileClient.UploadFileAsync(
new BinaryData(await EmbeddedResource.ReadAllAsync("sales.csv")!),
"sales.csv",
@@ -127,6 +127,7 @@ public async Task ExampleWithKernelAsync()
/// <summary>
/// Initializes a new instance of the <see cref="AzureSearchChatDataSource"/> class.
/// </summary>
#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
private static AzureSearchChatDataSource GetAzureSearchDataSource()
{
return new AzureSearchChatDataSource
@@ -136,4 +137,5 @@ private static AzureSearchChatDataSource GetAzureSearchDataSource()
IndexName = TestConfiguration.AzureAISearch.IndexName
};
}
#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
}
@@ -283,7 +283,7 @@ private async Task<string> StreamMessageOutputFromKernelAsync(Kernel kernel, str
/// </remarks>
private void OutputInnerContent(OpenAI.Chat.StreamingChatCompletionUpdate streamChunk)
{
Console.WriteLine($"Id: {streamChunk.Id}");
Console.WriteLine($"Id: {streamChunk.CompletionId}");
Console.WriteLine($"Model: {streamChunk.Model}");
Console.WriteLine($"Created at: {streamChunk.CreatedAt}");
Console.WriteLine($"Finish reason: {(streamChunk.FinishReason?.ToString() ?? "--")}");
@@ -1,5 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.

using System.ClientModel;
using System.ClientModel.Primitives;
using Microsoft.SemanticKernel;
using OpenAI;
@@ -28,7 +29,7 @@ public async Task RunAsync()
RetryPolicy = new ClientRetryPolicy()
};

var customClient = new OpenAIClient(TestConfiguration.OpenAI.ApiKey, clientOptions);
var customClient = new OpenAIClient(new ApiKeyCredential(TestConfiguration.OpenAI.ApiKey), clientOptions);

var kernel = Kernel.CreateBuilder()
.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, customClient)
@@ -15,6 +15,8 @@
using Microsoft.SemanticKernel.Planning;
using OpenAI.Chat;

using ChatMessageContent = Microsoft.SemanticKernel.ChatMessageContent;

namespace Planners;

/// <summary>
@@ -50,7 +50,7 @@ await OpenAIAssistantAgent.CreateAsync(
{
await agent.DeleteThreadAsync(threadId);
await agent.DeleteAsync();
await provider.Client.GetFileClient().DeleteFileAsync(fileId);
await provider.Client.GetOpenAIFileClient().DeleteFileAsync(fileId);
}

// Local function to invoke agent and display the conversation messages.
@@ -30,14 +30,14 @@ await OpenAIAssistantAgent.CreateAsync(
kernel: new Kernel());

// Upload file - Using a table of fictional employees.
FileClient fileClient = provider.Client.GetFileClient();
OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
OpenAIFileInfo fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants);
OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants);

// Create a vector-store
VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient();
VectorStore vectorStore =
await vectorStoreClient.CreateVectorStoreAsync(
CreateVectorStoreOperation result =
await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false,
new VectorStoreCreationOptions()
{
FileIds = { fileInfo.Id },
@@ -49,7 +49,7 @@ await vectorStoreClient.CreateVectorStoreAsync(
await agent.CreateThreadAsync(
new OpenAIThreadCreationOptions
{
VectorStoreId = vectorStore.Id,
VectorStoreId = result.VectorStoreId,
Metadata = AssistantSampleMetadata,
});

@@ -64,7 +64,7 @@ await agent.CreateThreadAsync(
{
await agent.DeleteThreadAsync(threadId);
await agent.DeleteAsync();
await vectorStoreClient.DeleteVectorStoreAsync(vectorStore);
await vectorStoreClient.DeleteVectorStoreAsync(result.VectorStoreId);
await fileClient.DeleteFileAsync(fileInfo.Id);
}

4 changes: 2 additions & 2 deletions dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs
@@ -48,11 +48,11 @@ public static IEnumerable<MessageContent> GetMessageContents(ChatMessageContent
{
if (imageContent.Uri != null)
{
yield return MessageContent.FromImageUrl(imageContent.Uri);
yield return MessageContent.FromImageUri(imageContent.Uri);
}
else if (string.IsNullOrWhiteSpace(imageContent.DataUri))
{
yield return MessageContent.FromImageUrl(new(imageContent.DataUri!));
yield return MessageContent.FromImageUri(new(imageContent.DataUri!));
}
}
else if (content is FileReferenceContent fileContent)
@@ -27,11 +27,11 @@ public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition defin
{
AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions,
InstructionsOverride = overrideInstructions,
MaxCompletionTokens = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
MaxPromptTokens = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
MaxOutputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
MaxInputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
ModelOverride = invocationOptions?.ModelName,
NucleusSamplingFactor = ResolveExecutionSetting(invocationOptions?.TopP, definition.TopP),
ParallelToolCallsEnabled = ResolveExecutionSetting(invocationOptions?.ParallelToolCallsEnabled, definition.ExecutionOptions?.ParallelToolCallsEnabled),
AllowParallelToolCalls = ResolveExecutionSetting(invocationOptions?.ParallelToolCallsEnabled, definition.ExecutionOptions?.ParallelToolCallsEnabled),
ResponseFormat = ResolveExecutionSetting(invocationOptions?.EnableJsonResponse, definition.EnableJsonResponse) ?? false ? AssistantResponseFormat.JsonObject : null,
Temperature = ResolveExecutionSetting(invocationOptions?.Temperature, definition.Temperature),
TruncationStrategy = truncationMessageCount.HasValue ? RunTruncationStrategy.CreateLastMessagesStrategy(truncationMessageCount.Value) : null,
10 changes: 5 additions & 5 deletions dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -191,7 +191,7 @@ public static async IAsyncEnumerable<ChatMessageContent> GetMessagesAsync(Assist
throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
}

IReadOnlyList<RunStep> steps = await GetRunStepsAsync(client, run).ConfigureAwait(false);
IReadOnlyList<RunStep> steps = await GetRunStepsAsync(client, run, cancellationToken).ConfigureAwait(false);

// Is tool action required?
if (run.Status == RunStatus.RequiresAction)
@@ -443,7 +443,7 @@ public static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamin

if (run.Status == RunStatus.RequiresAction)
{
IReadOnlyList<RunStep> steps = await GetRunStepsAsync(client, run).ConfigureAwait(false);
IReadOnlyList<RunStep> steps = await GetRunStepsAsync(client, run, cancellationToken).ConfigureAwait(false);

// Execute functions in parallel and post results at once.
FunctionCallContent[] functionCalls = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
@@ -460,7 +460,7 @@ public static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamin

// Process tool output
ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
asyncUpdates = client.SubmitToolOutputsToRunStreamingAsync(run, toolOutputs);
asyncUpdates = client.SubmitToolOutputsToRunStreamingAsync(run.ThreadId, run.Id, toolOutputs, cancellationToken);

messages?.Add(GenerateFunctionResultContent(agent.GetName(), functionResults));
}
@@ -490,11 +490,11 @@ public static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamin
logger.LogOpenAIAssistantCompletedRun(nameof(InvokeAsync), run?.Id ?? "Failed", threadId);
}

private static async Task<IReadOnlyList<RunStep>> GetRunStepsAsync(AssistantClient client, ThreadRun run)
private static async Task<IReadOnlyList<RunStep>> GetRunStepsAsync(AssistantClient client, ThreadRun run, CancellationToken cancellationToken)
{
List<RunStep> steps = [];

await foreach (RunStep step in client.GetRunStepsAsync(run).ConfigureAwait(false))
await foreach (RunStep step in client.GetRunStepsAsync(run.ThreadId, run.Id, cancellationToken: cancellationToken).ConfigureAwait(false))
{
steps.Add(step);
}
6 changes: 3 additions & 3 deletions dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -245,13 +245,13 @@ public async Task<bool> DeleteThreadAsync(
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>The file identifier</returns>
/// <remarks>
/// Use the <see cref="FileClient"/> directly for more advanced file operations.
/// Use the <see cref="OpenAIFileClient"/> directly for more advanced file operations.
/// </remarks>
public async Task<string> UploadFileAsync(Stream stream, string name, CancellationToken cancellationToken = default)
{
FileClient client = this._provider.Client.GetFileClient();
OpenAIFileClient client = this._provider.Client.GetOpenAIFileClient();

OpenAIFileInfo fileInfo = await client.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);
OpenAIFile fileInfo = await client.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);

return fileInfo.Id;
}
4 changes: 2 additions & 2 deletions dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
@@ -106,7 +106,7 @@ private static AzureOpenAIClientOptions CreateAzureClientOptions(HttpClient? htt
{
AzureOpenAIClientOptions options = new()
{
ApplicationId = HttpHeaderConstant.Values.UserAgent
UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent
};

ConfigureClientOptions(httpClient, options);
@@ -118,7 +118,7 @@ private static OpenAIClientOptions CreateOpenAIClientOptions(Uri? endpoint, Http
{
OpenAIClientOptions options = new()
{
ApplicationId = HttpHeaderConstant.Values.UserAgent,
UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent,
Endpoint = endpoint ?? httpClient?.BaseAddress,
};

@@ -144,7 +144,7 @@ public void VerifyAssistantMessageAdapterGetMessageWithImageUrl()
// Assert
Assert.NotNull(contents);
Assert.Single(contents);
Assert.NotNull(contents.Single().ImageUrl);
Assert.NotNull(contents.Single().ImageUri);
}

/// <summary>
@@ -162,7 +162,7 @@ public void VerifyAssistantMessageAdapterGetMessageWithImageData()
// Assert
Assert.NotNull(contents);
Assert.Single(contents);
Assert.NotNull(contents.Single().ImageUrl);
Assert.NotNull(contents.Single().ImageUri);
}

/// <summary>
@@ -441,7 +441,7 @@ public static string GetTextMessage(string text = "test") =>
"bytes": 120000,
"created_at": 1677610602,
"filename": "test.txt",
"purpose": "assistant"
"purpose": "assistants"
}
""";

@@ -22,6 +22,8 @@
using Moq;
using OpenAI.Chat;

using ChatMessageContent = Microsoft.SemanticKernel.ChatMessageContent;

namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services;

/// <summary>
@@ -137,12 +139,14 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
StopSequences = ["stop_sequence"],
Logprobs = true,
TopLogprobs = 5,
#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
AzureChatDataSource = new AzureSearchChatDataSource()
{
Endpoint = new Uri("http://test-search-endpoint"),
IndexName = "test-index-name",
Authentication = DataSourceAuthentication.FromApiKey("api-key"),
}
#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
};

var chatHistory = new ChatHistory();
@@ -161,12 +161,14 @@ public void ItShouldThrowExceptionIfNoEndpointProvided(bool useTokeCredential, s
public async Task GetUriImageContentsResponseFormatRequestWorksCorrectlyAsync(string? responseFormatOption, string? expectedResponseFormat)
{
// Arrange
object? responseFormatObject = responseFormatOption switch
object? responseFormatObject = null;

switch (responseFormatOption)
{
"GeneratedImage.Uri" => GeneratedImageFormat.Uri,
"GeneratedImage.Bytes" => GeneratedImageFormat.Bytes,
_ => responseFormatOption
};
case "GeneratedImage.Uri": responseFormatObject = GeneratedImageFormat.Uri; break;
case "GeneratedImage.Bytes": responseFormatObject = GeneratedImageFormat.Bytes; break;
default: responseFormatObject = responseFormatOption; break;
}

this._httpClient.BaseAddress = new Uri("https://api-host");
var sut = new AzureOpenAITextToImageService("deployment", endpoint: null!, credential: new Mock<TokenCredential>().Object, "dall-e-3", this._httpClient);
@@ -31,12 +31,14 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromAzureOpenAIPromptExecuti
Logprobs = true,
Seed = 123456,
TopLogprobs = 5,
#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
AzureChatDataSource = new AzureSearchChatDataSource
{
Endpoint = new Uri("https://test-host"),
Authentication = DataSourceAuthentication.FromApiKey("api-key"),
IndexName = "index-name"
}
#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
};

// Act
@@ -1,13 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- https://learn.microsoft.com/en-us/dotnet/fundamentals/package-validation/diagnostic-ids -->
<Suppressions xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Suppression>
<DiagnosticId>CP0002</DiagnosticId>
<Target>M:Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAIPromptExecutionSettings.get_AzureChatDataSource</Target>
<Left>lib/net8.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll</Left>
<Right>lib/net8.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll</Right>
<IsBaselineSuppression>true</IsBaselineSuppression>
</Suppression>
<Suppression>
<DiagnosticId>CP0002</DiagnosticId>
<Target>M:Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAITextToImageService.GenerateImageAsync(System.String,System.Int32,System.Int32,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken)</Target>
<Left>lib/net8.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll</Left>
<Right>lib/net8.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll</Right>
<IsBaselineSuppression>true</IsBaselineSuppression>
</Suppression>
<Suppression>
<DiagnosticId>CP0002</DiagnosticId>
<Target>M:Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAIPromptExecutionSettings.get_AzureChatDataSource</Target>
<Left>lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll</Left>
<Right>lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll</Right>
<IsBaselineSuppression>true</IsBaselineSuppression>
</Suppression>
<Suppression>
<DiagnosticId>CP0002</DiagnosticId>
<Target>M:Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAITextToImageService.GenerateImageAsync(System.String,System.Int32,System.Int32,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken)</Target>