diff --git a/sdk/cognitiveservices/azopenai/README.md b/sdk/cognitiveservices/azopenai/README.md index 9b8e5a9179fd..8d599d44b437 100644 --- a/sdk/cognitiveservices/azopenai/README.md +++ b/sdk/cognitiveservices/azopenai/README.md @@ -1,8 +1,8 @@ # Azure OpenAI client module for Go -Azure OpenAI is a managed service that allows developers to deploy, tune, and generate content from OpenAI models on Azure resources. +NOTE: this client can be used with Azure OpenAI and OpenAI. -The Azure OpenAI client library for GO is an adaptation of OpenAI's REST APIs that provides an idiomatic interface and rich integration with the rest of the Azure SDK ecosystem. +Azure OpenAI Service provides access to OpenAI's powerful language models including the GPT-4, GPT-35-Turbo, and Embeddings model series, as well as image generation using DALL-E. [Source code][azopenai_repo] | [Package (pkg.go.dev)][azopenai_pkg_go] | [REST API documentation][openai_rest_docs] | [Product documentation][openai_docs] @@ -20,42 +20,34 @@ Install the `azopenai` and `azidentity` modules with `go get`: ```bash go get github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai + +# optional go get github.com/Azure/azure-sdk-for-go/sdk/azidentity ``` -The [azidentity][azure_identity] module is used for authentication during client construction. +The [azidentity][azure_identity] module is used for Azure Active Directory authentication with Azure OpenAI. ### Authentication - +#### Azure OpenAI -#### Create a client +Azure OpenAI clients can authenticate using Azure Active Directory or with an API key: -Constructing the client requires your vault's URL, which you can get from the Azure CLI or the Azure Portal. +* Using Azure Active Directory, with a TokenCredential: [example](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai#example-NewClient) +* Using an API key: [example](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai#example-NewClientWithKeyCredential) -```go -import ( - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai" -) - -func main() { - endpoint := "https://" - apiKey := "" - - var err error - cred := azopenai.KeyCredential{APIKey: apiKey} - client, err := azopenai.NewClientWithKeyCredential(endpoint, cred, &options) - if err != nil { - // TODO: handle error - } -} -``` +#### OpenAI + +OpenAI supports connecting using an API key: [example](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai#example-NewClientForOpenAI) ## Key concepts See [Key concepts][openai_key_concepts] in the product documentation for more details about general concepts. +## Examples + +Examples for various scenarios can be found on [pkg.go.dev](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai#pkg-examples) or in the example*_test.go files in our GitHub repo for [azopenai](https://github.com/Azure/azure-sdk-for-go/blob/main/sdk/cognitiveservices/azopenai). + ## Troubleshooting ### Error Handling @@ -103,3 +95,4 @@ comments. 
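As a companion to the README's authentication links above, here is a minimal sketch of constructing a client with an API key. The environment variable names and the deployment name are placeholders chosen for this sketch; any values that identify your Azure OpenAI resource and deployed model will do.

```go
package main

import (
	"log"
	"os"

	"github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai"
)

func main() {
	// Placeholder environment variables for this sketch.
	endpoint := os.Getenv("AOAI_ENDPOINT") // ex: "https://<your-resource>.openai.azure.com"
	apiKey := os.Getenv("AOAI_API_KEY")

	keyCredential, err := azopenai.NewKeyCredential(apiKey)
	if err != nil {
		log.Fatal(err)
	}

	// The third argument is the name of a model deployment on your Azure OpenAI
	// resource; "my-deployment" is a placeholder.
	client, err := azopenai.NewClientWithKeyCredential(endpoint, keyCredential, "my-deployment", nil)
	if err != nil {
		log.Fatal(err)
	}

	_ = client // use client.GetCompletions, client.GetChatCompletions, etc.
}
```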
[coc]: https://opensource.microsoft.com/codeofconduct/ [coc_faq]: https://opensource.microsoft.com/codeofconduct/faq/ [coc_contact]: mailto:opencode@microsoft.com +[azure_openai_quickstart]: https://learn.microsoft.com/azure/cognitive-services/openai/quickstart \ No newline at end of file diff --git a/sdk/cognitiveservices/azopenai/assets.json b/sdk/cognitiveservices/azopenai/assets.json index b7b04a7814db..8153da5c9cc9 100644 --- a/sdk/cognitiveservices/azopenai/assets.json +++ b/sdk/cognitiveservices/azopenai/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "go", "TagPrefix": "go/cognitiveservices/azopenai", - "Tag": "go/cognitiveservices/azopenai_63852f374c" + "Tag": "go/cognitiveservices/azopenai_e8362ae205" } diff --git a/sdk/cognitiveservices/azopenai/client_functions_test.go b/sdk/cognitiveservices/azopenai/client_functions_test.go index 352155c1eefb..92bf46f52acd 100644 --- a/sdk/cognitiveservices/azopenai/client_functions_test.go +++ b/sdk/cognitiveservices/azopenai/client_functions_test.go @@ -25,32 +25,23 @@ type ParamProperty struct { Enum []string `json:"enum,omitempty"` } -func getClientForFunctionsTest(t *testing.T, azure bool) *azopenai.Client { - if azure { - cred, err := azopenai.NewKeyCredential(apiKey) - require.NoError(t, err) - - chatClient, err := azopenai.NewClientWithKeyCredential(endpoint, cred, chatCompletionsModelDeployment, newClientOptionsForTest(t)) - require.NoError(t, err) - - return chatClient - } else { - cred, err := azopenai.NewKeyCredential(openAIKey) - require.NoError(t, err) - - chatClient, err := azopenai.NewClientForOpenAI(openAIEndpoint, cred, newClientOptionsForTest(t)) - require.NoError(t, err) - - return chatClient - } +func TestGetChatCompletions_usingFunctions(t *testing.T) { + // https://platform.openai.com/docs/guides/gpt/function-calling + + t.Run("OpenAI", func(t *testing.T) { + chatClient := newOpenAIClientForTest(t) + testChatCompletionsFunctions(t, chatClient) + }) + + t.Run("AzureOpenAI", func(t *testing.T) { + chatClient := newAzureOpenAIClientForTest(t, chatCompletionsModelDeployment, false) + testChatCompletionsFunctions(t, chatClient) + }) } -func TestFunctions(t *testing.T) { - // https://platform.openai.com/docs/guides/gpt/function-calling#:~:text=For%20example%2C%20you%20can%3A%201%20Create%20chatbots%20that,...%203%20Extract%20structured%20data%20from%20text%20 - chatClient := getClientForFunctionsTest(t, false) - +func testChatCompletionsFunctions(t *testing.T, chatClient *azopenai.Client) { resp, err := chatClient.GetChatCompletions(context.Background(), azopenai.ChatCompletionsOptions{ - Model: to.Ptr("gpt-3.5-turbo-0613"), + Model: to.Ptr("gpt-4-0613"), Messages: []azopenai.ChatMessage{ { Role: to.Ptr(azopenai.ChatRoleUser), diff --git a/sdk/cognitiveservices/azopenai/client_shared_test.go b/sdk/cognitiveservices/azopenai/client_shared_test.go index 3e4242842bec..94e201ba8dd6 100644 --- a/sdk/cognitiveservices/azopenai/client_shared_test.go +++ b/sdk/cognitiveservices/azopenai/client_shared_test.go @@ -48,6 +48,8 @@ func getVars(suffix string) (endpoint, apiKey, completionsModelDeployment, chatC apiKey = os.Getenv("AOAI_API_KEY" + suffix) completionsModelDeployment = os.Getenv("AOAI_COMPLETIONS_MODEL_DEPLOYMENT" + suffix) + + // ex: gpt-4-0613 chatCompletionsModelDeployment = os.Getenv("AOAI_CHAT_COMPLETIONS_MODEL_DEPLOYMENT" + suffix) return @@ -56,7 +58,7 @@ func getVars(suffix string) (endpoint, apiKey, completionsModelDeployment, chatC const fakeEndpoint = 
"https://recordedhost/" const fakeAPIKey = "redacted" -func init() { +func initEnvVars() { if recording.GetRecordMode() == recording.PlaybackMode { endpoint = fakeEndpoint apiKey = fakeAPIKey @@ -71,7 +73,7 @@ func init() { completionsModelDeployment = "text-davinci-003" openAICompletionsModel = "text-davinci-003" - chatCompletionsModelDeployment = "gpt-4" + chatCompletionsModelDeployment = "gpt-4-0613" openAIChatCompletionsModel = "gpt-4" } else { if err := godotenv.Load(); err != nil { @@ -85,7 +87,7 @@ func init() { openAIKey = os.Getenv("OPENAI_API_KEY") openAIEndpoint = os.Getenv("OPENAI_ENDPOINT") openAICompletionsModel = os.Getenv("OPENAI_COMPLETIONS_MODEL") - openAIChatCompletionsModel = os.Getenv("OPENAI_CHAT_COMPLETIONS_MODEL") + openAIChatCompletionsModel = os.Getenv("OPENAI_CHAT_COMPLETIONS_MODEL") // ex: gpt-4-0613 if openAIEndpoint != "" && !strings.HasSuffix(openAIEndpoint, "/") { // (this just makes recording replacement easier) diff --git a/sdk/cognitiveservices/azopenai/custom_client.go b/sdk/cognitiveservices/azopenai/custom_client.go index 6fba8cab1756..c4f1da63b198 100644 --- a/sdk/cognitiveservices/azopenai/custom_client.go +++ b/sdk/cognitiveservices/azopenai/custom_client.go @@ -4,6 +4,10 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. +// Package azopenai Azure OpenAI Service provides access to OpenAI's powerful language models including the GPT-4, +// GPT-35-Turbo, and Embeddings model series, as well as image generation using DALL-E. +// +// The [Client] in this package can be used with Azure OpenAI or OpenAI. package azopenai // this file contains handwritten additions to the generated code @@ -21,7 +25,6 @@ import ( const ( clientName = "azopenai.Client" - apiVersion = "2023-03-15-preview" tokenScope = "https://cognitiveservices.azure.com/.default" ) diff --git a/sdk/cognitiveservices/azopenai/example_client_createimage_test.go b/sdk/cognitiveservices/azopenai/example_client_createimage_test.go new file mode 100644 index 000000000000..42287b861ede --- /dev/null +++ b/sdk/cognitiveservices/azopenai/example_client_createimage_test.go @@ -0,0 +1,66 @@ +//go:build go1.18 +// +build go1.18 + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +package azopenai_test + +import ( + "context" + "fmt" + "net/http" + "os" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + "github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai" +) + +func ExampleClient_CreateImage() { + azureOpenAIKey := os.Getenv("AOAI_API_KEY") + + // Ex: "https://.openai.azure.com" + azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") + + if azureOpenAIKey == "" || azureOpenAIEndpoint == "" { + fmt.Fprintf(os.Stderr, "Skipping example, environment variables missing\n") + return + } + + keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) + + if err != nil { + // TODO: handle error + } + + client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, "", nil) + + if err != nil { + // TODO: handle error + } + + resp, err := client.CreateImage(context.TODO(), azopenai.ImageGenerationOptions{ + Prompt: to.Ptr("a cat"), + ResponseFormat: to.Ptr(azopenai.ImageGenerationResponseFormatURL), + }, nil) + + if err != nil { + // TODO: handle error + } + + for _, generatedImage := range resp.Data { + // the underlying type for the generatedImage is dictated by the value of + // ImageGenerationOptions.ResponseFormat. In this example we used `azopenai.ImageGenerationResponseFormatURL`, + // so the underlying type will be ImageLocation. + + resp, err := http.Head(*generatedImage.URL) + + if err != nil { + // TODO: handle error + } + + fmt.Fprintf(os.Stderr, "Image generated, HEAD request on URL returned %d\n", resp.StatusCode) + } + + // Output: +} diff --git a/sdk/cognitiveservices/azopenai/example_client_embeddings_test.go b/sdk/cognitiveservices/azopenai/example_client_embeddings_test.go new file mode 100644 index 000000000000..ce77480e676c --- /dev/null +++ b/sdk/cognitiveservices/azopenai/example_client_embeddings_test.go @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +package azopenai_test + +import ( + "context" + "fmt" + "os" + + "github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai" +) + +func ExampleClient_GetEmbeddings() { + azureOpenAIKey := os.Getenv("AOAI_API_KEY") + modelDeploymentID := os.Getenv("AOAI_EMBEDDINGS_MODEL_DEPLOYMENT") + + // Ex: "https://.openai.azure.com" + azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") + + if azureOpenAIKey == "" || modelDeploymentID == "" || azureOpenAIEndpoint == "" { + fmt.Fprintf(os.Stderr, "Skipping example, environment variables missing\n") + return + } + + keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) + + if err != nil { + // TODO: handle error + } + + // In Azure OpenAI you must deploy a model before you can use it in your client. For more information + // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, modelDeploymentID, nil) + + if err != nil { + // TODO: handle error + } + + resp, err := client.GetEmbeddings(context.TODO(), azopenai.EmbeddingsOptions{ + Input: []string{"The food was delicious and the waiter..."}, + Model: &modelDeploymentID, + }, nil) + + if err != nil { + // TODO: handle error + } + + for _, embed := range resp.Data { + // embed.Embedding contains the embeddings for this input index. 
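Once you have embeddings back from GetEmbeddings, a common next step is to compare them with cosine similarity. The helper below is a small sketch of that calculation; it assumes the embedding vectors are `[]float32` slices of equal length, one per input.

```go
package main

import (
	"fmt"
	"math"
)

// cosineSimilarity scores how closely two embedding vectors point in the same direction
// (1.0 means identical direction). Both slices must have equal length.
func cosineSimilarity(a, b []float32) float64 {
	var dot, normA, normB float64
	for i := range a {
		dot += float64(a[i]) * float64(b[i])
		normA += float64(a[i]) * float64(a[i])
		normB += float64(b[i]) * float64(b[i])
	}
	return dot / (math.Sqrt(normA) * math.Sqrt(normB))
}

func main() {
	// Toy vectors standing in for two embeddings returned by the service.
	fmt.Println(cosineSimilarity([]float32{1, 0, 1}, []float32{1, 1, 0})) // prints 0.5
}
```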
+ fmt.Fprintf(os.Stderr, "Got embeddings for input %d\n", *embed.Index) + } + + // Output: +} diff --git a/sdk/cognitiveservices/azopenai/example_client_getchatcompletions_test.go b/sdk/cognitiveservices/azopenai/example_client_getchatcompletions_test.go new file mode 100644 index 000000000000..007338d78547 --- /dev/null +++ b/sdk/cognitiveservices/azopenai/example_client_getchatcompletions_test.go @@ -0,0 +1,275 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +package azopenai_test + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "os" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + "github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai" +) + +func ExampleClient_GetChatCompletions() { + azureOpenAIKey := os.Getenv("AOAI_API_KEY") + modelDeploymentID := os.Getenv("AOAI_CHAT_COMPLETIONS_MODEL_DEPLOYMENT") + + // Ex: "https://.openai.azure.com" + azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") + + if azureOpenAIKey == "" || modelDeploymentID == "" || azureOpenAIEndpoint == "" { + fmt.Fprintf(os.Stderr, "Skipping example, environment variables missing\n") + return + } + + keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) + + if err != nil { + // TODO: handle error + } + + // In Azure OpenAI you must deploy a model before you can use it in your client. For more information + // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, modelDeploymentID, nil) + + if err != nil { + // TODO: handle error + } + + // This is a conversation in progress. + // NOTE: all messages, regardless of role, count against token usage for this API. + messages := []azopenai.ChatMessage{ + // You set the tone and rules of the conversation with a prompt as the system role. + {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr("You are a helpful assistant. You will talk like a pirate.")}, + + // The user asks a question + {Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr("Can you help me?")}, + + // The reply would come back from the ChatGPT. You'd add it to the conversation so we can maintain context. + {Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr("Arrrr! Of course, me hearty! What can I do for ye?")}, + + // The user answers the question based on the latest reply. + {Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr("What's the best way to train a parrot?")}, + + // from here you'd keep iterating, sending responses back from ChatGPT + } + + gotReply := false + + resp, err := client.GetChatCompletions(context.TODO(), azopenai.ChatCompletionsOptions{ + // This is a conversation in progress. + // NOTE: all messages count against token usage for this API. 
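To carry the conversation forward after a call like the one in ExampleClient_GetChatCompletions, you append the assistant's reply and the user's next message to the same slice and send it again. The sketch below assumes `client` and `messages` are set up as in that example; the follow-up question is made up for illustration.

```go
package chatsketch

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai"
)

// continueConversation sends the accumulated messages, records the assistant's reply,
// and queues the user's next turn so the whole history is sent on the next call.
func continueConversation(client *azopenai.Client, messages []azopenai.ChatMessage) ([]azopenai.ChatMessage, error) {
	resp, err := client.GetChatCompletions(context.TODO(), azopenai.ChatCompletionsOptions{
		Messages: messages,
	}, nil)
	if err != nil {
		return nil, err
	}

	// Keep the assistant's answer in the history so the model retains context.
	messages = append(messages, azopenai.ChatMessage{
		Role:    to.Ptr(azopenai.ChatRoleAssistant),
		Content: resp.Choices[0].Message.Content,
	})

	// The next user question (hypothetical here) goes on the end of the same slice.
	messages = append(messages, azopenai.ChatMessage{
		Role:    to.Ptr(azopenai.ChatRoleUser),
		Content: to.Ptr("How often should I reward it during training?"),
	})

	return messages, nil
}
```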
+ Messages: messages, + }, nil) + + if err != nil { + // TODO: handle error + } + + for _, choice := range resp.Choices { + gotReply = true + fmt.Fprintf(os.Stderr, "Content[%d]: %s\n", *choice.Index, *choice.Message.Content) + } + + if gotReply { + fmt.Fprintf(os.Stderr, "Got chat completions reply\n") + } + + // Output: +} + +func ExampleClient_GetChatCompletions_functions() { + azureOpenAIKey := os.Getenv("AOAI_API_KEY") + modelDeploymentID := os.Getenv("AOAI_CHAT_COMPLETIONS_MODEL_DEPLOYMENT") + + // Ex: "https://.openai.azure.com" + azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") + + if azureOpenAIKey == "" || modelDeploymentID == "" || azureOpenAIEndpoint == "" { + fmt.Fprintf(os.Stderr, "Skipping example, environment variables missing\n") + return + } + + keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) + + if err != nil { + // TODO: handle error + } + + // In Azure OpenAI you must deploy a model before you can use it in your client. For more information + // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, modelDeploymentID, nil) + + if err != nil { + // TODO: handle error + } + + // some JSON schema keys + const jsonSchemaType = "type" + const jsonSchemaDesc = "description" + const jsonSchemaEnum = "enum" + const jsonSchemaRequired = "required" + const jsonSchemaProps = "properties" + + resp, err := client.GetChatCompletions(context.Background(), azopenai.ChatCompletionsOptions{ + Model: &modelDeploymentID, + Messages: []azopenai.ChatMessage{ + { + Role: to.Ptr(azopenai.ChatRoleUser), + Content: to.Ptr("What's the weather like in Boston, MA, in celsius?"), + }, + }, + FunctionCall: &azopenai.ChatCompletionsOptionsFunctionCall{ + Value: to.Ptr("auto"), + }, + Functions: []azopenai.FunctionDefinition{ + { + Name: to.Ptr("get_current_weather"), + Description: to.Ptr("Get the current weather in a given location"), + + Parameters: map[string]any{ + jsonSchemaRequired: []string{"location"}, + jsonSchemaType: "object", + jsonSchemaProps: map[string]any{ + "location": map[string]any{ + jsonSchemaType: "string", + jsonSchemaDesc: "The city and state, e.g. 
San Francisco, CA", + }, + "unit": map[string]any{ + jsonSchemaType: "string", + jsonSchemaEnum: []string{"celsius", "fahrenheit"}, + }, + }, + }, + }, + }, + Temperature: to.Ptr[float32](0.0), + }, nil) + + if err != nil { + // TODO: handle error + } + + funcCall := resp.ChatCompletions.Choices[0].Message.FunctionCall + + // This is the function name we gave in the call to GetCompletions + // Prints: Function name: "get_current_weather" + fmt.Fprintf(os.Stderr, "Function name: %q\n", *funcCall.Name) + + // The arguments for your function come back as a JSON string + var funcParams *struct { + Location string `json:"location"` + Unit string `json:"unit"` + } + err = json.Unmarshal([]byte(*funcCall.Arguments), &funcParams) + + if err != nil { + // TODO: handle error + } + + // Prints: + // Parameters: azopenai_test.location{Location:"Boston, MA", Unit:"celsius"} + fmt.Fprintf(os.Stderr, "Parameters: %#v\n", *funcParams) + + // Output: +} + +func ExampleClient_GetChatCompletionsStream() { + azureOpenAIKey := os.Getenv("AOAI_API_KEY") + modelDeploymentID := os.Getenv("AOAI_CHAT_COMPLETIONS_MODEL_DEPLOYMENT") + + // Ex: "https://.openai.azure.com" + azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") + + if azureOpenAIKey == "" || modelDeploymentID == "" || azureOpenAIEndpoint == "" { + fmt.Fprintf(os.Stderr, "Skipping example, environment variables missing\n") + return + } + + keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) + + if err != nil { + // TODO: handle error + } + + // In Azure OpenAI you must deploy a model before you can use it in your client. For more information + // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, modelDeploymentID, nil) + + if err != nil { + // TODO: handle error + } + + // This is a conversation in progress. + // NOTE: all messages, regardless of role, count against token usage for this API. + messages := []azopenai.ChatMessage{ + // You set the tone and rules of the conversation with a prompt as the system role. + {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr("You are a helpful assistant. You will talk like a pirate and limit your responses to 20 words or less.")}, + + // The user asks a question + {Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr("Can you help me?")}, + + // The reply would come back from the ChatGPT. You'd add it to the conversation so we can maintain context. + {Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr("Arrrr! Of course, me hearty! What can I do for ye?")}, + + // The user answers the question based on the latest reply. + {Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr("What's the best way to train a parrot?")}, + + // from here you'd keep iterating, sending responses back from ChatGPT + } + + resp, err := client.GetChatCompletionsStream(context.TODO(), azopenai.ChatCompletionsOptions{ + // This is a conversation in progress. + // NOTE: all messages count against token usage for this API. 
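The function-calling example above stops after unmarshalling the model's arguments. In practice you dispatch those arguments to your own code next; the sketch below shows that step with a made-up local weather function, and the closing comment notes the usual follow-up of sending the result back to the model so it can phrase the final answer.

```go
package functionsketch

import (
	"encoding/json"
	"fmt"
)

// getCurrentWeather stands in for your real implementation; the model only decides
// that it should be called and supplies the arguments.
func getCurrentWeather(location, unit string) string {
	return fmt.Sprintf("22 degrees %s and sunny in %s", unit, location)
}

// handleFunctionCall is a sketch of acting on a function call: pass it *funcCall.Name
// and *funcCall.Arguments from the example above.
func handleFunctionCall(name, arguments string) (string, error) {
	if name != "get_current_weather" {
		return "", fmt.Errorf("no local implementation for function %q", name)
	}

	var args struct {
		Location string `json:"location"`
		Unit     string `json:"unit"`
	}

	if err := json.Unmarshal([]byte(arguments), &args); err != nil {
		return "", err
	}

	// Normally you would now append this result to the conversation as a message
	// attributed to the function and call GetChatCompletions again so the model can
	// word the final answer.
	return getCurrentWeather(args.Location, args.Unit), nil
}
```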
+ Messages: messages, + N: to.Ptr[int32](1), + }, nil) + + if err != nil { + // TODO: handle error + } + + streamReader := resp.ChatCompletionsStream + gotReply := false + + for { + chatCompletions, err := streamReader.Read() + + if errors.Is(err, io.EOF) { + break + } + + if err != nil { + // TODO: handle error + } + + for _, choice := range chatCompletions.Choices { + gotReply = true + + text := "" + + if choice.Delta.Content != nil { + text = *choice.Delta.Content + } + + role := "" + + if choice.Delta.Role != nil { + role = string(*choice.Delta.Role) + } + + fmt.Fprintf(os.Stderr, "Content[%d], role %q: %q\n", *choice.Index, role, text) + } + } + + if gotReply { + fmt.Fprintf(os.Stderr, "Got chat completions streaming reply\n") + } + + // Output: +} diff --git a/sdk/cognitiveservices/azopenai/example_client_getcompletions_test.go b/sdk/cognitiveservices/azopenai/example_client_getcompletions_test.go new file mode 100644 index 000000000000..5339502a029b --- /dev/null +++ b/sdk/cognitiveservices/azopenai/example_client_getcompletions_test.go @@ -0,0 +1,114 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +package azopenai_test + +import ( + "context" + "errors" + "fmt" + "io" + "os" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + "github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai" +) + +func ExampleClient_GetCompletions() { + azureOpenAIKey := os.Getenv("AOAI_API_KEY") + modelDeploymentID := os.Getenv("AOAI_COMPLETIONS_MODEL_DEPLOYMENT") + + // Ex: "https://.openai.azure.com" + azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") + + if azureOpenAIKey == "" || modelDeploymentID == "" || azureOpenAIEndpoint == "" { + fmt.Fprintf(os.Stderr, "Skipping example, environment variables missing\n") + return + } + + keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) + + if err != nil { + // TODO: handle error + } + + // In Azure OpenAI you must deploy a model before you can use it in your client. For more information + // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, modelDeploymentID, nil) + + if err != nil { + // TODO: handle error + } + + resp, err := client.GetCompletions(context.TODO(), azopenai.CompletionsOptions{ + Prompt: []string{"What is Azure OpenAI, in 20 words or less"}, + MaxTokens: to.Ptr(int32(2048)), + Temperature: to.Ptr(float32(0.0)), + }, nil) + + if err != nil { + // TODO: handle error + } + + for _, choice := range resp.Choices { + fmt.Fprintf(os.Stderr, "Result: %s\n", *choice.Text) + } + + // Output: +} + +func ExampleClient_GetCompletionsStream() { + azureOpenAIKey := os.Getenv("AOAI_API_KEY") + modelDeploymentID := os.Getenv("AOAI_COMPLETIONS_MODEL_DEPLOYMENT") + + // Ex: "https://.openai.azure.com" + azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") + + if azureOpenAIKey == "" || modelDeploymentID == "" || azureOpenAIEndpoint == "" { + fmt.Fprintf(os.Stderr, "Skipping example, environment variables missing\n") + return + } + + keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) + + if err != nil { + // TODO: handle error + } + + // In Azure OpenAI you must deploy a model before you can use it in your client. 
For more information + // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, modelDeploymentID, nil) + + if err != nil { + // TODO: handle error + } + + resp, err := client.GetCompletionsStream(context.TODO(), azopenai.CompletionsOptions{ + Prompt: []string{"What is Azure OpenAI, in 20 words or less?"}, + MaxTokens: to.Ptr(int32(2048)), + Temperature: to.Ptr(float32(0.0)), + }, nil) + + if err != nil { + // TODO: handle error + } + + for { + entry, err := resp.CompletionsStream.Read() + + if errors.Is(err, io.EOF) { + fmt.Fprintf(os.Stderr, "\n*** No more completions ***\n") + break + } + + if err != nil { + // TODO: handle error + } + + for _, choice := range entry.Choices { + fmt.Fprintf(os.Stderr, "Result: %s\n", *choice.Text) + } + } + + // Output: +} diff --git a/sdk/cognitiveservices/azopenai/example_client_test.go b/sdk/cognitiveservices/azopenai/example_client_test.go new file mode 100644 index 000000000000..e13930b3f680 --- /dev/null +++ b/sdk/cognitiveservices/azopenai/example_client_test.go @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +package azopenai_test + +import ( + "github.com/Azure/azure-sdk-for-go/sdk/azidentity" + "github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai" +) + +func ExampleNewClientForOpenAI() { + keyCredential, err := azopenai.NewKeyCredential("") + + if err != nil { + // TODO: handle error + } + + // NOTE: this constructor creates a client that connects to the public OpenAI endpoint. + // To connect to an Azure OpenAI endpoint, use azopenai.NewClient() or azopenai.NewClientWithKeyCredential. + client, err := azopenai.NewClientForOpenAI("https://api.openai.com/v1", keyCredential, nil) + + if err != nil { + // TODO: handle error + } + + _ = client +} + +func ExampleNewClient() { + dac, err := azidentity.NewDefaultAzureCredential(nil) + + if err != nil { + // TODO: handle error + } + + modelDeploymentID := "model deployment ID" + + // NOTE: this constructor creates a client that connects to an Azure OpenAI endpoint. + // To connect to the public OpenAI endpoint, use azopenai.NewClientForOpenAI + client, err := azopenai.NewClient("https://.openai.azure.com", dac, modelDeploymentID, nil) + + if err != nil { + // TODO: handle error + } + + _ = client +} + +func ExampleNewClientWithKeyCredential() { + keyCredential, err := azopenai.NewKeyCredential("") + + if err != nil { + // TODO: handle error + } + + // In Azure OpenAI you must deploy a model before you can use it in your client. For more information + // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + modelDeploymentID := "model deployment ID" + + // NOTE: this constructor creates a client that connects to an Azure OpenAI endpoint. 
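The `// TODO: handle error` placeholders in these examples are where real error handling goes. For Azure SDK for Go clients, failed service calls generally surface as *azcore.ResponseError; the sketch below shows the usual unwrap-and-inspect pattern (the formatting choice is arbitrary).

```go
package errorsketch

import (
	"errors"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
)

// describeError distinguishes service failures (which carry an HTTP status and a
// service error code) from other errors such as network failures or cancellation.
func describeError(err error) string {
	var respErr *azcore.ResponseError
	if errors.As(err, &respErr) {
		return fmt.Sprintf("service error: status=%d, code=%q", respErr.StatusCode, respErr.ErrorCode)
	}
	return fmt.Sprintf("other error: %v", err)
}
```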
+ // To connect to the public OpenAI endpoint, use azopenai.NewClientForOpenAI + client, err := azopenai.NewClientWithKeyCredential("https://.openai.azure.com", keyCredential, modelDeploymentID, nil) + + if err != nil { + // TODO: handle error + } + + _ = client +} diff --git a/sdk/cognitiveservices/azopenai/examples_client_test.go b/sdk/cognitiveservices/azopenai/examples_client_test.go deleted file mode 100644 index 7af7c7d1c1fe..000000000000 --- a/sdk/cognitiveservices/azopenai/examples_client_test.go +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. - -package azopenai_test - -import ( - "context" - "errors" - "fmt" - "io" - "os" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai" -) - -func ExampleNewClientForOpenAI() { - // NOTE: this constructor creates a client that connects to the public OpenAI endpoint. - // To connect to an Azure OpenAI endpoint, use azopenai.NewClient() or azopenai.NewClientWithyKeyCredential. - keyCredential, err := azopenai.NewKeyCredential("") - - if err != nil { - panic(err) - } - - client, err := azopenai.NewClientForOpenAI("https://api.openai.com/v1", keyCredential, nil) - - if err != nil { - panic(err) - } - - _ = client -} - -func ExampleNewClient() { - // NOTE: this constructor creates a client that connects to an Azure OpenAI endpoint. - // To connect to the public OpenAI endpoint, use azopenai.NewClientForOpenAI - dac, err := azidentity.NewDefaultAzureCredential(nil) - - if err != nil { - panic(err) - } - - modelDeploymentID := "model deployment ID" - client, err := azopenai.NewClient("https://.openai.azure.com", dac, modelDeploymentID, nil) - - if err != nil { - panic(err) - } - - _ = client -} - -func ExampleNewClientWithKeyCredential() { - // NOTE: this constructor creates a client that connects to an Azure OpenAI endpoint. - // To connect to the public OpenAI endpoint, use azopenai.NewClientForOpenAI - keyCredential, err := azopenai.NewKeyCredential("") - - if err != nil { - panic(err) - } - - // In Azure OpenAI you must deploy a model before you can use it in your client. For more information - // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - modelDeploymentID := "model deployment ID" - client, err := azopenai.NewClientWithKeyCredential("https://.openai.azure.com", keyCredential, modelDeploymentID, nil) - - if err != nil { - panic(err) - } - - _ = client -} - -func ExampleClient_GetCompletionsStream() { - azureOpenAIKey := os.Getenv("AOAI_API_KEY") - modelDeploymentID := os.Getenv("AOAI_COMPLETIONS_MODEL_DEPLOYMENT") - - // Ex: "https://.openai.azure.com" - azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") - - if azureOpenAIKey == "" || modelDeploymentID == "" || azureOpenAIEndpoint == "" { - return - } - - keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) - - if err != nil { - panic(err) - } - - // In Azure OpenAI you must deploy a model before you can use it in your client. 
For more information - // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, modelDeploymentID, nil) - - if err != nil { - panic(err) - } - - resp, err := client.GetCompletionsStream(context.TODO(), azopenai.CompletionsOptions{ - Prompt: []string{"What is Azure OpenAI?"}, - MaxTokens: to.Ptr(int32(2048)), - Temperature: to.Ptr(float32(0.0)), - }, nil) - - if err != nil { - panic(err) - } - - for { - entry, err := resp.CompletionsStream.Read() - - if errors.Is(err, io.EOF) { - fmt.Printf("\n *** No more completions ***\n") - break - } - - if err != nil { - panic(err) - } - - for _, choice := range entry.Choices { - fmt.Printf("%s", *choice.Text) - } - } -} - -func ExampleClient_CreateImage() { - azureOpenAIKey := os.Getenv("AOAI_API_KEY") - - // Ex: "https://.openai.azure.com" - azureOpenAIEndpoint := os.Getenv("AOAI_ENDPOINT") - - if azureOpenAIKey == "" || azureOpenAIEndpoint == "" { - fmt.Printf("Skipping example, environment variables missing\n") - return - } - - keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey) - - if err != nil { - panic(err) - } - - client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, "", nil) - - if err != nil { - panic(err) - } - - resp, err := client.CreateImage(context.TODO(), azopenai.ImageGenerationOptions{ - Prompt: to.Ptr("a cat"), - ResponseFormat: to.Ptr(azopenai.ImageGenerationResponseFormatURL), - }, nil) - - if err != nil { - panic(err) - } - - for _, generatedImage := range resp.Data { - // the underlying type for the generatedImage is dictated by the value of - // ImageGenerationOptions.ResponseFormat. In this example we used `azopenai.ImageGenerationResponseFormatURL`, - // so the underlying type will be ImageLocation. - fmt.Printf("Image generated at URL %q\n", *generatedImage.URL) - } -} diff --git a/sdk/cognitiveservices/azopenai/main_test.go b/sdk/cognitiveservices/azopenai/main_test.go new file mode 100644 index 000000000000..2890ad613e61 --- /dev/null +++ b/sdk/cognitiveservices/azopenai/main_test.go @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +package azopenai_test + +import ( + "os" + "testing" +) + +func TestMain(m *testing.M) { + initEnvVars() + os.Exit(m.Run()) +}
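Both GetCompletionsStream examples print chunks as they arrive. If you instead want the full text once the stream ends, you can drain the reader into a builder. This sketch assumes the stream's Read method returns (azopenai.Completions, error) and signals completion with io.EOF, as in the examples above; the small local interface is only there to avoid naming the concrete reader type.

```go
package streamsketch

import (
	"errors"
	"io"
	"strings"

	"github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai"
)

// completionsStream matches the Read method used in the examples; the concrete type
// returned by GetCompletionsStream is assumed to satisfy it.
type completionsStream interface {
	Read() (azopenai.Completions, error)
}

// collectStreamedText drains a completions stream and concatenates the streamed text.
func collectStreamedText(stream completionsStream) (string, error) {
	var sb strings.Builder

	for {
		entry, err := stream.Read()

		if errors.Is(err, io.EOF) {
			return sb.String(), nil
		}

		if err != nil {
			return "", err
		}

		for _, choice := range entry.Choices {
			if choice.Text != nil {
				sb.WriteString(*choice.Text)
			}
		}
	}
}
```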