From 3f649f89183ea9bb4512fa71f03292fe7c2595cd Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Wed, 12 Jul 2023 19:49:22 +0000 Subject: [PATCH] CodeGen from PR 24652 in Azure/azure-rest-api-specs Merge fc29d6dd3a8f1890ff67340561f2513dea60bd35 into e994b93c82c5d23eb377f35434354438e748cb87 --- .../azure/ai/openai/OpenAIAsyncClient.java | 23 ++- .../com/azure/ai/openai/OpenAIClient.java | 24 ++- .../azure/ai/openai/OpenAIServiceVersion.java | 7 +- .../implementation/OpenAIClientImpl.java | 166 ++++++++++++++---- .../azure/ai/openai/models/ChatChoice.java | 21 +++ .../ai/openai/models/ChatCompletions.java | 63 ++++--- .../openai/models/ChatCompletionsOptions.java | 67 +++++++ .../azure/ai/openai/models/ChatMessage.java | 68 +++++++ .../com/azure/ai/openai/models/ChatRole.java | 3 + .../com/azure/ai/openai/models/Choice.java | 21 +++ .../azure/ai/openai/models/Completions.java | 19 ++ .../models/CompletionsFinishReason.java | 3 + .../CompletionsLogProbabilityModel.java | 86 ++++----- .../ai/openai/models/ContentFilterResult.java | 63 +++++++ .../openai/models/ContentFilterResults.java | 119 +++++++++++++ .../openai/models/ContentFilterSeverity.java | 71 ++++++++ .../azure/ai/openai/models/EmbeddingItem.java | 28 +-- .../azure/ai/openai/models/FunctionCall.java | 67 +++++++ .../openai/models/FunctionCallModelBase.java | 11 ++ .../ai/openai/models/FunctionCallPreset.java | 59 +++++++ .../FunctionCallPresetFunctionCallModel.java | 33 ++++ .../ai/openai/models/FunctionDefinition.java | 105 +++++++++++ .../azure/ai/openai/models/FunctionName.java | 45 +++++ .../models/FunctionNameFunctionCallModel.java | 33 ++++ .../ai/openai/models/PromptFilterResult.java | 59 +++++++ sdk/openai/azure-ai-openai/tsp-location.yaml | 8 +- 26 files changed, 1138 insertions(+), 134 deletions(-) create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterResult.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterResults.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterSeverity.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCall.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallModelBase.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallPreset.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallPresetFunctionCallModel.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionDefinition.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionName.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionNameFunctionCallModel.java create mode 100644 sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/PromptFilterResult.java diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIAsyncClient.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIAsyncClient.java index 9abde7ed899fa..0ba8c79850808 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIAsyncClient.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIAsyncClient.java @@ -266,7 +266,8 @@ public Mono> getChatCompletionsWithResponse( /** * Return the embeddings for a given prompt. 
* - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param embeddingsOptions The configuration information for an embeddings request. Embeddings measure the * relatedness of text strings and are commonly used for search, clustering, recommendations, and other similar * scenarios. @@ -282,10 +283,11 @@ public Mono> getChatCompletionsWithResponse( */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) - public Mono getEmbeddings(String deploymentId, EmbeddingsOptions embeddingsOptions) { + public Mono getEmbeddings(String deploymentOrModelName, EmbeddingsOptions embeddingsOptions) { // Generated convenience method for getEmbeddingsWithResponse RequestOptions requestOptions = new RequestOptions(); - return getEmbeddingsWithResponse(deploymentId, BinaryData.fromObject(embeddingsOptions), requestOptions) + return getEmbeddingsWithResponse( + deploymentOrModelName, BinaryData.fromObject(embeddingsOptions), requestOptions) .flatMap(FluxUtil::toMono) .map(protocolMethodData -> protocolMethodData.toObject(Embeddings.class)); } @@ -294,7 +296,8 @@ public Mono getEmbeddings(String deploymentId, EmbeddingsOptions emb * Gets completions for the provided input prompts. Completions support a wide variety of tasks and generate text * that continues from or "completes" provided prompt data. * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param completionsOptions The configuration information for a completions request. Completions support a wide * variety of tasks and generate text that continues from or "completes" provided prompt data. * @throws IllegalArgumentException thrown if parameters fail the validation. @@ -308,10 +311,11 @@ public Mono getEmbeddings(String deploymentId, EmbeddingsOptions emb */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) - public Mono getCompletions(String deploymentId, CompletionsOptions completionsOptions) { + public Mono getCompletions(String deploymentOrModelName, CompletionsOptions completionsOptions) { // Generated convenience method for getCompletionsWithResponse RequestOptions requestOptions = new RequestOptions(); - return getCompletionsWithResponse(deploymentId, BinaryData.fromObject(completionsOptions), requestOptions) + return getCompletionsWithResponse( + deploymentOrModelName, BinaryData.fromObject(completionsOptions), requestOptions) .flatMap(FluxUtil::toMono) .map(protocolMethodData -> protocolMethodData.toObject(Completions.class)); } @@ -369,7 +373,8 @@ public Flux getCompletionsStream(String deploymentId, CompletionsOp * Gets chat completions for the provided chat messages. Completions support a wide variety of tasks and generate * text that continues from or "completes" provided prompt data. * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param chatCompletionsOptions The configuration information for a chat completions request. Completions support a * wide variety of tasks and generate text that continues from or "completes" provided prompt data. 
* @throws IllegalArgumentException thrown if parameters fail the validation. @@ -384,11 +389,11 @@ public Flux getCompletionsStream(String deploymentId, CompletionsOp @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Mono getChatCompletions( - String deploymentId, ChatCompletionsOptions chatCompletionsOptions) { + String deploymentOrModelName, ChatCompletionsOptions chatCompletionsOptions) { // Generated convenience method for getChatCompletionsWithResponse RequestOptions requestOptions = new RequestOptions(); return getChatCompletionsWithResponse( - deploymentId, BinaryData.fromObject(chatCompletionsOptions), requestOptions) + deploymentOrModelName, BinaryData.fromObject(chatCompletionsOptions), requestOptions) .flatMap(FluxUtil::toMono) .map(protocolMethodData -> protocolMethodData.toObject(ChatCompletions.class)); } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIClient.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIClient.java index 29f66b350061e..278003f61e6c6 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIClient.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIClient.java @@ -262,7 +262,8 @@ public Response getChatCompletionsWithResponse( /** * Return the embeddings for a given prompt. * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param embeddingsOptions The configuration information for an embeddings request. Embeddings measure the * relatedness of text strings and are commonly used for search, clustering, recommendations, and other similar * scenarios. @@ -277,10 +278,11 @@ public Response getChatCompletionsWithResponse( */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) - public Embeddings getEmbeddings(String deploymentId, EmbeddingsOptions embeddingsOptions) { + public Embeddings getEmbeddings(String deploymentOrModelName, EmbeddingsOptions embeddingsOptions) { // Generated convenience method for getEmbeddingsWithResponse RequestOptions requestOptions = new RequestOptions(); - return getEmbeddingsWithResponse(deploymentId, BinaryData.fromObject(embeddingsOptions), requestOptions) + return getEmbeddingsWithResponse( + deploymentOrModelName, BinaryData.fromObject(embeddingsOptions), requestOptions) .getValue() .toObject(Embeddings.class); } @@ -289,7 +291,8 @@ public Embeddings getEmbeddings(String deploymentId, EmbeddingsOptions embedding * Gets completions for the provided input prompts. Completions support a wide variety of tasks and generate text * that continues from or "completes" provided prompt data. * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param completionsOptions The configuration information for a completions request. Completions support a wide * variety of tasks and generate text that continues from or "completes" provided prompt data. * @throws IllegalArgumentException thrown if parameters fail the validation. 
@@ -303,10 +306,11 @@ public Embeddings getEmbeddings(String deploymentId, EmbeddingsOptions embedding */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) - public Completions getCompletions(String deploymentId, CompletionsOptions completionsOptions) { + public Completions getCompletions(String deploymentOrModelName, CompletionsOptions completionsOptions) { // Generated convenience method for getCompletionsWithResponse RequestOptions requestOptions = new RequestOptions(); - return getCompletionsWithResponse(deploymentId, BinaryData.fromObject(completionsOptions), requestOptions) + return getCompletionsWithResponse( + deploymentOrModelName, BinaryData.fromObject(completionsOptions), requestOptions) .getValue() .toObject(Completions.class); } @@ -365,7 +369,8 @@ public IterableStream getCompletionsStream( * Gets chat completions for the provided chat messages. Completions support a wide variety of tasks and generate * text that continues from or "completes" provided prompt data. * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param chatCompletionsOptions The configuration information for a chat completions request. Completions support a * wide variety of tasks and generate text that continues from or "completes" provided prompt data. * @throws IllegalArgumentException thrown if parameters fail the validation. @@ -379,11 +384,12 @@ public IterableStream getCompletionsStream( */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) - public ChatCompletions getChatCompletions(String deploymentId, ChatCompletionsOptions chatCompletionsOptions) { + public ChatCompletions getChatCompletions( + String deploymentOrModelName, ChatCompletionsOptions chatCompletionsOptions) { // Generated convenience method for getChatCompletionsWithResponse RequestOptions requestOptions = new RequestOptions(); return getChatCompletionsWithResponse( - deploymentId, BinaryData.fromObject(chatCompletionsOptions), requestOptions) + deploymentOrModelName, BinaryData.fromObject(chatCompletionsOptions), requestOptions) .getValue() .toObject(ChatCompletions.class); } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIServiceVersion.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIServiceVersion.java index 8d15584caf4d4..6a0186a342c12 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIServiceVersion.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/OpenAIServiceVersion.java @@ -15,7 +15,10 @@ public enum OpenAIServiceVersion implements ServiceVersion { V2023_05_15("2023-05-15"), /** Enum value 2023-06-01-preview. */ - V2023_06_01_PREVIEW("2023-06-01-preview"); + V2023_06_01_PREVIEW("2023-06-01-preview"), + + /** Enum value 2023-07-01-preview. */ + V2023_07_01_PREVIEW("2023-07-01-preview"); private final String version; @@ -35,6 +38,6 @@ public String getVersion() { * @return The latest {@link OpenAIServiceVersion}. 
*/ public static OpenAIServiceVersion getLatest() { - return V2023_06_01_PREVIEW; + return V2023_07_01_PREVIEW; } } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/implementation/OpenAIClientImpl.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/implementation/OpenAIClientImpl.java index 788bf745dcae4..5f03fcc6de3e0 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/implementation/OpenAIClientImpl.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/implementation/OpenAIClientImpl.java @@ -172,7 +172,7 @@ public interface OpenAIClientService { Mono> getEmbeddings( @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, - @PathParam("deploymentId") String deploymentId, + @PathParam("deploymentId") String deploymentOrModelName, @HeaderParam("accept") String accept, @BodyParam("application/json") BinaryData embeddingsOptions, RequestOptions requestOptions, @@ -193,7 +193,7 @@ Mono> getEmbeddings( Response getEmbeddingsSync( @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, - @PathParam("deploymentId") String deploymentId, + @PathParam("deploymentId") String deploymentOrModelName, @HeaderParam("accept") String accept, @BodyParam("application/json") BinaryData embeddingsOptions, RequestOptions requestOptions, @@ -214,7 +214,7 @@ Response getEmbeddingsSync( Mono> getCompletions( @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, - @PathParam("deploymentId") String deploymentId, + @PathParam("deploymentId") String deploymentOrModelName, @HeaderParam("accept") String accept, @BodyParam("application/json") BinaryData completionsOptions, RequestOptions requestOptions, @@ -235,7 +235,7 @@ Mono> getCompletions( Response getCompletionsSync( @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, - @PathParam("deploymentId") String deploymentId, + @PathParam("deploymentId") String deploymentOrModelName, @HeaderParam("accept") String accept, @BodyParam("application/json") BinaryData completionsOptions, RequestOptions requestOptions, @@ -256,7 +256,7 @@ Response getCompletionsSync( Mono> getChatCompletions( @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, - @PathParam("deploymentId") String deploymentId, + @PathParam("deploymentId") String deploymentOrModelName, @HeaderParam("accept") String accept, @BodyParam("application/json") BinaryData chatCompletionsOptions, RequestOptions requestOptions, @@ -277,7 +277,7 @@ Mono> getChatCompletions( Response getChatCompletionsSync( @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, - @PathParam("deploymentId") String deploymentId, + @PathParam("deploymentId") String deploymentOrModelName, @HeaderParam("accept") String accept, @BodyParam("application/json") BinaryData chatCompletionsOptions, RequestOptions requestOptions, @@ -398,7 +398,8 @@ Response beginAzureBatchImageGenerationSync( * } * } * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param embeddingsOptions The configuration information for an embeddings request. Embeddings measure the * relatedness of text strings and are commonly used for search, clustering, recommendations, and other similar * scenarios. 
@@ -413,14 +414,14 @@ Response beginAzureBatchImageGenerationSync( */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> getEmbeddingsWithResponseAsync( - String deploymentId, BinaryData embeddingsOptions, RequestOptions requestOptions) { + String deploymentOrModelName, BinaryData embeddingsOptions, RequestOptions requestOptions) { final String accept = "application/json"; return FluxUtil.withContext( context -> service.getEmbeddings( this.getEndpoint(), this.getServiceVersion().getVersion(), - deploymentId, + deploymentOrModelName, accept, embeddingsOptions, requestOptions, @@ -461,7 +462,8 @@ public Mono> getEmbeddingsWithResponseAsync( * } * } * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param embeddingsOptions The configuration information for an embeddings request. Embeddings measure the * relatedness of text strings and are commonly used for search, clustering, recommendations, and other similar * scenarios. @@ -476,12 +478,12 @@ public Mono> getEmbeddingsWithResponseAsync( */ @ServiceMethod(returns = ReturnType.SINGLE) public Response getEmbeddingsWithResponse( - String deploymentId, BinaryData embeddingsOptions, RequestOptions requestOptions) { + String deploymentOrModelName, BinaryData embeddingsOptions, RequestOptions requestOptions) { final String accept = "application/json"; return service.getEmbeddingsSync( this.getEndpoint(), this.getServiceVersion().getVersion(), - deploymentId, + deploymentOrModelName, accept, embeddingsOptions, requestOptions, @@ -526,10 +528,25 @@ public Response getEmbeddingsWithResponse( * { * id: String (Required) * created: int (Required) + * prompt_annotations (Optional): [ + * (Optional){ + * prompt_index: int (Required) + * content_filter_results (Optional): { + * sexual (Required): { + * severity: String(safe/low/medium/high) (Required) + * filtered: boolean (Required) + * } + * violence (Required): (recursive schema, see violence above) + * hate (Required): (recursive schema, see hate above) + * self_harm (Required): (recursive schema, see self_harm above) + * } + * } + * ] * choices (Required): [ * (Required){ * text: String (Required) * index: int (Required) + * content_filter_results (Optional): (recursive schema, see content_filter_results above) * logprobs (Required): { * tokens (Required): [ * String (Required) @@ -546,7 +563,7 @@ public Response getEmbeddingsWithResponse( * int (Required) * ] * } - * finish_reason: String(stop/length/content_filter) (Required) + * finish_reason: String(stop/length/content_filter/function_call) (Required) * } * ] * usage (Required): { @@ -557,7 +574,8 @@ public Response getEmbeddingsWithResponse( * } * } * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param completionsOptions The configuration information for a completions request. Completions support a wide * variety of tasks and generate text that continues from or "completes" provided prompt data. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. 
@@ -571,14 +589,14 @@ public Response getEmbeddingsWithResponse( */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> getCompletionsWithResponseAsync( - String deploymentId, BinaryData completionsOptions, RequestOptions requestOptions) { + String deploymentOrModelName, BinaryData completionsOptions, RequestOptions requestOptions) { final String accept = "application/json"; return FluxUtil.withContext( context -> service.getCompletions( this.getEndpoint(), this.getServiceVersion().getVersion(), - deploymentId, + deploymentOrModelName, accept, completionsOptions, requestOptions, @@ -623,10 +641,25 @@ public Mono> getCompletionsWithResponseAsync( * { * id: String (Required) * created: int (Required) + * prompt_annotations (Optional): [ + * (Optional){ + * prompt_index: int (Required) + * content_filter_results (Optional): { + * sexual (Required): { + * severity: String(safe/low/medium/high) (Required) + * filtered: boolean (Required) + * } + * violence (Required): (recursive schema, see violence above) + * hate (Required): (recursive schema, see hate above) + * self_harm (Required): (recursive schema, see self_harm above) + * } + * } + * ] * choices (Required): [ * (Required){ * text: String (Required) * index: int (Required) + * content_filter_results (Optional): (recursive schema, see content_filter_results above) * logprobs (Required): { * tokens (Required): [ * String (Required) @@ -643,7 +676,7 @@ public Mono> getCompletionsWithResponseAsync( * int (Required) * ] * } - * finish_reason: String(stop/length/content_filter) (Required) + * finish_reason: String(stop/length/content_filter/function_call) (Required) * } * ] * usage (Required): { @@ -654,7 +687,8 @@ public Mono> getCompletionsWithResponseAsync( * } * } * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param completionsOptions The configuration information for a completions request. Completions support a wide * variety of tasks and generate text that continues from or "completes" provided prompt data. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. 
@@ -667,12 +701,12 @@ public Mono> getCompletionsWithResponseAsync( */ @ServiceMethod(returns = ReturnType.SINGLE) public Response getCompletionsWithResponse( - String deploymentId, BinaryData completionsOptions, RequestOptions requestOptions) { + String deploymentOrModelName, BinaryData completionsOptions, RequestOptions requestOptions) { final String accept = "application/json"; return service.getCompletionsSync( this.getEndpoint(), this.getServiceVersion().getVersion(), - deploymentId, + deploymentOrModelName, accept, completionsOptions, requestOptions, @@ -689,10 +723,23 @@ public Response getCompletionsWithResponse( * { * messages (Required): [ * (Required){ - * role: String(system/assistant/user) (Required) + * role: String(system/assistant/user/function) (Required) * content: String (Optional) + * name: String (Optional) + * function_call (Optional): { + * name: String (Required) + * arguments: String (Required) + * } + * } + * ] + * functions (Optional): [ + * (Optional){ + * name: String (Required) + * description: String (Optional) + * parameters: Object (Optional) * } * ] + * function_call: FunctionCallModelBase (Optional) * max_tokens: Integer (Optional) * temperature: Double (Optional) * top_p: Double (Optional) @@ -720,12 +767,32 @@ public Response getCompletionsWithResponse( * choices (Required): [ * (Required){ * message (Optional): { - * role: String(system/assistant/user) (Required) + * role: String(system/assistant/user/function) (Required) * content: String (Optional) + * name: String (Optional) + * function_call (Optional): { + * name: String (Required) + * arguments: String (Required) + * } * } * index: int (Required) - * finish_reason: String(stop/length/content_filter) (Required) + * finish_reason: String(stop/length/content_filter/function_call) (Required) * delta (Optional): (recursive schema, see delta above) + * content_filter_results (Optional): { + * sexual (Required): { + * severity: String(safe/low/medium/high) (Required) + * filtered: boolean (Required) + * } + * violence (Required): (recursive schema, see violence above) + * hate (Required): (recursive schema, see hate above) + * self_harm (Required): (recursive schema, see self_harm above) + * } + * } + * ] + * prompt_annotations (Optional): [ + * (Optional){ + * prompt_index: int (Required) + * content_filter_results (Optional): (recursive schema, see content_filter_results above) * } * ] * usage (Required): { @@ -736,7 +803,8 @@ public Response getCompletionsWithResponse( * } * } * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param chatCompletionsOptions The configuration information for a chat completions request. Completions support a * wide variety of tasks and generate text that continues from or "completes" provided prompt data. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. 
@@ -750,14 +818,14 @@ public Response getCompletionsWithResponse( */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> getChatCompletionsWithResponseAsync( - String deploymentId, BinaryData chatCompletionsOptions, RequestOptions requestOptions) { + String deploymentOrModelName, BinaryData chatCompletionsOptions, RequestOptions requestOptions) { final String accept = "application/json"; return FluxUtil.withContext( context -> service.getChatCompletions( this.getEndpoint(), this.getServiceVersion().getVersion(), - deploymentId, + deploymentOrModelName, accept, chatCompletionsOptions, requestOptions, @@ -774,10 +842,23 @@ public Mono> getChatCompletionsWithResponseAsync( * { * messages (Required): [ * (Required){ - * role: String(system/assistant/user) (Required) + * role: String(system/assistant/user/function) (Required) * content: String (Optional) + * name: String (Optional) + * function_call (Optional): { + * name: String (Required) + * arguments: String (Required) + * } * } * ] + * functions (Optional): [ + * (Optional){ + * name: String (Required) + * description: String (Optional) + * parameters: Object (Optional) + * } + * ] + * function_call: FunctionCallModelBase (Optional) * max_tokens: Integer (Optional) * temperature: Double (Optional) * top_p: Double (Optional) @@ -805,12 +886,32 @@ public Mono> getChatCompletionsWithResponseAsync( * choices (Required): [ * (Required){ * message (Optional): { - * role: String(system/assistant/user) (Required) + * role: String(system/assistant/user/function) (Required) * content: String (Optional) + * name: String (Optional) + * function_call (Optional): { + * name: String (Required) + * arguments: String (Required) + * } * } * index: int (Required) - * finish_reason: String(stop/length/content_filter) (Required) + * finish_reason: String(stop/length/content_filter/function_call) (Required) * delta (Optional): (recursive schema, see delta above) + * content_filter_results (Optional): { + * sexual (Required): { + * severity: String(safe/low/medium/high) (Required) + * filtered: boolean (Required) + * } + * violence (Required): (recursive schema, see violence above) + * hate (Required): (recursive schema, see hate above) + * self_harm (Required): (recursive schema, see self_harm above) + * } + * } + * ] + * prompt_annotations (Optional): [ + * (Optional){ + * prompt_index: int (Required) + * content_filter_results (Optional): (recursive schema, see content_filter_results above) * } * ] * usage (Required): { @@ -821,7 +922,8 @@ public Mono> getChatCompletionsWithResponseAsync( * } * } * - * @param deploymentId deployment id of the deployed model. + * @param deploymentOrModelName Specifies either the model deployment name (when using Azure OpenAI) or model name + * (when using non-Azure OpenAI) to use for this request. * @param chatCompletionsOptions The configuration information for a chat completions request. Completions support a * wide variety of tasks and generate text that continues from or "completes" provided prompt data. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. 
@@ -834,12 +936,12 @@ public Mono> getChatCompletionsWithResponseAsync( */ @ServiceMethod(returns = ReturnType.SINGLE) public Response getChatCompletionsWithResponse( - String deploymentId, BinaryData chatCompletionsOptions, RequestOptions requestOptions) { + String deploymentOrModelName, BinaryData chatCompletionsOptions, RequestOptions requestOptions) { final String accept = "application/json"; return service.getChatCompletionsSync( this.getEndpoint(), this.getServiceVersion().getVersion(), - deploymentId, + deploymentOrModelName, accept, chatCompletionsOptions, requestOptions, diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatChoice.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatChoice.java index 9ae3774df6df1..c75b16f0e4cb3 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatChoice.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatChoice.java @@ -98,4 +98,25 @@ public CompletionsFinishReason getFinishReason() { public ChatMessage getDelta() { return this.delta; } + + /* + * Information about the content filtering category (hate, sexual, violence, self_harm), if it + * has been detected, as well as the severity level (very_low, low, medium, high-scale that + * determines the intensity and risk level of harmful content) and if it has been filtered or not. + */ + @Generated + @JsonProperty(value = "content_filter_results") + private ContentFilterResults contentFilterResults; + + /** + * Get the contentFilterResults property: Information about the content filtering category (hate, sexual, violence, + * self_harm), if it has been detected, as well as the severity level (very_low, low, medium, high-scale that + * determines the intensity and risk level of harmful content) and if it has been filtered or not. + * + * @return the contentFilterResults value. + */ + @Generated + public ContentFilterResults getContentFilterResults() { + return this.contentFilterResults; + } } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatCompletions.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatCompletions.java index 431e79f27c63f..d1703563596ba 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatCompletions.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatCompletions.java @@ -23,14 +23,6 @@ public final class ChatCompletions { @JsonProperty(value = "id") private String id; - /* - * The first timestamp associated with generation activity for this completions response, - * represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. - */ - @Generated - @JsonProperty(value = "created") - private int created; - /* * The collection of completions choices associated with this completions response. * Generally, `n` choices are generated per provided prompt with a default value of 1. @@ -51,7 +43,7 @@ public final class ChatCompletions { * Creates an instance of ChatCompletions class. * * @param id the id value to set. - * @param created the created value to set. + * @param createdAt the createdAt value to set. * @param choices the choices value to set. * @param usage the usage value to set. 
*/ @@ -59,11 +51,11 @@ public final class ChatCompletions { @JsonCreator private ChatCompletions( @JsonProperty(value = "id") String id, - @JsonProperty(value = "created") int created, + @JsonProperty(value = "created") int createdAt, @JsonProperty(value = "choices") List choices, @JsonProperty(value = "usage") CompletionsUsage usage) { this.id = id; - this.created = created; + this.createdAt = createdAt; this.choices = choices; this.usage = usage; } @@ -78,17 +70,6 @@ public String getId() { return this.id; } - /** - * Get the created property: The first timestamp associated with generation activity for this completions response, - * represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. - * - * @return the created value. - */ - @Generated - public int getCreated() { - return this.created; - } - /** * Get the choices property: The collection of completions choices associated with this completions response. * Generally, `n` choices are generated per provided prompt with a default value of 1. Token limits and other @@ -111,4 +92,42 @@ public List getChoices() { public CompletionsUsage getUsage() { return this.usage; } + + /* + * The first timestamp associated with generation activity for this completions response, + * represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. + */ + @Generated + @JsonProperty(value = "created") + private int createdAt; + + /* + * Content filtering results for zero or more prompts in the request. In a streaming request, + * results for different prompts may arrive at different times or in different orders. + */ + @Generated + @JsonProperty(value = "prompt_annotations") + private List promptFilterResults; + + /** + * Get the createdAt property: The first timestamp associated with generation activity for this completions + * response, represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. + * + * @return the createdAt value. + */ + @Generated + public int getCreatedAt() { + return this.createdAt; + } + + /** + * Get the promptFilterResults property: Content filtering results for zero or more prompts in the request. In a + * streaming request, results for different prompts may arrive at different times or in different orders. + * + * @return the promptFilterResults value. + */ + @Generated + public List getPromptFilterResults() { + return this.promptFilterResults; + } } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatCompletionsOptions.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatCompletionsOptions.java index 7df28d9666c77..eaf04039f1d6a 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatCompletionsOptions.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatCompletionsOptions.java @@ -429,4 +429,71 @@ public ChatCompletionsOptions setModel(String model) { this.model = model; return this; } + + /* + * A list of functions the model may generate JSON inputs for. + */ + @Generated + @JsonProperty(value = "functions") + private List functions; + + /* + * Controls how the model responds to function calls. "none" means the model does not call a function, + * and responds to the end-user. "auto" means the model can pick between an end-user or calling a function. + * Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. + * "none" is the default when no functions are present. 
"auto" is the default if functions are present. + */ + @Generated + @JsonProperty(value = "function_call") + private FunctionCallModelBase functionCall; + + /** + * Get the functions property: A list of functions the model may generate JSON inputs for. + * + * @return the functions value. + */ + @Generated + public List getFunctions() { + return this.functions; + } + + /** + * Set the functions property: A list of functions the model may generate JSON inputs for. + * + * @param functions the functions value to set. + * @return the ChatCompletionsOptions object itself. + */ + @Generated + public ChatCompletionsOptions setFunctions(List functions) { + this.functions = functions; + return this; + } + + /** + * Get the functionCall property: Controls how the model responds to function calls. "none" means the model does not + * call a function, and responds to the end-user. "auto" means the model can pick between an end-user or calling a + * function. Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. + * "none" is the default when no functions are present. "auto" is the default if functions are present. + * + * @return the functionCall value. + */ + @Generated + public FunctionCallModelBase getFunctionCall() { + return this.functionCall; + } + + /** + * Set the functionCall property: Controls how the model responds to function calls. "none" means the model does not + * call a function, and responds to the end-user. "auto" means the model can pick between an end-user or calling a + * function. Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. + * "none" is the default when no functions are present. "auto" is the default if functions are present. + * + * @param functionCall the functionCall value to set. + * @return the ChatCompletionsOptions object itself. + */ + @Generated + public ChatCompletionsOptions setFunctionCall(FunctionCallModelBase functionCall) { + this.functionCall = functionCall; + return this; + } } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatMessage.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatMessage.java index 2ad4cfd18b972..2594743f63f79 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatMessage.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatMessage.java @@ -68,4 +68,72 @@ public ChatMessage setContent(String content) { this.content = content; return this; } + + /* + * The name of the author of this message. `name` is required if role is `function`, and it should be the name of + * the + * function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length + * of + * 64 characters. + */ + @Generated + @JsonProperty(value = "name") + private String name; + + /* + * The name and arguments of a function that should be called, as generated by the model. + */ + @Generated + @JsonProperty(value = "function_call") + private FunctionCall functionCall; + + /** + * Get the name property: The name of the author of this message. `name` is required if role is `function`, and it + * should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and + * underscores, with a maximum length of 64 characters. + * + * @return the name value. 
+ */ + @Generated + public String getName() { + return this.name; + } + + /** + * Set the name property: The name of the author of this message. `name` is required if role is `function`, and it + * should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and + * underscores, with a maximum length of 64 characters. + * + * @param name the name value to set. + * @return the ChatMessage object itself. + */ + @Generated + public ChatMessage setName(String name) { + this.name = name; + return this; + } + + /** + * Get the functionCall property: The name and arguments of a function that should be called, as generated by the + * model. + * + * @return the functionCall value. + */ + @Generated + public FunctionCall getFunctionCall() { + return this.functionCall; + } + + /** + * Set the functionCall property: The name and arguments of a function that should be called, as generated by the + * model. + * + * @param functionCall the functionCall value to set. + * @return the ChatMessage object itself. + */ + @Generated + public ChatMessage setFunctionCall(FunctionCall functionCall) { + this.functionCall = functionCall; + return this; + } } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatRole.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatRole.java index dcf8fe55f839d..621ea748768e1 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatRole.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ChatRole.java @@ -50,4 +50,7 @@ public static ChatRole fromString(String name) { public static Collection values() { return values(ChatRole.class); } + + /** The role that provides function results for char completions. */ + @Generated public static final ChatRole FUNCTION = fromString("function"); } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/Choice.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/Choice.java index e4c625edd3060..a0b95afde1c64 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/Choice.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/Choice.java @@ -104,4 +104,25 @@ public CompletionsLogProbabilityModel getLogprobs() { public CompletionsFinishReason getFinishReason() { return this.finishReason; } + + /* + * Information about the content filtering category (hate, sexual, violence, self_harm), if it + * has been detected, as well as the severity level (very_low, low, medium, high-scale that + * determines the intensity and risk level of harmful content) and if it has been filtered or not. + */ + @Generated + @JsonProperty(value = "content_filter_results") + private ContentFilterResults contentFilterResults; + + /** + * Get the contentFilterResults property: Information about the content filtering category (hate, sexual, violence, + * self_harm), if it has been detected, as well as the severity level (very_low, low, medium, high-scale that + * determines the intensity and risk level of harmful content) and if it has been filtered or not. + * + * @return the contentFilterResults value. 
+ */ + @Generated + public ContentFilterResults getContentFilterResults() { + return this.contentFilterResults; + } } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/Completions.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/Completions.java index 627fa22219fa6..51a9faa84c1ce 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/Completions.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/Completions.java @@ -111,4 +111,23 @@ public List getChoices() { public CompletionsUsage getUsage() { return this.usage; } + + /* + * Content filtering results for zero or more prompts in the request. In a streaming request, + * results for different prompts may arrive at different times or in different orders. + */ + @Generated + @JsonProperty(value = "prompt_annotations") + private List promptFilterResults; + + /** + * Get the promptFilterResults property: Content filtering results for zero or more prompts in the request. In a + * streaming request, results for different prompts may arrive at different times or in different orders. + * + * @return the promptFilterResults value. + */ + @Generated + public List getPromptFilterResults() { + return this.promptFilterResults; + } } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/CompletionsFinishReason.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/CompletionsFinishReason.java index c267098eca5b4..b16c05f00c3fc 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/CompletionsFinishReason.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/CompletionsFinishReason.java @@ -52,4 +52,7 @@ public static CompletionsFinishReason fromString(String name) { public static Collection values() { return values(CompletionsFinishReason.class); } + + /** Completion ended normally, with the model requesting a function to be called. */ + @Generated public static final CompletionsFinishReason FUNCTION_CALL = fromString("function_call"); } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/CompletionsLogProbabilityModel.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/CompletionsLogProbabilityModel.java index af195175a85e2..52f10e15a54ad 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/CompletionsLogProbabilityModel.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/CompletionsLogProbabilityModel.java @@ -21,46 +21,25 @@ public final class CompletionsLogProbabilityModel { @JsonProperty(value = "tokens") private List tokens; - /* - * A collection of log probability values for the tokens in this completions data. - */ - @Generated - @JsonProperty(value = "token_logprobs") - private List tokenLogprobs; - - /* - * A mapping of tokens to maximum log probability values in this completions data. - */ - @Generated - @JsonProperty(value = "top_logprobs") - private List> topLogprobs; - - /* - * The text offsets associated with tokens in this completions data. - */ - @Generated - @JsonProperty(value = "text_offset") - private List textOffset; - /** * Creates an instance of CompletionsLogProbabilityModel class. * * @param tokens the tokens value to set. - * @param tokenLogprobs the tokenLogprobs value to set. - * @param topLogprobs the topLogprobs value to set. - * @param textOffset the textOffset value to set. 
+ * @param tokenLogProbabilities the tokenLogProbabilities value to set. + * @param topLogProbabilities the topLogProbabilities value to set. + * @param textOffsets the textOffsets value to set. */ @Generated @JsonCreator private CompletionsLogProbabilityModel( @JsonProperty(value = "tokens") List tokens, - @JsonProperty(value = "token_logprobs") List tokenLogprobs, - @JsonProperty(value = "top_logprobs") List> topLogprobs, - @JsonProperty(value = "text_offset") List textOffset) { + @JsonProperty(value = "token_logprobs") List tokenLogProbabilities, + @JsonProperty(value = "top_logprobs") List> topLogProbabilities, + @JsonProperty(value = "text_offset") List textOffsets) { this.tokens = tokens; - this.tokenLogprobs = tokenLogprobs; - this.topLogprobs = topLogprobs; - this.textOffset = textOffset; + this.tokenLogProbabilities = tokenLogProbabilities; + this.topLogProbabilities = topLogProbabilities; + this.textOffsets = textOffsets; } /** @@ -73,33 +52,56 @@ public List getTokens() { return this.tokens; } + /* + * A collection of log probability values for the tokens in this completions data. + */ + @Generated + @JsonProperty(value = "token_logprobs") + private List tokenLogProbabilities; + + /* + * A mapping of tokens to maximum log probability values in this completions data. + */ + @Generated + @JsonProperty(value = "top_logprobs") + private List> topLogProbabilities; + + /* + * The text offsets associated with tokens in this completions data. + */ + @Generated + @JsonProperty(value = "text_offset") + private List textOffsets; + /** - * Get the tokenLogprobs property: A collection of log probability values for the tokens in this completions data. + * Get the tokenLogProbabilities property: A collection of log probability values for the tokens in this completions + * data. * - * @return the tokenLogprobs value. + * @return the tokenLogProbabilities value. */ @Generated - public List getTokenLogprobs() { - return this.tokenLogprobs; + public List getTokenLogProbabilities() { + return this.tokenLogProbabilities; } /** - * Get the topLogprobs property: A mapping of tokens to maximum log probability values in this completions data. + * Get the topLogProbabilities property: A mapping of tokens to maximum log probability values in this completions + * data. * - * @return the topLogprobs value. + * @return the topLogProbabilities value. */ @Generated - public List> getTopLogprobs() { - return this.topLogprobs; + public List> getTopLogProbabilities() { + return this.topLogProbabilities; } /** - * Get the textOffset property: The text offsets associated with tokens in this completions data. + * Get the textOffsets property: The text offsets associated with tokens in this completions data. * - * @return the textOffset value. + * @return the textOffsets value. */ @Generated - public List getTextOffset() { - return this.textOffset; + public List getTextOffsets() { + return this.textOffsets; } } diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterResult.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterResult.java new file mode 100644 index 0000000000000..0ea4867d3bb85 --- /dev/null +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterResult.java @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
+ +package com.azure.ai.openai.models; + +import com.azure.core.annotation.Generated; +import com.azure.core.annotation.Immutable; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Information about filtered content severity level and if it has been filtered or not. */ +@Immutable +public final class ContentFilterResult { + /* + * Ratings for the intensity and risk level of filtered content. + */ + @Generated + @JsonProperty(value = "severity") + private ContentFilterSeverity severity; + + /* + * A value indicating whether or not the content has been filtered. + */ + @Generated + @JsonProperty(value = "filtered") + private boolean filtered; + + /** + * Creates an instance of ContentFilterResult class. + * + * @param severity the severity value to set. + * @param filtered the filtered value to set. + */ + @Generated + @JsonCreator + private ContentFilterResult( + @JsonProperty(value = "severity") ContentFilterSeverity severity, + @JsonProperty(value = "filtered") boolean filtered) { + this.severity = severity; + this.filtered = filtered; + } + + /** + * Get the severity property: Ratings for the intensity and risk level of filtered content. + * + * @return the severity value. + */ + @Generated + public ContentFilterSeverity getSeverity() { + return this.severity; + } + + /** + * Get the filtered property: A value indicating whether or not the content has been filtered. + * + * @return the filtered value. + */ + @Generated + public boolean isFiltered() { + return this.filtered; + } +} diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterResults.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterResults.java new file mode 100644 index 0000000000000..24be264d2dd6e --- /dev/null +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterResults.java @@ -0,0 +1,119 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.ai.openai.models; + +import com.azure.core.annotation.Generated; +import com.azure.core.annotation.Immutable; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Information about the content filtering category, if it has been detected. */ +@Immutable +public final class ContentFilterResults { + /* + * Describes language related to anatomical organs and genitals, romantic relationships, + * acts portrayed in erotic or affectionate terms, physical sexual acts, including + * those portrayed as an assault or a forced sexual violent act against one’s will, + * prostitution, pornography, and abuse. + */ + @Generated + @JsonProperty(value = "sexual") + private ContentFilterResult sexual; + + /* + * Describes language related to physical actions intended to hurt, injure, damage, or + * kill someone or something; describes weapons, etc. 
+ */ + @Generated + @JsonProperty(value = "violence") + private ContentFilterResult violence; + + /* + * Describes language attacks or uses that include pejorative or discriminatory language + * with reference to a person or identity group on the basis of certain differentiating + * attributes of these groups including but not limited to race, ethnicity, nationality, + * gender identity and expression, sexual orientation, religion, immigration status, ability + * status, personal appearance, and body size. + */ + @Generated + @JsonProperty(value = "hate") + private ContentFilterResult hate; + + /* + * Describes language related to physical actions intended to purposely hurt, injure, + * or damage one’s body, or kill oneself. + */ + @Generated + @JsonProperty(value = "self_harm") + private ContentFilterResult selfHarm; + + /** + * Creates an instance of ContentFilterResults class. + * + * @param sexual the sexual value to set. + * @param violence the violence value to set. + * @param hate the hate value to set. + * @param selfHarm the selfHarm value to set. + */ + @Generated + @JsonCreator + private ContentFilterResults( + @JsonProperty(value = "sexual") ContentFilterResult sexual, + @JsonProperty(value = "violence") ContentFilterResult violence, + @JsonProperty(value = "hate") ContentFilterResult hate, + @JsonProperty(value = "self_harm") ContentFilterResult selfHarm) { + this.sexual = sexual; + this.violence = violence; + this.hate = hate; + this.selfHarm = selfHarm; + } + + /** + * Get the sexual property: Describes language related to anatomical organs and genitals, romantic relationships, + * acts portrayed in erotic or affectionate terms, physical sexual acts, including those portrayed as an assault or + * a forced sexual violent act against one’s will, prostitution, pornography, and abuse. + * + * @return the sexual value. + */ + @Generated + public ContentFilterResult getSexual() { + return this.sexual; + } + + /** + * Get the violence property: Describes language related to physical actions intended to hurt, injure, damage, or + * kill someone or something; describes weapons, etc. + * + * @return the violence value. + */ + @Generated + public ContentFilterResult getViolence() { + return this.violence; + } + + /** + * Get the hate property: Describes language attacks or uses that include pejorative or discriminatory language with + * reference to a person or identity group on the basis of certain differentiating attributes of these groups + * including but not limited to race, ethnicity, nationality, gender identity and expression, sexual orientation, + * religion, immigration status, ability status, personal appearance, and body size. + * + * @return the hate value. + */ + @Generated + public ContentFilterResult getHate() { + return this.hate; + } + + /** + * Get the selfHarm property: Describes language related to physical actions intended to purposely hurt, injure, or + * damage one’s body, or kill oneself. + * + * @return the selfHarm value. + */ + @Generated + public ContentFilterResult getSelfHarm() { + return this.selfHarm; + } +} diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterSeverity.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterSeverity.java new file mode 100644 index 0000000000000..5f636a7b3a92a --- /dev/null +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/ContentFilterSeverity.java @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.ai.openai.models; + +import com.azure.core.annotation.Generated; +import com.azure.core.util.ExpandableStringEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.util.Collection; + +/** Ratings for the intensity and risk level of harmful content. */ +public final class ContentFilterSeverity extends ExpandableStringEnum { + /** + * Content may be related to violence, self-harm, sexual, or hate categories but the terms are used in general, + * journalistic, scientific, medical, and similar professional contexts, which are appropriate for most audiences. + */ + @Generated public static final ContentFilterSeverity SAFE = fromString("safe"); + + /** + * Content that expresses prejudiced, judgmental, or opinionated views, includes offensive use of language, + * stereotyping, use cases exploring a fictional world (for example, gaming, literature) and depictions at low + * intensity. + */ + @Generated public static final ContentFilterSeverity LOW = fromString("low"); + + /** + * Content that uses offensive, insulting, mocking, intimidating, or demeaning language towards specific identity + * groups, includes depictions of seeking and executing harmful instructions, fantasies, glorification, promotion of + * harm at medium intensity. + */ + @Generated public static final ContentFilterSeverity MEDIUM = fromString("medium"); + + /** + * Content that displays explicit and severe harmful instructions, actions, damage, or abuse; includes endorsement, + * glorification, or promotion of severe harmful acts, extreme or illegal forms of harm, radicalization, or + * non-consensual power exchange or abuse. + */ + @Generated public static final ContentFilterSeverity HIGH = fromString("high"); + + /** + * Creates a new instance of ContentFilterSeverity value. + * + * @deprecated Use the {@link #fromString(String)} factory method. + */ + @Generated + @Deprecated + public ContentFilterSeverity() {} + + /** + * Creates or finds a ContentFilterSeverity from its string representation. + * + * @param name a name to look for. + * @return the corresponding ContentFilterSeverity. + */ + @Generated + @JsonCreator + public static ContentFilterSeverity fromString(String name) { + return fromString(name, ContentFilterSeverity.class); + } + + /** + * Gets known ContentFilterSeverity values. + * + * @return known ContentFilterSeverity values. + */ + @Generated + public static Collection values() { + return values(ContentFilterSeverity.class); + } +} diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/EmbeddingItem.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/EmbeddingItem.java index 4403acc88ec7f..307130d189e48 100644 --- a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/EmbeddingItem.java +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/EmbeddingItem.java @@ -21,25 +21,18 @@ public final class EmbeddingItem { @JsonProperty(value = "embedding") private List embedding; - /* - * Index of the prompt to which the EmbeddingItem corresponds. - */ - @Generated - @JsonProperty(value = "index") - private int index; - /** * Creates an instance of EmbeddingItem class. * * @param embedding the embedding value to set. - * @param index the index value to set. + * @param promptIndex the promptIndex value to set. 
 */
 @Generated
 @JsonCreator
 private EmbeddingItem(
- @JsonProperty(value = "embedding") List<Double> embedding, @JsonProperty(value = "index") int index) {
+ @JsonProperty(value = "embedding") List<Double> embedding, @JsonProperty(value = "index") int promptIndex) {
 this.embedding = embedding;
- this.index = index;
+ this.promptIndex = promptIndex;
 }

 /**
@@ -53,13 +46,20 @@ public List<Double> getEmbedding() {
 return this.embedding;
 }

+ /*
+ * Index of the prompt to which the EmbeddingItem corresponds.
+ */
+ @Generated
+ @JsonProperty(value = "index")
+ private int promptIndex;
+
 /**
- * Get the index property: Index of the prompt to which the EmbeddingItem corresponds.
+ * Get the promptIndex property: Index of the prompt to which the EmbeddingItem corresponds.
 *
- * @return the index value.
+ * @return the promptIndex value.
 */
 @Generated
- public int getIndex() {
- return this.index;
+ public int getPromptIndex() {
+ return this.promptIndex;
 }
 }
diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCall.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCall.java
new file mode 100644
index 0000000000000..0d87182bd5865
--- /dev/null
+++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCall.java
@@ -0,0 +1,67 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.ai.openai.models;
+
+import com.azure.core.annotation.Generated;
+import com.azure.core.annotation.Immutable;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/** The name and arguments of a function that should be called, as generated by the model. */
+@Immutable
+public final class FunctionCall {
+ /*
+ * The name of the function to call.
+ */
+ @Generated
+ @JsonProperty(value = "name")
+ private String name;
+
+ /*
+ * The arguments to call the function with, as generated by the model in JSON format.
+ * Note that the model does not always generate valid JSON, and may hallucinate parameters
+ * not defined by your function schema. Validate the arguments in your code before calling
+ * your function.
+ */
+ @Generated
+ @JsonProperty(value = "arguments")
+ private String arguments;
+
+ /**
+ * Creates an instance of FunctionCall class.
+ *
+ * @param name the name value to set.
+ * @param arguments the arguments value to set.
+ */
+ @Generated
+ @JsonCreator
+ public FunctionCall(
+ @JsonProperty(value = "name") String name, @JsonProperty(value = "arguments") String arguments) {
+ this.name = name;
+ this.arguments = arguments;
+ }
+
+ /**
+ * Get the name property: The name of the function to call.
+ *
+ * @return the name value.
+ */
+ @Generated
+ public String getName() {
+ return this.name;
+ }
+
+ /**
+ * Get the arguments property: The arguments to call the function with, as generated by the model in JSON format.
+ * Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your
+ * function schema. Validate the arguments in your code before calling your function.
+ *
+ * @return the arguments value.
+ */
+ @Generated
+ public String getArguments() {
+ return this.arguments;
+ }
+}
diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallModelBase.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallModelBase.java
new file mode 100644
index 0000000000000..25beabc48f373
--- /dev/null
+++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallModelBase.java
@@ -0,0 +1,11 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.ai.openai.models;
+
+/** The FunctionCallModelBase model. */
+public abstract class FunctionCallModelBase {
+ /** Creates an instance of FunctionCallModelBase class. */
+ protected FunctionCallModelBase() {}
+}
diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallPreset.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallPreset.java
new file mode 100644
index 0000000000000..e3b6931417246
--- /dev/null
+++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallPreset.java
@@ -0,0 +1,59 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.ai.openai.models;
+
+import com.azure.core.annotation.Generated;
+import com.azure.core.util.ExpandableStringEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import java.util.Collection;
+
+/**
+ * The collection of predefined behaviors for handling request-provided function information in a chat completions
+ * operation.
+ */
+public final class FunctionCallPreset extends ExpandableStringEnum<FunctionCallPreset> {
+ /**
+ * Specifies that the model may either use any of the functions provided in this chat completions request or instead
+ * return a standard chat completions response as if no functions were provided.
+ */
+ @Generated public static final FunctionCallPreset AUTO = fromString("auto");
+
+ /**
+ * Specifies that the model should not respond with a function call and should instead provide a standard chat
+ * completions response. Response content may still be influenced by the provided function information.
+ */
+ @Generated public static final FunctionCallPreset NONE = fromString("none");
+
+ /**
+ * Creates a new instance of FunctionCallPreset value.
+ *
+ * @deprecated Use the {@link #fromString(String)} factory method.
+ */
+ @Generated
+ @Deprecated
+ public FunctionCallPreset() {}
+
+ /**
+ * Creates or finds a FunctionCallPreset from its string representation.
+ *
+ * @param name a name to look for.
+ * @return the corresponding FunctionCallPreset.
+ */
+ @Generated
+ @JsonCreator
+ public static FunctionCallPreset fromString(String name) {
+ return fromString(name, FunctionCallPreset.class);
+ }
+
+ /**
+ * Gets known FunctionCallPreset values.
+ *
+ * @return known FunctionCallPreset values.
+ */
+ @Generated
+ public static Collection<FunctionCallPreset> values() {
+ return values(FunctionCallPreset.class);
+ }
+}
diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallPresetFunctionCallModel.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallPresetFunctionCallModel.java
new file mode 100644
index 0000000000000..9bcabdf0def9f
--- /dev/null
+++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionCallPresetFunctionCallModel.java
@@ -0,0 +1,33 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.ai.openai.models;
+
+import com.azure.core.annotation.Immutable;
+import com.fasterxml.jackson.annotation.JsonValue;
+
+/** The FunctionCallPresetFunctionCallModel model. */
+@Immutable
+public final class FunctionCallPresetFunctionCallModel extends FunctionCallModelBase {
+ private final FunctionCallPreset value;
+
+ /**
+ * Creates an instance of FunctionCallPresetFunctionCallModel class.
+ *
+ * @param value the value.
+ */
+ public FunctionCallPresetFunctionCallModel(FunctionCallPreset value) {
+ this.value = value;
+ }
+
+ /**
+ * Gets the value.
+ *
+ * @return the value.
+ */
+ @JsonValue
+ public FunctionCallPreset getValue() {
+ return this.value;
+ }
+}
diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionDefinition.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionDefinition.java
new file mode 100644
index 0000000000000..379a5adb4ebae
--- /dev/null
+++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionDefinition.java
@@ -0,0 +1,105 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.ai.openai.models;
+
+import com.azure.core.annotation.Fluent;
+import com.azure.core.annotation.Generated;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * The definition of a caller-specified function that chat completions may invoke in response to matching user input.
+ */
+@Fluent
+public final class FunctionDefinition {
+ /*
+ * The name of the function to be called.
+ */
+ @Generated
+ @JsonProperty(value = "name")
+ private String name;
+
+ /*
+ * A description of what the function does. The model will use this description when selecting the function and
+ * interpreting its parameters.
+ */
+ @Generated
+ @JsonProperty(value = "description")
+ private String description;
+
+ /*
+ * The parameters the function accepts, described as a JSON Schema object.
+ */
+ @Generated
+ @JsonProperty(value = "parameters")
+ private Object parameters;
+
+ /**
+ * Creates an instance of FunctionDefinition class.
+ *
+ * @param name the name value to set.
+ */
+ @Generated
+ @JsonCreator
+ public FunctionDefinition(@JsonProperty(value = "name") String name) {
+ this.name = name;
+ }
+
+ /**
+ * Get the name property: The name of the function to be called.
+ *
+ * @return the name value.
+ */
+ @Generated
+ public String getName() {
+ return this.name;
+ }
+
+ /**
+ * Get the description property: A description of what the function does. The model will use this description when
+ * selecting the function and interpreting its parameters.
+ *
+ * @return the description value.
+ */
+ @Generated
+ public String getDescription() {
+ return this.description;
+ }
+
+ /**
+ * Set the description property: A description of what the function does. The model will use this description when
+ * selecting the function and interpreting its parameters.
+ *
+ * @param description the description value to set.
+ * @return the FunctionDefinition object itself.
+ */
+ @Generated
+ public FunctionDefinition setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ /**
+ * Get the parameters property: The parameters the function accepts, described as a JSON Schema object.
+ *
+ * @return the parameters value.
+ */
+ @Generated
+ public Object getParameters() {
+ return this.parameters;
+ }
+
+ /**
+ * Set the parameters property: The parameters the function accepts, described as a JSON Schema object.
+ *
+ * @param parameters the parameters value to set.
+ * @return the FunctionDefinition object itself.
+ */
+ @Generated
+ public FunctionDefinition setParameters(Object parameters) {
+ this.parameters = parameters;
+ return this;
+ }
+}
diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionName.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionName.java
new file mode 100644
index 0000000000000..2db5ead3f1cee
--- /dev/null
+++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionName.java
@@ -0,0 +1,45 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.ai.openai.models;
+
+import com.azure.core.annotation.Generated;
+import com.azure.core.annotation.Immutable;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * A structure that specifies the exact name of a specific, request-provided function to use when processing a chat
+ * completions operation.
+ */
+@Immutable
+public final class FunctionName {
+ /*
+ * The name of the function to call.
+ */
+ @Generated
+ @JsonProperty(value = "name")
+ private String name;
+
+ /**
+ * Creates an instance of FunctionName class.
+ *
+ * @param name the name value to set.
+ */
+ @Generated
+ @JsonCreator
+ public FunctionName(@JsonProperty(value = "name") String name) {
+ this.name = name;
+ }
+
+ /**
+ * Get the name property: The name of the function to call.
+ *
+ * @return the name value.
+ */
+ @Generated
+ public String getName() {
+ return this.name;
+ }
+}
diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionNameFunctionCallModel.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionNameFunctionCallModel.java
new file mode 100644
index 0000000000000..df40077ac3ea9
--- /dev/null
+++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/FunctionNameFunctionCallModel.java
@@ -0,0 +1,33 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.ai.openai.models;
+
+import com.azure.core.annotation.Immutable;
+import com.fasterxml.jackson.annotation.JsonValue;
+
+/** The FunctionNameFunctionCallModel model. */
+@Immutable
+public final class FunctionNameFunctionCallModel extends FunctionCallModelBase {
+ private final FunctionName value;
+
+ /**
+ * Creates an instance of FunctionNameFunctionCallModel class.
+ * + * @param value the value. + */ + public FunctionNameFunctionCallModel(FunctionName value) { + this.value = value; + } + + /** + * Gets the value. + * + * @return the value. + */ + @JsonValue + public FunctionName getValue() { + return this.value; + } +} diff --git a/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/PromptFilterResult.java b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/PromptFilterResult.java new file mode 100644 index 0000000000000..2100c57381b54 --- /dev/null +++ b/sdk/openai/azure-ai-openai/src/main/java/com/azure/ai/openai/models/PromptFilterResult.java @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.ai.openai.models; + +import com.azure.core.annotation.Generated; +import com.azure.core.annotation.Immutable; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Content filtering results for a single prompt in the request. */ +@Immutable +public final class PromptFilterResult { + /* + * The index of this prompt in the set of prompt results + */ + @Generated + @JsonProperty(value = "prompt_index") + private int promptIndex; + + /* + * Content filtering results for this prompt + */ + @Generated + @JsonProperty(value = "content_filter_results") + private ContentFilterResults contentFilterResults; + + /** + * Creates an instance of PromptFilterResult class. + * + * @param promptIndex the promptIndex value to set. + */ + @Generated + @JsonCreator + private PromptFilterResult(@JsonProperty(value = "prompt_index") int promptIndex) { + this.promptIndex = promptIndex; + } + + /** + * Get the promptIndex property: The index of this prompt in the set of prompt results. + * + * @return the promptIndex value. + */ + @Generated + public int getPromptIndex() { + return this.promptIndex; + } + + /** + * Get the contentFilterResults property: Content filtering results for this prompt. + * + * @return the contentFilterResults value. + */ + @Generated + public ContentFilterResults getContentFilterResults() { + return this.contentFilterResults; + } +} diff --git a/sdk/openai/azure-ai-openai/tsp-location.yaml b/sdk/openai/azure-ai-openai/tsp-location.yaml index 4eba2fde995ae..e220fa95427c9 100644 --- a/sdk/openai/azure-ai-openai/tsp-location.yaml +++ b/sdk/openai/azure-ai-openai/tsp-location.yaml @@ -1,5 +1,5 @@ -directory: specification/cognitiveservices/OpenAI.Inference -additionalDirectories: - - specification/cognitiveservices/OpenAI.Authoring -commit: db23874c147db173933476b3b352cbf12abe84a9 +commit: 05f9c3e9728e21358db1a5b11adc4ea62679dfca repo: Azure/azure-rest-api-specs +directory: specification/cognitiveservices/OpenAI.Inference +additionalDirectories: [] +
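Usage sketch (reviewer note, not part of the generated patch): a minimal example of how the new function-calling and content-filter models are expected to compose. Accessors that are not visible in this excerpt (ChatCompletionsOptions.setFunctions and setFunctionCall, ChatMessage.getFunctionCall, ChatCompletions.getPromptFilterResults, ContentFilterResult.getSeverity) are assumptions based on the file list in this patch and the usual Azure SDK naming; verify them against the generated sources before relying on this snippet. The deployment name, function name, and JSON Schema are placeholders.

import com.azure.ai.openai.OpenAIClient;
import com.azure.ai.openai.OpenAIClientBuilder;
import com.azure.ai.openai.models.*;
import com.azure.core.credential.AzureKeyCredential;
import java.util.Arrays;
import java.util.Map;

public final class FunctionCallSketch {
    public static void main(String[] args) {
        OpenAIClient client = new OpenAIClientBuilder()
                .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
                .credential(new AzureKeyCredential(System.getenv("AZURE_OPENAI_KEY")))
                .buildClient();

        // Describe a function the model may choose to call; name and schema are illustrative only.
        FunctionDefinition getWeather = new FunctionDefinition("get_current_weather")
                .setDescription("Gets the current weather for a location.")
                .setParameters(Map.of(
                        "type", "object",
                        "properties", Map.of("location", Map.of("type", "string"))));

        ChatCompletionsOptions options = new ChatCompletionsOptions(
                Arrays.asList(new ChatMessage(ChatRole.USER).setContent("What is the weather in Seattle?")));
        options.setFunctions(Arrays.asList(getWeather)); // assumed setter added elsewhere in this PR
        options.setFunctionCall(new FunctionCallPresetFunctionCallModel(FunctionCallPreset.AUTO)); // assumed setter

        ChatCompletions completions = client.getChatCompletions("my-gpt-35-turbo", options);

        // When the model elects to call a function, its name and JSON arguments arrive on the message;
        // the arguments string may be invalid JSON and should be validated before use.
        FunctionCall call = completions.getChoices().get(0).getMessage().getFunctionCall(); // assumed getter
        if (call != null) {
            System.out.println(call.getName() + " -> " + call.getArguments());
        }

        // Prompt-level content filter annotations, when the service returns them.
        for (PromptFilterResult result : completions.getPromptFilterResults()) { // assumed getter
            ContentFilterResults filters = result.getContentFilterResults();
            System.out.println(result.getPromptIndex() + " hate severity: "
                    + filters.getHate().getSeverity()); // getSeverity assumed on ContentFilterResult
        }
    }
}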