diff --git a/clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts b/clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts index c4279e062dc2..66a8dd6222f9 100644 --- a/clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts +++ b/clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts @@ -91,7 +91,6 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB * text: { // PromptModelInferenceConfiguration * temperature: Number("float"), * topP: Number("float"), - * topK: Number("int"), * maxTokens: Number("int"), * stopSequences: [ // StopSequences * "STRING_VALUE", @@ -221,7 +220,6 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts b/clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts index 92a01e0dfe15..4c3ac5d9accd 100644 --- a/clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts +++ b/clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts @@ -102,7 +102,6 @@ export interface CreateFlowVersionCommandOutput extends CreateFlowVersionRespons * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts b/clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts index d2b0b6b01480..cf4443c9d204 100644 --- a/clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts +++ b/clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts @@ -64,7 +64,6 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad * text: { // PromptModelInferenceConfiguration * temperature: Number("float"), * topP: Number("float"), - * topK: Number("int"), * maxTokens: Number("int"), * stopSequences: [ // StopSequences * "STRING_VALUE", @@ -110,7 +109,6 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts b/clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts index 81477bcbfd26..73b6a04252fe 100644 --- a/clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts +++ b/clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts @@ -73,7 +73,6 @@ export interface CreatePromptVersionCommandOutput extends CreatePromptVersionRes * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/commands/GetFlowCommand.ts b/clients/client-bedrock-agent/src/commands/GetFlowCommand.ts index 08e53cd2f9d1..293c20aebba1 100644 --- a/clients/client-bedrock-agent/src/commands/GetFlowCommand.ts +++ 
b/clients/client-bedrock-agent/src/commands/GetFlowCommand.ts @@ -97,7 +97,6 @@ export interface GetFlowCommandOutput extends GetFlowResponse, __MetadataBearer * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts b/clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts index 2b965edf1c85..38bd428a4d65 100644 --- a/clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts +++ b/clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts @@ -101,7 +101,6 @@ export interface GetFlowVersionCommandOutput extends GetFlowVersionResponse, __M * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts b/clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts index 91a41a4d2529..6f0b5c1ecd0e 100644 --- a/clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts +++ b/clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts @@ -28,7 +28,7 @@ export interface GetIngestionJobCommandInput extends GetIngestionJobRequest {} export interface GetIngestionJobCommandOutput extends GetIngestionJobResponse, __MetadataBearer {} /** - *
<p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Lanaguage Models (LLMs) can use your data.</p> + * <p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Language Models (LLMs) can use your data.</p>
* @example * Use a bare-bones client and the command you need to make an API call. * ```javascript diff --git a/clients/client-bedrock-agent/src/commands/GetPromptCommand.ts b/clients/client-bedrock-agent/src/commands/GetPromptCommand.ts index 41a8631c004f..38d4863513b7 100644 --- a/clients/client-bedrock-agent/src/commands/GetPromptCommand.ts +++ b/clients/client-bedrock-agent/src/commands/GetPromptCommand.ts @@ -65,7 +65,6 @@ export interface GetPromptCommandOutput extends GetPromptResponse, __MetadataBea * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts b/clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts index 00e463f1434b..e306413070bd 100644 --- a/clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts +++ b/clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts @@ -91,7 +91,6 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB * text: { // PromptModelInferenceConfiguration * temperature: Number("float"), * topP: Number("float"), - * topK: Number("int"), * maxTokens: Number("int"), * stopSequences: [ // StopSequences * "STRING_VALUE", @@ -218,7 +217,6 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts b/clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts index 5edd3b94fa6e..38e6c050c869 100644 --- a/clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts +++ b/clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts @@ -64,7 +64,6 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad * text: { // PromptModelInferenceConfiguration * temperature: Number("float"), * topP: Number("float"), - * topK: Number("int"), * maxTokens: Number("int"), * stopSequences: [ // StopSequences * "STRING_VALUE", @@ -107,7 +106,6 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad * // text: { // PromptModelInferenceConfiguration * // temperature: Number("float"), * // topP: Number("float"), - * // topK: Number("int"), * // maxTokens: Number("int"), * // stopSequences: [ // StopSequences * // "STRING_VALUE", diff --git a/clients/client-bedrock-agent/src/models/models_0.ts b/clients/client-bedrock-agent/src/models/models_0.ts index ed458d33552b..5bebad6d2b1a 100644 --- a/clients/client-bedrock-agent/src/models/models_0.ts +++ b/clients/client-bedrock-agent/src/models/models_0.ts @@ -3972,12 +3972,6 @@ export interface PromptModelInferenceConfiguration { */ topP?: number; - /** - *
<p>The number of most-likely candidates that the model considers for the next token during generation.</p>
- * @public - */ - topK?: number; - /** *
<p>The maximum number of tokens to return in the response.</p>
* @public @@ -7514,7 +7508,7 @@ export interface PromptVariant { *
<p>Contains configurations for the prompt template.</p>
* @public */ - templateConfiguration?: PromptTemplateConfiguration; + templateConfiguration: PromptTemplateConfiguration | undefined; /** *
<p>The unique identifier of the model or inference profile with which to run inference on the prompt.</p>
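The models_0.ts change above is the one most likely to affect callers: `topK` disappears from `PromptModelInferenceConfiguration`, and `templateConfiguration` becomes a required member of `PromptVariant`. A minimal sketch of an updated call site (the prompt name, template text, variable name, and model ID below are illustrative placeholders, not values from this diff; top-level await assumes an ESM context):

```ts
import { BedrockAgentClient, CreatePromptCommand } from "@aws-sdk/client-bedrock-agent";

const client = new BedrockAgentClient({ region: "us-east-1" });

const response = await client.send(
  new CreatePromptCommand({
    name: "example-prompt", // placeholder
    variants: [
      {
        name: "variantOne",
        templateType: "TEXT",
        // Required as of this change; omitting it is now a compile-time error.
        templateConfiguration: {
          text: {
            text: "Summarize the following topic: {{topic}}",
            inputVariables: [{ name: "topic" }],
          },
        },
        inferenceConfiguration: {
          text: {
            temperature: 0.2,
            topP: 0.9,
            maxTokens: 512,
            // topK is no longer a member of PromptModelInferenceConfiguration;
            // passing it here would now be rejected by the compiler.
          },
        },
        modelId: "anthropic.claude-3-sonnet-20240229-v1:0", // placeholder
      },
    ],
  })
);
```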
diff --git a/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts b/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts index 353cdb8e430d..7e4915f1d4b2 100644 --- a/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts +++ b/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts @@ -3533,7 +3533,6 @@ const se_PromptModelInferenceConfiguration = ( maxTokens: [], stopSequences: _json, temperature: __serializeFloat, - topK: [], topP: __serializeFloat, }); }; @@ -4484,7 +4483,6 @@ const de_PromptModelInferenceConfiguration = ( maxTokens: __expectInt32, stopSequences: _json, temperature: __limitedParseFloat32, - topK: __expectInt32, topP: __limitedParseFloat32, }) as any; }; diff --git a/codegen/sdk-codegen/aws-models/bedrock-agent.json b/codegen/sdk-codegen/aws-models/bedrock-agent.json index 612b4d427d27..7e3c284bfc03 100644 --- a/codegen/sdk-codegen/aws-models/bedrock-agent.json +++ b/codegen/sdk-codegen/aws-models/bedrock-agent.json @@ -7088,7 +7088,7 @@ } ], "traits": { - "smithy.api#documentation": "
<p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Lanaguage Models (LLMs) can use your data.</p>
", + "smithy.api#documentation": "
<p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Language Models (LLMs) can use your data.</p>
", "smithy.api#http": { "code": 200, "method": "GET", @@ -10520,12 +10520,6 @@ "smithy.api#documentation": "
<p>The percentage of most-likely candidates that the model considers for the next token.</p>
" } }, - "topK": { - "target": "com.amazonaws.bedrockagent#TopK", - "traits": { - "smithy.api#documentation": "
<p>The number of most-likely candidates that the model considers for the next token during generation.</p>
" - } - }, "maxTokens": { "target": "com.amazonaws.bedrockagent#MaximumLength", "traits": { @@ -10762,7 +10756,8 @@ "templateConfiguration": { "target": "com.amazonaws.bedrockagent#PromptTemplateConfiguration", "traits": { - "smithy.api#documentation": "
<p>Contains configurations for the prompt template.</p>
" + "smithy.api#documentation": "
<p>Contains configurations for the prompt template.</p>
", + "smithy.api#required": {} } }, "modelId": { @@ -10796,7 +10791,7 @@ }, "traits": { "smithy.api#length": { - "max": 3 + "max": 1 }, "smithy.api#sensitive": {} }