diff --git a/aws-opentelemetry-distro/src/amazon/opentelemetry/distro/patches/_bedrock_patches.py b/aws-opentelemetry-distro/src/amazon/opentelemetry/distro/patches/_bedrock_patches.py
index 4a6eb10f5..a8d61f4d2 100644
--- a/aws-opentelemetry-distro/src/amazon/opentelemetry/distro/patches/_bedrock_patches.py
+++ b/aws-opentelemetry-distro/src/amazon/opentelemetry/distro/patches/_bedrock_patches.py
@@ -41,7 +41,7 @@
 _GUARDRAIL_ID: str = "guardrailId"
 _GUARDRAIL_ARN: str = "guardrailArn"
 _MODEL_ID: str = "modelId"
-_AWS_BEDROCK_SYSTEM: str = "aws_bedrock"
+_AWS_BEDROCK_SYSTEM: str = "aws.bedrock"
 
 _logger = logging.getLogger(__name__)
 # Set logger level to DEBUG
diff --git a/aws-opentelemetry-distro/tests/amazon/opentelemetry/distro/test_instrumentation_patch.py b/aws-opentelemetry-distro/tests/amazon/opentelemetry/distro/test_instrumentation_patch.py
index 86c6bc39f..6fe16f15e 100644
--- a/aws-opentelemetry-distro/tests/amazon/opentelemetry/distro/test_instrumentation_patch.py
+++ b/aws-opentelemetry-distro/tests/amazon/opentelemetry/distro/test_instrumentation_patch.py
@@ -28,7 +28,7 @@
 _BEDROCK_DATASOURCE_ID: str = "DataSourceId"
 _BEDROCK_GUARDRAIL_ID: str = "GuardrailId"
 _BEDROCK_KNOWLEDGEBASE_ID: str = "KnowledgeBaseId"
-_GEN_AI_SYSTEM: str = "aws_bedrock"
+_GEN_AI_SYSTEM: str = "aws.bedrock"
 _GEN_AI_REQUEST_MODEL: str = "genAiReuqestModelId"
 _SECRET_ARN: str = "arn:aws:secretsmanager:us-west-2:000000000000:secret:testSecret-ABCDEF"
 _TOPIC_ARN: str = "topicArn"
diff --git a/contract-tests/tests/test/amazon/botocore/botocore_test.py b/contract-tests/tests/test/amazon/botocore/botocore_test.py
index c8a346f5e..d444b9716 100644
--- a/contract-tests/tests/test/amazon/botocore/botocore_test.py
+++ b/contract-tests/tests/test/amazon/botocore/botocore_test.py
@@ -43,6 +43,7 @@
 _GEN_AI_RESPONSE_FINISH_REASONS: str = "gen_ai.response.finish_reasons"
 _GEN_AI_USAGE_INPUT_TOKENS: str = "gen_ai.usage.input_tokens"
 _GEN_AI_USAGE_OUTPUT_TOKENS: str = "gen_ai.usage.output_tokens"
+_GEN_AI_SYSTEM: str = "gen_ai.system"
 
 _AWS_SECRET_ARN: str = "aws.secretsmanager.secret.arn"
 _AWS_STATE_MACHINE_ARN: str = "aws.stepfunctions.state_machine.arn"
@@ -428,6 +429,7 @@ def test_bedrock_runtime_invoke_model_amazon_titan(self):
             cloudformation_primary_identifier="amazon.titan-text-premier-v1:0",
             request_specific_attributes={
                 _GEN_AI_REQUEST_MODEL: "amazon.titan-text-premier-v1:0",
+                _GEN_AI_SYSTEM: "aws.bedrock",
                 _GEN_AI_REQUEST_MAX_TOKENS: 3072,
                 _GEN_AI_REQUEST_TEMPERATURE: 0.7,
                 _GEN_AI_REQUEST_TOP_P: 0.9,
@@ -455,6 +457,7 @@ def test_bedrock_runtime_invoke_model_anthropic_claude(self):
             cloudformation_primary_identifier="anthropic.claude-v2:1",
             request_specific_attributes={
                 _GEN_AI_REQUEST_MODEL: "anthropic.claude-v2:1",
+                _GEN_AI_SYSTEM: "aws.bedrock",
                 _GEN_AI_REQUEST_MAX_TOKENS: 1000,
                 _GEN_AI_REQUEST_TEMPERATURE: 0.99,
                 _GEN_AI_REQUEST_TOP_P: 1,
@@ -482,6 +485,7 @@ def test_bedrock_runtime_invoke_model_meta_llama(self):
             cloudformation_primary_identifier="meta.llama2-13b-chat-v1",
             request_specific_attributes={
                 _GEN_AI_REQUEST_MODEL: "meta.llama2-13b-chat-v1",
+                _GEN_AI_SYSTEM: "aws.bedrock",
                 _GEN_AI_REQUEST_MAX_TOKENS: 512,
                 _GEN_AI_REQUEST_TEMPERATURE: 0.5,
                 _GEN_AI_REQUEST_TOP_P: 0.9,
@@ -509,6 +513,7 @@ def test_bedrock_runtime_invoke_model_cohere_command(self):
             cloudformation_primary_identifier="cohere.command-r-v1:0",
             request_specific_attributes={
                 _GEN_AI_REQUEST_MODEL: "cohere.command-r-v1:0",
+                _GEN_AI_SYSTEM: "aws.bedrock",
                 _GEN_AI_REQUEST_MAX_TOKENS: 512,
                 _GEN_AI_REQUEST_TEMPERATURE: 0.5,
                 _GEN_AI_REQUEST_TOP_P: 0.65,
@@ -538,6 +543,7 @@ def test_bedrock_runtime_invoke_model_ai21_jamba(self):
             cloudformation_primary_identifier="ai21.jamba-1-5-large-v1:0",
             request_specific_attributes={
                 _GEN_AI_REQUEST_MODEL: "ai21.jamba-1-5-large-v1:0",
+                _GEN_AI_SYSTEM: "aws.bedrock",
                 _GEN_AI_REQUEST_MAX_TOKENS: 512,
                 _GEN_AI_REQUEST_TEMPERATURE: 0.6,
                 _GEN_AI_REQUEST_TOP_P: 0.8,
@@ -565,6 +571,7 @@ def test_bedrock_runtime_invoke_model_mistral(self):
             cloudformation_primary_identifier="mistral.mistral-7b-instruct-v0:2",
             request_specific_attributes={
                 _GEN_AI_REQUEST_MODEL: "mistral.mistral-7b-instruct-v0:2",
+                _GEN_AI_SYSTEM: "aws.bedrock",
                 _GEN_AI_REQUEST_MAX_TOKENS: 4096,
                 _GEN_AI_REQUEST_TEMPERATURE: 0.75,
                 _GEN_AI_REQUEST_TOP_P: 0.99,
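For reference, a minimal sketch (not part of the diff above) of the span attribute shape the contract tests now assert: gen_ai.system carries the dotted "aws.bedrock" value instead of the old "aws_bedrock". The tracer name and span name below are illustrative assumptions, not what the distro emits verbatim.

# Illustrative sketch only -- assumes opentelemetry-api is installed.
from opentelemetry import trace

tracer = trace.get_tracer("bedrock-attribute-sketch")  # hypothetical tracer name

with tracer.start_as_current_span("Bedrock Runtime.InvokeModel") as span:
    # Dotted value now expected by the contract tests (was "aws_bedrock").
    span.set_attribute("gen_ai.system", "aws.bedrock")
    span.set_attribute("gen_ai.request.model", "amazon.titan-text-premier-v1:0")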