Support for AnthropicBedrock client (#701)
Co-authored-by: Stephen Hibbert <[email protected]>
Co-authored-by: Alex Hall <[email protected]>
3 people authored Dec 23, 2024
1 parent ecdd723 commit db1f57a
Showing 6 changed files with 256 additions and 18 deletions.
18 changes: 18 additions & 0 deletions docs/integrations/llms/anthropic.md
@@ -103,3 +103,21 @@ Shows up like this in Logfire:
![Logfire Anthropic Streaming](../../images/logfire-screenshot-anthropic-stream.png){ width="500" }
<figcaption>Anthropic streaming response</figcaption>
</figure>

## Amazon Bedrock

You can also log Anthropic LLM calls made through Amazon Bedrock by using the `AnthropicBedrock` and `AsyncAnthropicBedrock` clients.

```python
import anthropic
import logfire

client = anthropic.AnthropicBedrock(
aws_region='us-east-1',
aws_access_key='access-key',
aws_secret_key='secret-key',
)

logfire.configure()
logfire.instrument_anthropic(client)
```
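
Continuing from the example above, requests made with the instrumented client are traced as usual. A minimal sketch of a follow-up call (the Bedrock model ID and prompt are illustrative, mirroring the values used in the new test below):

```python
response = client.messages.create(
    max_tokens=1000,
    model='anthropic.claude-3-haiku-20240307-v1:0',  # illustrative Bedrock model ID
    system='You are a helpful assistant.',
    messages=[{'role': 'user', 'content': 'What is four plus five?'}],
)
print(response.content[0].text)  # the span is recorded by Logfire automatically
```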
15 changes: 11 additions & 4 deletions logfire/_internal/integrations/llm_providers/anthropic.py
@@ -21,7 +21,7 @@


def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
"""Returns the endpoint config for Anthropic depending on the url."""
"""Returns the endpoint config for Anthropic or Bedrock depending on the url."""
url = options.url
json_data = options.json_data
if not isinstance(json_data, dict): # pragma: no cover
@@ -83,9 +83,16 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
return response


def is_async_client(client: type[anthropic.Anthropic] | type[anthropic.AsyncAnthropic]):
def is_async_client(
client: type[anthropic.Anthropic]
| type[anthropic.AsyncAnthropic]
| type[anthropic.AnthropicBedrock]
| type[anthropic.AsyncAnthropicBedrock],
):
"""Returns whether or not the `client` class is async."""
if issubclass(client, anthropic.Anthropic):
if issubclass(client, (anthropic.Anthropic, anthropic.AnthropicBedrock)):
return False
assert issubclass(client, anthropic.AsyncAnthropic), f'Expected Anthropic or AsyncAnthropic type, got: {client}'
assert issubclass(
client, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock)
), f'Expected Anthropic, AsyncAnthropic, AnthropicBedrock or AsyncAnthropicBedrock type, got: {client}'
return True
38 changes: 24 additions & 14 deletions logfire/_internal/main.py
@@ -1072,17 +1072,23 @@ def instrument_openai(

def instrument_anthropic(
self,
anthropic_client: anthropic.Anthropic
| anthropic.AsyncAnthropic
| type[anthropic.Anthropic]
| type[anthropic.AsyncAnthropic]
| None = None,
anthropic_client: (
anthropic.Anthropic
| anthropic.AsyncAnthropic
| anthropic.AnthropicBedrock
| anthropic.AsyncAnthropicBedrock
| type[anthropic.Anthropic]
| type[anthropic.AsyncAnthropic]
| type[anthropic.AnthropicBedrock]
| type[anthropic.AsyncAnthropicBedrock]
| None
) = None,
*,
suppress_other_instrumentation: bool = True,
) -> ContextManager[None]:
"""Instrument an Anthropic client so that spans are automatically created for each request.
The following methods are instrumented for both the sync and the async clients:
The following methods are instrumented for both the sync and async clients:
- [`client.messages.create`](https://docs.anthropic.com/en/api/messages)
- [`client.messages.stream`](https://docs.anthropic.com/en/api/messages-streaming)
@@ -1097,6 +1103,7 @@ def instrument_anthropic(
import anthropic
client = anthropic.Anthropic()
logfire.configure()
logfire.instrument_anthropic(client)
@@ -1112,13 +1119,10 @@
Args:
anthropic_client: The Anthropic client or class to instrument:
- `None` (the default) to instrument both the
`anthropic.Anthropic` and `anthropic.AsyncAnthropic` classes.
- The `anthropic.Anthropic` class or a subclass
- The `anthropic.AsyncAnthropic` class or a subclass
- An instance of `anthropic.Anthropic`
- An instance of `anthropic.AsyncAnthropic`
- `None` (the default) to instrument all Anthropic client types
- The `anthropic.Anthropic` or `anthropic.AnthropicBedrock` class or subclass
- The `anthropic.AsyncAnthropic` or `anthropic.AsyncAnthropicBedrock` class or subclass
- An instance of any of the above classes
suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise
enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since
@@ -1136,7 +1140,13 @@
self._warn_if_not_initialized_for_instrumentation()
return instrument_llm_provider(
self,
anthropic_client or (anthropic.Anthropic, anthropic.AsyncAnthropic),
anthropic_client
or (
anthropic.Anthropic,
anthropic.AsyncAnthropic,
anthropic.AnthropicBedrock,
anthropic.AsyncAnthropicBedrock,
),
suppress_other_instrumentation,
'Anthropic',
get_endpoint_config,
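
With these defaults, calling `logfire.instrument_anthropic()` with no arguments now also covers the Bedrock client classes. A minimal sketch under that assumption (the region value is illustrative):

```python
import anthropic
import logfire

logfire.configure()
# No client passed: all four client classes (Anthropic, AsyncAnthropic,
# AnthropicBedrock, AsyncAnthropicBedrock) are instrumented.
logfire.instrument_anthropic()

# Bedrock clients are then traced without being passed explicitly.
bedrock_client = anthropic.AnthropicBedrock(aws_region='us-east-1')  # illustrative region
```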
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -160,6 +160,9 @@ dev = [
"requests",
"setuptools>=75.3.0",
"aiosqlite>=0.20.0",
"boto3 >= 1.28.57",
"botocore >= 1.31.57",

]
docs = [
"mkdocs>=1.5.0",
147 changes: 147 additions & 0 deletions tests/otel_integrations/test_anthropic_bedrock.py
@@ -0,0 +1,147 @@
from typing import Iterator

import httpx
import pytest
from anthropic import Anthropic, AnthropicBedrock, AsyncAnthropic, AsyncAnthropicBedrock
from anthropic.types import Message, TextBlock, Usage
from dirty_equals import IsJson
from httpx._transports.mock import MockTransport
from inline_snapshot import snapshot

import logfire
from logfire._internal.integrations.llm_providers.anthropic import is_async_client
from logfire.testing import TestExporter


def request_handler(request: httpx.Request) -> httpx.Response:
"""Used to mock httpx requests"""
model_id = 'anthropic.claude-3-haiku-20240307-v1:0'

assert request.method == 'POST'
assert request.url == f'https://bedrock-runtime.us-east-1.amazonaws.com/model/{model_id}/invoke'

return httpx.Response(
200,
json=Message(
id='test_id',
content=[
TextBlock(
text='Nine',
type='text',
)
],
model=model_id,
role='assistant',
type='message',
usage=Usage(input_tokens=2, output_tokens=3), # Match the snapshot values
).model_dump(mode='json'),
)


@pytest.fixture
def mock_client() -> Iterator[AnthropicBedrock]:
"""Fixture that provides a mocked Anthropic client with AWS credentials"""
with httpx.Client(transport=MockTransport(request_handler)) as http_client:
client = AnthropicBedrock(
aws_region='us-east-1',
aws_access_key='test-access-key',
aws_secret_key='test-secret-key',
aws_session_token='test-session-token',
http_client=http_client,
)
with logfire.instrument_anthropic():
yield client


@pytest.mark.filterwarnings('ignore:datetime.datetime.utcnow:DeprecationWarning')
def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter):
"""Test basic synchronous message creation"""
model_id = 'anthropic.claude-3-haiku-20240307-v1:0'
response = mock_client.messages.create(
max_tokens=1000,
model=model_id,
system='You are a helpful assistant.',
messages=[{'role': 'user', 'content': 'What is four plus five?'}],
)

# Verify response structure
assert isinstance(response.content[0], TextBlock)
assert response.content[0].text == 'Nine'

# Verify exported spans
assert exporter.exported_spans_as_dict() == snapshot(
[
{
'name': 'Message with {request_data[model]!r}',
'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False},
'parent': None,
'start_time': 1000000000,
'end_time': 2000000000,
'attributes': {
'code.filepath': 'test_anthropic_bedrock.py',
'code.function': 'test_sync_messages',
'code.lineno': 123,
'request_data': IsJson(
{
'max_tokens': 1000,
'system': 'You are a helpful assistant.',
'messages': [{'role': 'user', 'content': 'What is four plus five?'}],
'model': model_id,
}
),
'async': False,
'logfire.msg_template': 'Message with {request_data[model]!r}',
'logfire.msg': f"Message with '{model_id}'",
'logfire.span_type': 'span',
'logfire.tags': ('LLM',),
'response_data': IsJson(
{
'message': {
'content': 'Nine',
'role': 'assistant',
},
'usage': {
'input_tokens': 2,
'output_tokens': 3,
'cache_creation_input_tokens': None,
'cache_read_input_tokens': None,
},
}
),
'logfire.json_schema': IsJson(
{
'type': 'object',
'properties': {
'request_data': {'type': 'object'},
'async': {},
'response_data': {
'type': 'object',
'properties': {
'usage': {
'type': 'object',
'title': 'Usage',
'x-python-datatype': 'PydanticModel',
},
},
},
},
}
),
},
}
]
)


def test_is_async_client() -> None:
# Test sync clients
assert not is_async_client(Anthropic)
assert not is_async_client(AnthropicBedrock)

# Test async clients
assert is_async_client(AsyncAnthropic)
assert is_async_client(AsyncAnthropicBedrock)

# Test invalid input
with pytest.raises(AssertionError):
is_async_client(str) # type: ignore
53 changes: 53 additions & 0 deletions uv.lock

Some generated files are not rendered by default.
