Skip to content

Commit

Permalink
Added docs and split out instrument_anthropic_bedrock
Browse files Browse the repository at this point in the history
  • Loading branch information
Stephen Hibbert committed Dec 19, 2024
1 parent 21c03ba commit a43ad2b
Show file tree
Hide file tree
Showing 4 changed files with 84 additions and 91 deletions.
14 changes: 14 additions & 0 deletions docs/integrations/llms/anthropic.md
Original file line number Diff line number Diff line change
Expand Up @@ -103,3 +103,17 @@ Shows up like this in Logfire:
![Logfire Anthropic Streaming](../../images/logfire-screenshot-anthropic-stream.png){ width="500" }
<figcaption>Anthropic streaming response</figcaption>
</figure>

# Amazon Bedrock

You can also run inference on AWS with Amazon Bedrock using the `AnthropicBedrock` and `AsyncAnthropicBedrock` clients.

```python
import anthropic
import logfire
client = anthropic.AnthropicBedrock()
logfire.configure()
logfire.instrument_anthropic_bedrock(client)
```
Or, if you don't have access to the client instance, you can use `logfire.instrument_anthropic_bedrock()` to instrument both the `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes.
2 changes: 2 additions & 0 deletions logfire/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
instrument_fastapi = DEFAULT_LOGFIRE_INSTANCE.instrument_fastapi
instrument_openai = DEFAULT_LOGFIRE_INSTANCE.instrument_openai
instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic
instrument_anthropic_bedrock = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic_bedrock
instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg
instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx
instrument_celery = DEFAULT_LOGFIRE_INSTANCE.instrument_celery
Expand Down Expand Up @@ -122,6 +123,7 @@ def loguru_handler() -> Any:
'instrument_fastapi',
'instrument_openai',
'instrument_anthropic',
'instrument_anthropic_bedrock',
'instrument_asyncpg',
'instrument_httpx',
'instrument_celery',
Expand Down
75 changes: 66 additions & 9 deletions logfire/_internal/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -1081,12 +1081,8 @@ def instrument_anthropic(
self,
anthropic_client: anthropic.Anthropic
| anthropic.AsyncAnthropic
| anthropic.AnthropicBedrock
| anthropic.AsyncAnthropicBedrock
| type[anthropic.Anthropic]
| type[anthropic.AsyncAnthropic]
| type[anthropic.AnthropicBedrock]
| type[anthropic.AsyncAnthropicBedrock]
| None = None,
*,
suppress_other_instrumentation: bool = True,
Expand Down Expand Up @@ -1124,13 +1120,76 @@ def instrument_anthropic(
Args:
anthropic_client: The Anthropic client or class to instrument:
- `None` (the default) to instrument both the
`anthropic.Anthropic`, `anthropic.AsyncAnthropic`,
`anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes.
- `None` (the default) to instrument both `anthropic.Anthropic` and `anthropic.AsyncAnthropic`
- The `anthropic.Anthropic` class or a subclass
- The `anthropic.AsyncAnthropic` class or a subclass
- An instance of `anthropic.Anthropic`
- An instance of `anthropic.AsyncAnthropic`
suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise
enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since
OpenAI uses HTTPX to make HTTP requests.
Returns:
A context manager that will revert the instrumentation when exited.
Use of this context manager is optional.
"""
import anthropic

from .integrations.llm_providers.anthropic import get_endpoint_config, is_async_client, on_response
from .integrations.llm_providers.llm_provider import instrument_llm_provider

self._warn_if_not_initialized_for_instrumentation()
return instrument_llm_provider(
self,
anthropic_client or (anthropic.Anthropic, anthropic.AsyncAnthropic),
suppress_other_instrumentation,
'Anthropic',
get_endpoint_config,
on_response,
is_async_client,
)

def instrument_anthropic_bedrock(
self,
anthropic_client: anthropic.AnthropicBedrock
| anthropic.AsyncAnthropicBedrock
| type[anthropic.AnthropicBedrock]
| type[anthropic.AsyncAnthropicBedrock]
| None = None,
*,
suppress_other_instrumentation: bool = True,
) -> ContextManager[None]:
"""Instrument an Anthropic Bedrock client so that spans are automatically created for each request.
When `stream=True` a second span is created to instrument the streamed response.
Example usage:
```python
import logfire
import anthropic
client = anthropic.AnthropicBedrock()
logfire.configure()
logfire.instrument_anthropic_bedrock(client)
response = client.messages.create(
model='anthropic.claude-3-haiku-20240307-v1:0',
system='You are a helpful assistant.',
messages=[
{'role': 'user', 'content': 'What is four plus five?'},
],
)
print('answer:', response.content[0].text)
```
Args:
anthropic_client: The Anthropic client or class to instrument:
- `None` (the default) to instrument both the `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes.
- The `anthropic.AnthropicBedrock` class or a subclass
- The `anthropic.AsyncAnthropicBedrock` class or a subclass
- An instance of `anthropic.AnthropicBedrock`
- An instance of `anthropic.AsyncAnthropicBedrock`
Expand All @@ -1152,8 +1211,6 @@ def instrument_anthropic(
self,
anthropic_client
or (
anthropic.Anthropic,
anthropic.AsyncAnthropic,
anthropic.AnthropicBedrock,
anthropic.AsyncAnthropicBedrock,
),
Expand Down
84 changes: 2 additions & 82 deletions tests/otel_integrations/test_anthropic_bedrock.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def mock_client() -> Iterator[AnthropicBedrock]:
aws_session_token='test-session-token',
http_client=http_client,
)
with logfire.instrument_anthropic(client):
with logfire.instrument_anthropic_bedrock(client):
yield client


Expand All @@ -63,7 +63,7 @@ async def mock_async_client() -> AsyncIterator[AsyncAnthropicBedrock]:
aws_session_token='test-session-token',
http_client=http_client,
)
with logfire.instrument_anthropic(): # Test instrumenting EVERYTHING
with logfire.instrument_anthropic_bedrock(): # Test instrumenting EVERYTHING
yield client


Expand Down Expand Up @@ -145,83 +145,3 @@ def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter):
}
]
)


@pytest.mark.filterwarnings('ignore:datetime.datetime.utcnow:DeprecationWarning')
async def test_async_messages(mock_async_client: AsyncAnthropicBedrock, exporter: TestExporter):
    """Test basic asynchronous message creation"""
    # Claude 3 Haiku model identifier as used by Amazon Bedrock.
    model_id = 'anthropic.claude-3-haiku-20240307-v1:0'
    # The mock async client's transport returns a canned 'Nine' response
    # (see the mock_async_client fixture), so no real network call happens.
    response = await mock_async_client.messages.create(
        max_tokens=1000,
        model=model_id,
        system='You are a helpful assistant.',
        messages=[{'role': 'user', 'content': 'What is four plus five?'}],
    )

    # Verify response structure
    assert isinstance(response.content[0], TextBlock)
    assert response.content[0].text == 'Nine'

    # Verify exported spans
    # The instrumentation should have produced exactly one span for the
    # request, carrying both the request payload and the parsed response.
    assert exporter.exported_spans_as_dict() == snapshot(
        [
            {
                'name': 'Message with {request_data[model]!r}',
                'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False},
                'parent': None,
                # Fixed timestamps supplied by the test exporter's fake clock.
                'start_time': 1000000000,
                'end_time': 2000000000,
                'attributes': {
                    'code.filepath': 'test_anthropic_bedrock.py',
                    'code.function': 'test_async_messages',
                    'code.lineno': 123,
                    # The full request body is recorded as JSON on the span.
                    'request_data': IsJson(
                        {
                            'max_tokens': 1000,
                            'system': 'You are a helpful assistant.',
                            'messages': [{'role': 'user', 'content': 'What is four plus five?'}],
                            'model': model_id,
                        }
                    ),
                    'async': True,  # Note this is True for async test
                    'logfire.msg_template': 'Message with {request_data[model]!r}',
                    'logfire.msg': f"Message with '{model_id}'",
                    'logfire.span_type': 'span',
                    'logfire.tags': ('LLM',),
                    # The parsed model response and token usage are also
                    # attached to the span as JSON.
                    'response_data': IsJson(
                        {
                            'message': {
                                'content': 'Nine',
                                'role': 'assistant',
                            },
                            'usage': {
                                'input_tokens': 2,
                                'output_tokens': 3,
                                'cache_creation_input_tokens': None,
                                'cache_read_input_tokens': None,
                            },
                        }
                    ),
                    # Schema describing the JSON-encoded attributes above;
                    # 'usage' is serialized from a Pydantic model.
                    'logfire.json_schema': IsJson(
                        {
                            'type': 'object',
                            'properties': {
                                'request_data': {'type': 'object'},
                                'async': {},
                                'response_data': {
                                    'type': 'object',
                                    'properties': {
                                        'usage': {
                                            'type': 'object',
                                            'title': 'Usage',
                                            'x-python-datatype': 'PydanticModel',
                                        },
                                    },
                                },
                            },
                        }
                    ),
                },
            }
        ]
    )

0 comments on commit a43ad2b

Please sign in to comment.