Integrated into one client
Stephen Hibbert committed Dec 19, 2024
1 parent a43ad2b commit 17f36ea
Showing 5 changed files with 127 additions and 100 deletions.
docs/integrations/llms/anthropic.md (5 changes: 2 additions & 3 deletions)

@@ -105,7 +105,7 @@ Shows up like this in Logfire:
</figure>

# Amazon Bedrock
-You can also run inference on AWS with Amazon Bedrock using the `AmazonBedrock` and `AsyncAmazonBedrock` clients.
+You can also log Anthropic LLM calls to Amazon Bedrock using the `AnthropicBedrock` and `AsyncAnthropicBedrock` clients.

```
import anthropic
@@ -114,6 +114,5 @@ import logfire
client = anthropic.AnthropicBedrock()
logfire.configure()
-logfire.instrument_anthropic_bedrock(client)
logfire.instrument_anthropic(client)
```
Or, if you don't have access to the client instance, you can use `logfire.instrument_anthropic()` to instrument both the `anthropic.Anthropic` and `anthropic.AsyncAnthropic` classes.
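Putting the post-commit pieces together, end-to-end usage looks roughly like this; a minimal sketch assuming valid AWS credentials are available in the environment (the region and model ID are illustrative placeholders):

```python
import anthropic
import logfire

logfire.configure()

# After this commit, the single instrument_anthropic call also covers Bedrock clients.
client = anthropic.AnthropicBedrock(aws_region='us-east-1')  # placeholder region
logfire.instrument_anthropic(client)

response = client.messages.create(
    max_tokens=1000,
    model='anthropic.claude-3-haiku-20240307-v1:0',  # Bedrock model ID, as in the tests below
    system='You are a helpful assistant.',
    messages=[{'role': 'user', 'content': 'What is four plus five?'}],
)
print(response.content[0].text)
```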
logfire/__init__.py (2 changes: 0 additions & 2 deletions)

@@ -37,7 +37,6 @@
instrument_fastapi = DEFAULT_LOGFIRE_INSTANCE.instrument_fastapi
instrument_openai = DEFAULT_LOGFIRE_INSTANCE.instrument_openai
instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic
-instrument_anthropic_bedrock = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic_bedrock
instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg
instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx
instrument_celery = DEFAULT_LOGFIRE_INSTANCE.instrument_celery
@@ -123,7 +122,6 @@ def loguru_handler() -> Any:
    'instrument_fastapi',
    'instrument_openai',
    'instrument_anthropic',
-    'instrument_anthropic_bedrock',
    'instrument_asyncpg',
    'instrument_httpx',
    'instrument_celery',
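For context on what this deletion does: `logfire/__init__.py` exposes bound methods of a default instance at module level, so dropping the alias and its `__all__` entry is what removes the public `logfire.instrument_anthropic_bedrock` API. A minimal sketch of that pattern (names illustrative, not logfire's actual internals):

```python
class Logfire:
    """Stand-in for logfire's main class; only the delegation pattern matters here."""

    def instrument_anthropic(self, client=None):
        print(f'instrumenting {client!r}')


DEFAULT_LOGFIRE_INSTANCE = Logfire()

# Module-level functions are just bound methods of the default instance, so
# deleting an alias like the one above removes it from the package's public API.
instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic

__all__ = ['instrument_anthropic']
```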
logfire/_internal/integrations/llm_providers/anthropic.py (33 changes: 19 additions & 14 deletions)

@@ -21,7 +21,7 @@


def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
"""Returns the endpoint config for Anthropic depending on the url."""
"""Returns the endpoint config for Anthropic or Bedrock depending on the url."""
url = options.url
json_data = options.json_data
if not isinstance(json_data, dict): # pragma: no cover
@@ -34,6 +34,13 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
            span_data={'request_data': json_data},
            stream_state_cls=AnthropicMessageStreamState,
        )
+    # Handle Amazon Bedrock URLs
+    elif url.startswith('https://bedrock-runtime.'):
+        return EndpointConfig(
+            message_template='Message with {request_data[model]!r}',
+            span_data={'request_data': json_data},
+            stream_state_cls=AnthropicMessageStreamState,
+        )
    else:
        return EndpointConfig(
            message_template='Anthropic API call to {url!r}',
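The new branch dispatches purely on the request URL, so Bedrock calls get the same 'Message with ...' span template as direct API calls. A self-contained sketch of that dispatch (the first branch's `/v1/messages` condition sits above this hunk and is assumed here):

```python
def pick_message_template(url: str) -> str:
    # Direct Anthropic messages endpoint (condition assumed; the hunk starts below the `if`).
    if url == '/v1/messages':
        return 'Message with {request_data[model]!r}'
    # New in this commit: Bedrock runtime hosts render the same way in Logfire.
    elif url.startswith('https://bedrock-runtime.'):
        return 'Message with {request_data[model]!r}'
    else:
        return 'Anthropic API call to {url!r}'


print(pick_message_template('https://bedrock-runtime.us-east-1.amazonaws.com/model/my-model/invoke'))
# -> Message with {request_data[model]!r}
```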
@@ -83,16 +90,14 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
    return response


-def is_async_client(
-    client: type[anthropic.Anthropic]
-    | type[anthropic.AsyncAnthropic]
-    | type[anthropic.AnthropicBedrock]
-    | type[anthropic.AsyncAnthropicBedrock],
-):
-    """Returns whether or not the `client` class is async."""
-    if issubclass(client, anthropic.Anthropic | anthropic.AnthropicBedrock):
-        return False
-    assert issubclass(
-        client, anthropic.AsyncAnthropic | anthropic.AsyncAnthropicBedrock
-    ), f'Expected Anthropic or AsyncAnthropic or AnthropicBedrock or AsyncAnthropicBedrock type, got: {client}'
-    return True
+def is_async_client(client_type: Any) -> bool:
+    """Returns whether the `client_type` class or instance is async."""
+    if isinstance(client_type, type):
+        if issubclass(client_type, (anthropic.Anthropic, anthropic.AnthropicBedrock)):
+            return False
+        if issubclass(client_type, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock)):
+            return True
+        raise TypeError(
+            f'Expected Anthropic, AsyncAnthropic, AnthropicBedrock, or AsyncAnthropicBedrock type, got: {client_type}'
+        )
+    return isinstance(client_type, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock))
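A quick illustration of the new dispatch, assuming the `anthropic` package is importable: class arguments hit the `issubclass` checks, while instances fall through to the final `isinstance`:

```python
import anthropic

print(is_async_client(anthropic.Anthropic))  # False: sync class
print(is_async_client(anthropic.AsyncAnthropicBedrock))  # True: async class
print(is_async_client(anthropic.Anthropic(api_key='test-key')))  # False: sync instance
```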
logfire/_internal/main.py (103 changes: 24 additions & 79 deletions)

@@ -1079,17 +1079,23 @@ def instrument_openai(

    def instrument_anthropic(
        self,
-        anthropic_client: anthropic.Anthropic
-        | anthropic.AsyncAnthropic
-        | type[anthropic.Anthropic]
-        | type[anthropic.AsyncAnthropic]
-        | None = None,
+        anthropic_client: (
+            anthropic.Anthropic
+            | anthropic.AsyncAnthropic
+            | anthropic.AnthropicBedrock
+            | anthropic.AsyncAnthropicBedrock
+            | type[anthropic.Anthropic]
+            | type[anthropic.AsyncAnthropic]
+            | type[anthropic.AnthropicBedrock]
+            | type[anthropic.AsyncAnthropicBedrock]
+            | None
+        ) = None,
        *,
        suppress_other_instrumentation: bool = True,
    ) -> ContextManager[None]:
        """Instrument an Anthropic client so that spans are automatically created for each request.

-        The following methods are instrumented for both the sync and the async clients:
+        The following methods are instrumented for both the sync and async clients:

        - [`client.messages.create`](https://docs.anthropic.com/en/api/messages)
        - [`client.messages.stream`](https://docs.anthropic.com/en/api/messages-streaming)
@@ -1103,79 +1109,18 @@ def instrument_anthropic(
        import logfire
        import anthropic

+        # For regular Anthropic client
        client = anthropic.Anthropic()
        logfire.configure()
        logfire.instrument_anthropic(client)

-        response = client.messages.create(
-            model='claude-3-haiku-20240307',
-            system='You are a helpful assistant.',
-            messages=[
-                {'role': 'user', 'content': 'What is four plus five?'},
-            ],
+        # Or for Bedrock client
+        client = anthropic.AnthropicBedrock(
+            aws_region='us-east-1', aws_access_key='access-key', aws_secret_key='secret-key'
+        )
-        print('answer:', response.content[0].text)
-        ```
-
-        Args:
-            anthropic_client: The Anthropic client or class to instrument:
-
-                - `None` (the default) to instrument both `anthropic.Anthropic` and `anthropic.AsyncAnthropic`
-                - The `anthropic.Anthropic` class or a subclass
-                - The `anthropic.AsyncAnthropic` class or a subclass
-                - An instance of `anthropic.Anthropic`
-                - An instance of `anthropic.AsyncAnthropic`
-
-            suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise
-                enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since
-                OpenAI uses HTTPX to make HTTP requests.
-
-        Returns:
-            A context manager that will revert the instrumentation when exited.
-            Use of this context manager is optional.
-        """
-        import anthropic
-
-        from .integrations.llm_providers.anthropic import get_endpoint_config, is_async_client, on_response
-        from .integrations.llm_providers.llm_provider import instrument_llm_provider
-
-        self._warn_if_not_initialized_for_instrumentation()
-        return instrument_llm_provider(
-            self,
-            anthropic_client or (anthropic.Anthropic, anthropic.AsyncAnthropic),
-            suppress_other_instrumentation,
-            'Anthropic',
-            get_endpoint_config,
-            on_response,
-            is_async_client,
-        )
-
-    def instrument_anthropic_bedrock(
-        self,
-        anthropic_client: anthropic.AnthropicBedrock
-        | anthropic.AsyncAnthropicBedrock
-        | type[anthropic.AnthropicBedrock]
-        | type[anthropic.AsyncAnthropicBedrock]
-        | None = None,
-        *,
-        suppress_other_instrumentation: bool = True,
-    ) -> ContextManager[None]:
-        """Instrument an Anthropic Bedrock client so that spans are automatically created for each request.
-
-        When `stream=True` a second span is created to instrument the streamed response.
-
-        Example usage:
-
-        ```python
-        import logfire
-        import anthropic
-
-        client = anthropic.AnthropicBedrock()
        logfire.configure()
-        logfire.instrument_anthropic_bedrock(client)
+        logfire.instrument_anthropic(client)

        response = client.messages.create(
-            model='anthropic.claude-3-haiku-20240307-v1:0',
+            model='claude-3-haiku-20240307',
            system='You are a helpful assistant.',
            messages=[
                {'role': 'user', 'content': 'What is four plus five?'},
@@ -1186,12 +1131,10 @@ def instrument_anthropic_bedrock(
        Args:
            anthropic_client: The Anthropic client or class to instrument:

-                - `None` (the default) to instrument both the `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes.
-                - The `anthropic.AnthropicBedrock` class or a subclass
-                - The `anthropic.AsyncAnthropicBedrock` class or a subclass
-                - An instance of `anthropic.AnthropicBedrock`
-                - An instance of `anthropic.AsyncAnthropicBedrock`
+                - `None` (the default) to instrument all Anthropic client types
+                - The `anthropic.Anthropic` or `anthropic.AnthropicBedrock` class or subclass
+                - The `anthropic.AsyncAnthropic` or `anthropic.AsyncAnthropicBedrock` class or subclass
+                - An instance of any of the above classes

            suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise
                enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since
@@ -1211,6 +1154,8 @@
            self,
            anthropic_client
            or (
+                anthropic.Anthropic,
+                anthropic.AsyncAnthropic,
                anthropic.AnthropicBedrock,
                anthropic.AsyncAnthropicBedrock,
            ),
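Since the unified method still returns a context manager, the no-argument form can be applied and reverted around a block. A small sketch of the post-commit behavior the docstring above describes:

```python
import logfire

logfire.configure()

# With no client argument, all four Anthropic client classes are instrumented;
# exiting the block reverts the instrumentation (using it as a context manager is optional).
with logfire.instrument_anthropic():
    ...
```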
tests/otel_integrations/test_anthropic_bedrock.py (84 changes: 82 additions & 2 deletions)

@@ -48,7 +48,7 @@ def mock_client() -> Iterator[AnthropicBedrock]:
        aws_session_token='test-session-token',
        http_client=http_client,
    )
-    with logfire.instrument_anthropic_bedrock(client):
+    with logfire.instrument_anthropic(client):
        yield client


@@ -63,7 +63,7 @@ async def mock_async_client() -> AsyncIterator[AsyncAnthropicBedrock]:
        aws_session_token='test-session-token',
        http_client=http_client,
    )
-    with logfire.instrument_anthropic_bedrock():  # Test instrumenting EVERYTHING
+    with logfire.instrument_anthropic():  # Test instrumenting EVERYTHING
        yield client


@@ -145,3 +145,83 @@ def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter):
            }
        ]
    )


+@pytest.mark.filterwarnings('ignore:datetime.datetime.utcnow:DeprecationWarning')
+async def test_async_messages(mock_async_client: AsyncAnthropicBedrock, exporter: TestExporter):
+    """Test basic asynchronous message creation"""
+    model_id = 'anthropic.claude-3-haiku-20240307-v1:0'
+    response = await mock_async_client.messages.create(
+        max_tokens=1000,
+        model=model_id,
+        system='You are a helpful assistant.',
+        messages=[{'role': 'user', 'content': 'What is four plus five?'}],
+    )
+
+    # Verify response structure
+    assert isinstance(response.content[0], TextBlock)
+    assert response.content[0].text == 'Nine'
+
+    # Verify exported spans
+    assert exporter.exported_spans_as_dict() == snapshot(
+        [
+            {
+                'name': 'Message with {request_data[model]!r}',
+                'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False},
+                'parent': None,
+                'start_time': 1000000000,
+                'end_time': 2000000000,
+                'attributes': {
+                    'code.filepath': 'test_anthropic_bedrock.py',
+                    'code.function': 'test_async_messages',
+                    'code.lineno': 123,
+                    'request_data': IsJson(
+                        {
+                            'max_tokens': 1000,
+                            'system': 'You are a helpful assistant.',
+                            'messages': [{'role': 'user', 'content': 'What is four plus five?'}],
+                            'model': model_id,
+                        }
+                    ),
+                    'async': True,
+                    'logfire.msg_template': 'Message with {request_data[model]!r}',
+                    'logfire.msg': f"Message with '{model_id}'",
+                    'logfire.span_type': 'span',
+                    'logfire.tags': ('LLM',),
+                    'response_data': IsJson(
+                        {
+                            'message': {
+                                'content': 'Nine',
+                                'role': 'assistant',
+                            },
+                            'usage': {
+                                'input_tokens': 2,
+                                'output_tokens': 3,
+                                'cache_creation_input_tokens': None,
+                                'cache_read_input_tokens': None,
+                            },
+                        }
+                    ),
+                    'logfire.json_schema': IsJson(
+                        {
+                            'type': 'object',
+                            'properties': {
+                                'request_data': {'type': 'object'},
+                                'async': {},
+                                'response_data': {
+                                    'type': 'object',
+                                    'properties': {
+                                        'usage': {
+                                            'type': 'object',
+                                            'title': 'Usage',
+                                            'x-python-datatype': 'PydanticModel',
+                                        },
+                                    },
+                                },
+                            },
+                        }
+                    ),
+                },
+            }
+        ]
+    )
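For reference, the `http_client` these fixtures pass in is a mocked HTTPX client; the handler itself lives in an elided part of the test file, so the payload below is purely illustrative of the shape such a mock might return:

```python
import httpx


def handler(request: httpx.Request) -> httpx.Response:
    # Canned Anthropic-style response for any request (illustrative body only).
    return httpx.Response(
        200,
        json={
            'id': 'msg_123',
            'type': 'message',
            'role': 'assistant',
            'content': [{'type': 'text', 'text': 'Nine'}],
            'model': 'anthropic.claude-3-haiku-20240307-v1:0',
            'stop_reason': 'end_turn',
            'usage': {'input_tokens': 2, 'output_tokens': 3},
        },
    )


# The async fixture would wrap the same transport in an httpx.AsyncClient instead.
http_client = httpx.Client(transport=httpx.MockTransport(handler))
```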
