-
Notifications
You must be signed in to change notification settings - Fork 517
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat(integrations): Add Anthropic Integration #2831
Changes from 10 commits
fbe50a7
d04bc3c
c531e8e
f14afaf
ea41bda
6955759
79595d4
3d54718
7c079b6
46182d3
d102d73
c860e43
5ed220c
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -66,6 +66,7 @@ | |
"gcp", | ||
], | ||
"Data Processing": [ | ||
"anthropic", | ||
"arq", | ||
"beam", | ||
"celery", | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,179 @@ | ||
from functools import wraps | ||
|
||
import sentry_sdk | ||
from sentry_sdk.consts import OP | ||
from sentry_sdk.integrations import DidNotEnable, Integration | ||
from sentry_sdk.scope import should_send_default_pii | ||
from sentry_sdk.utils import ( | ||
capture_internal_exceptions, | ||
ensure_integration_enabled, | ||
event_from_exception, | ||
package_version, | ||
) | ||
|
||
from anthropic.resources import Messages | ||
|
||
from typing import TYPE_CHECKING | ||
|
||
if TYPE_CHECKING: | ||
from typing import Any, Iterator | ||
from anthropic.types import MessageStreamEvent | ||
from sentry_sdk.tracing import Span | ||
|
||
|
||
# Span-data keys for AI token accounting.
# BUG FIX: the original literals contained a Cyrillic 'о' (U+043E) instead of
# the Latin 'o' in "tokens", producing keys that look identical but never match
# the ASCII keys consumed downstream. All three are now pure ASCII.
COMPLETION_TOKENS_USED = "ai.completion_tokens.used"
PROMPT_TOKENS_USED = "ai.prompt_tokens.used"
TOTAL_TOKENS_USED = "ai.total_tokens.used"
|
||
|
||
class AnthropicIntegration(Integration):
    """Sentry integration that instruments the Anthropic client library."""

    identifier = "anthropic"

    def __init__(self, include_prompts=True):
        # type: (AnthropicIntegration, bool) -> None
        # When True (and default-PII sending is enabled), prompt and response
        # text is attached to spans.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        """Validate the installed anthropic version, then patch Messages.create."""
        installed = package_version("anthropic")
        if installed is None:
            raise DidNotEnable("Unparsable anthropic version.")
        if installed < (0, 16):
            raise DidNotEnable("anthropic 0.16 or newer required.")

        # Replace the original create method with the instrumented wrapper.
        Messages.create = _wrap_message_create(Messages.create)
|
||
|
||
def _capture_exception(exc):
    # type: (Any) -> None
    """Report *exc* to Sentry, tagged as an unhandled anthropic error."""
    client = sentry_sdk.get_client()
    event, hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism={"type": "anthropic", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)
|
||
|
||
def _calculate_token_usage(result, span):
    # type: (Messages, Span) -> None
    """Read integer token counts off ``result.usage`` (when present) and
    record any non-zero values on *span*."""
    usage = getattr(result, "usage", None)

    def _int_field(name):
        # type: (str) -> int
        # Missing attributes and non-int values both count as zero.
        value = getattr(usage, name, None)
        return value if isinstance(value, int) else 0

    input_tokens = _int_field("input_tokens")
    output_tokens = _int_field("output_tokens")
    total_tokens = input_tokens + output_tokens

    # Only attach counts that are actually non-zero.
    if total_tokens != 0:
        span.set_data(TOTAL_TOKENS_USED, total_tokens)
    if input_tokens != 0:
        span.set_data(PROMPT_TOKENS_USED, input_tokens)
    if output_tokens != 0:
        span.set_data(COMPLETION_TOKENS_USED, output_tokens)
|
||
|
||
def _wrap_message_create(f):
    # type: (Any) -> Any
    """Wrap ``Messages.create`` so each call runs inside a Sentry span that
    records the model id, optional prompt/response text (PII-gated), and
    token usage. Streaming responses are handled by swapping in a wrapping
    iterator that finalizes the span when the stream is consumed."""

    @wraps(f)
    @ensure_integration_enabled(AnthropicIntegration, f)
    def _sentry_patched_create(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        # Bail out (no instrumentation) when there is nothing message-like
        # to record: no "messages" kwarg, or a non-iterable value.
        if "messages" not in kwargs:
            return f(*args, **kwargs)

        try:
            iter(kwargs["messages"])
        except TypeError:
            return f(*args, **kwargs)

        # Materialize the messages so they can be attached to the span even
        # if the caller passed a one-shot iterable.
        messages = list(kwargs["messages"])
        model = kwargs.get("model")

        # The span is entered/exited manually (not via `with`) because for
        # streaming responses it must stay open until the returned iterator
        # is exhausted, which happens after this function returns.
        span = sentry_sdk.start_span(
            op=OP.ANTHROPIC_MESSAGES_CREATE, description="Anthropic messages create"
        )
        span.__enter__()

        try:
            result = f(*args, **kwargs)
        except Exception as exc:
            _capture_exception(exc)
            span.__exit__(None, None, None)
            # `from None` suppresses exception chaining so the user sees the
            # original anthropic error, not the instrumentation frames.
            raise exc from None

        integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)

        with capture_internal_exceptions():
            span.set_data("ai.model_id", model)
            span.set_data("ai.streaming", False)
            # Prompt/response contents are only recorded when the client is
            # configured to send PII AND the integration opted into prompts.
            if should_send_default_pii() and integration.include_prompts:
                span.set_data("ai.input_messages", messages)
            if hasattr(result, "content"):
                # Non-streaming response: record everything and close the span.
                if should_send_default_pii() and integration.include_prompts:
                    span.set_data(
                        "ai.responses",
                        list(
                            map(
                                lambda message: {
                                    "type": message.type,
                                    "text": message.text,
                                },
                                result.content,
                            )
                        ),
                    )
                _calculate_token_usage(result, span)
                span.__exit__(None, None, None)
            elif hasattr(result, "_iterator"):
                # Streaming response: wrap the private iterator so events are
                # observed as the caller consumes them. NOTE(review): relies on
                # the anthropic SDK's private `_iterator` attribute — confirm it
                # still exists on upgrades.
                old_iterator = result._iterator

                def new_iterator():
                    # type: () -> Iterator[MessageStreamEvent]
                    input_tokens = 0
                    output_tokens = 0
                    content_blocks = []
                    with capture_internal_exceptions():
                        for event in old_iterator:
                            if hasattr(event, "type"):
                                if event.type == "message_start":
                                    usage = event.message.usage
                                    input_tokens += usage.input_tokens
                                    output_tokens += usage.output_tokens
                                elif event.type == "content_block_start":
                                    pass
                                elif event.type == "content_block_delta":
                                    content_blocks.append(event.delta.text)
                                elif event.type == "content_block_stop":
                                    pass
                                elif event.type == "message_delta":
                                    output_tokens += event.usage.output_tokens
                                elif event.type == "message_stop":
                                    # Terminal event is swallowed; the span is
                                    # finalized after the loop instead.
                                    continue
                            yield event

                        # Stream exhausted: attach the accumulated text and
                        # token counts, then close the span that was left open.
                        if should_send_default_pii() and integration.include_prompts:
                            complete_message = "".join(content_blocks)
                            span.set_data(
                                "ai.responses",
                                [{"type": "text", "text": complete_message}],
                            )
                        span.set_data(TOTAL_TOKENS_USED, input_tokens + output_tokens)
                        span.set_data(PROMPT_TOKENS_USED, input_tokens)
                        span.set_data(COMPLETION_TOKENS_USED, output_tokens)
                        span.set_data("ai.streaming", True)
                        span.__exit__(None, None, None)

                result._iterator = new_iterator()
            else:
                # Unrecognized result shape: flag it and close the span so it
                # is not leaked.
                span.set_data("unknown_response", True)
                span.__exit__(None, None, None)

        return result

    return _sentry_patched_create
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
import pytest

# Skip this whole test package when the `anthropic` SDK is not installed,
# so the suite still passes in environments without the optional dependency.
pytest.importorskip("anthropic")
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Once langchain is merged, this should use the new helper function https://github.com/getsentry/sentry-python/pull/2911/files#diff-c566959db6701d8426b02bcf30caa0222d01aa9931ce9913b1a35fedfd0953c1R59