From fbe50a7af50601cb018db1bfe1ffc147a27853ae Mon Sep 17 00:00:00 2001 From: czyber Date: Sat, 16 Mar 2024 19:10:44 +0100 Subject: [PATCH 1/9] feat(integrations): Add Anthropic Integration This commit adds a basic integration for the anthropic sdk. --- .../test-integrations-data-processing.yml | 12 + mypy.ini | 2 + .../split-tox-gh-actions.py | 1 + sentry_sdk/consts.py | 1 + sentry_sdk/integrations/anthropic.py | 195 ++++++++++++++++ setup.py | 1 + tests/integrations/anthropic/__init__.py | 3 + .../integrations/anthropic/test_anthropic.py | 213 ++++++++++++++++++ tox.ini | 10 + 9 files changed, 438 insertions(+) create mode 100644 sentry_sdk/integrations/anthropic.py create mode 100644 tests/integrations/anthropic/__init__.py create mode 100644 tests/integrations/anthropic/test_anthropic.py diff --git a/.github/workflows/test-integrations-data-processing.yml b/.github/workflows/test-integrations-data-processing.yml index c40d45845d..369c24c0bf 100644 --- a/.github/workflows/test-integrations-data-processing.yml +++ b/.github/workflows/test-integrations-data-processing.yml @@ -42,6 +42,10 @@ jobs: - name: Erase coverage run: | coverage erase + - name: Test anthropic latest + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ matrix.python-version }}-anthropic-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test arq latest run: | set -x # print commands that are executed @@ -98,6 +102,10 @@ jobs: - name: Erase coverage run: | coverage erase + - name: Test anthropic pinned + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-anthropic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test arq pinned run: | set -x # print commands that are executed @@ -143,6 +151,10 @@ jobs: - name: Erase coverage run: | coverage erase + - name: Test anthropic py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-anthropic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test arq py27 run: | set -x # print commands that are executed diff --git a/mypy.ini b/mypy.ini index c1444d61e5..cdf1c090c9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -36,6 +36,8 @@ ignore_missing_imports = True ignore_missing_imports = True [mypy-aiohttp.*] ignore_missing_imports = True +[mypy-anthropic.*] +ignore_missing_imports = True [mypy-sanic.*] ignore_missing_imports = True [mypy-tornado.*] diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py index 13b81283ca..56d961e6fa 100755 --- a/scripts/split-tox-gh-actions/split-tox-gh-actions.py +++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py @@ -66,6 +66,7 @@ "gcp", ], "Data Processing": [ + "anthropic", "arq", "beam", "celery", diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 83076c762f..e8b395d6bb 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -193,6 +193,7 @@ class SPANDATA: class OP: + ANTHROPIC_MESSAGES_CREATE = "ai.messages.create.anthropic" CACHE_GET_ITEM = "cache.get_item" DB = "db" DB_REDIS = "db.redis" diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py new file mode 100644 index 0000000000..ec6e393275 --- /dev/null +++ b/sentry_sdk/integrations/anthropic.py @@ -0,0 +1,195 @@ +from sentry_sdk.consts import OP +from sentry_sdk._functools import wraps +from sentry_sdk.integrations import DidNotEnable, Integration +import sentry_sdk +from 
sentry_sdk.utils import ( + package_version, + event_from_exception, + capture_internal_exceptions, +) +from sentry_sdk.hub import Hub, _should_send_default_pii +from typing import TYPE_CHECKING + +from anthropic.resources import Messages + +if TYPE_CHECKING: + from typing import Any, Iterator + from anthropic.types import ( + MessageStartEvent, + MessageDeltaEvent, + MessageStopEvent, + ContentBlockStartEvent, + ContentBlockDeltaEvent, + ContentBlockStopEvent, + MessageStreamEvent, + ) + +COMPLETION_TOKENS_USED = "ai.completion_tоkens.used" +PROMPT_TOKENS_USED = "ai.prompt_tоkens.used" +TOTAL_TOKENS_USED = "ai.total_tоkens.used" + + +def _capture_exception(hub, exc): + # type: (Hub, Any) -> None + + if hub.client is not None: + event, hint = event_from_exception( + exc, + client_options=hub.client.options, + mechanism={"type": "anthropic", "handled": False}, + ) + hub.capture_event(event, hint=hint) + + +class AnthropicIntegration(Integration): + identifier = "anthropic" + + def __init__(self, include_prompts=True): + # type: (AnthropicIntegration, bool) -> None + self.include_prompts = include_prompts + + @staticmethod + def setup_once(): + # type: () -> None + version = package_version("anthropic") + + if version is None: + raise DidNotEnable("Unparsable anthropic version.") + + if version < (0, 16): + raise DidNotEnable("anthropic 0.16 or newer required.") + + Messages.create = _wrap_message_create(Messages.create) + + +def _calculate_token_usage(result, span): + input_tokens = 0 + output_tokens = 0 + if hasattr(result, "usage"): + usage = result.usage + if hasattr(usage, "input_tokens") and isinstance(usage.input_tokens, int): + input_tokens = usage.input_tokens + if hasattr(usage, "output_tokens") and isinstance(usage.output_tokens, int): + output_tokens = usage.output_tokens + + total_tokens = input_tokens + output_tokens + + if total_tokens != 0: + span.set_data(TOTAL_TOKENS_USED, total_tokens) + if input_tokens != 0: + span.set_data(PROMPT_TOKENS_USED, input_tokens) + if output_tokens != 0: + span.set_data(COMPLETION_TOKENS_USED, output_tokens) + + +def _wrap_message_create(f): + @wraps(f) + def _sentry_patched_create(*args, **kwargs): + hub = Hub.current + if not hub: + return f(*args, **kwargs) + + integration = hub.get_integration(AnthropicIntegration) + if not integration: + return f(*args, **kwargs) + + if "messages" not in kwargs: + return f(*args, **kwargs) + + try: + iter(kwargs["messages"]) + except TypeError: + return f(*args, **kwargs) + + messages = list(kwargs["messages"]) + model = kwargs.get("model") + + span = sentry_sdk.start_span( + op=OP.ANTHROPIC_MESSAGES_CREATE, description="Anthropic messages create" + ) + + try: + result = f(*args, **kwargs) + except Exception as exc: + _capture_exception(hub, exc) + span.finish() + raise exc from None + + with capture_internal_exceptions(): + span.set_data("ai.model_id", model) + if _should_send_default_pii() and integration.include_prompts: + span.set_data("ai.messages", messages) + if hasattr(result, "content"): + if _should_send_default_pii() and integration.include_prompts: + span.set_data( + "ai.responses", + list( + map( + lambda message: { + "type": message.type, + "text": message.text, + }, + result.content, + ) + ), + ) + _calculate_token_usage(result, span) + span.finish() + elif hasattr(result, "_iterator"): + old_iterator = result._iterator + + def new_iterator(): + # type: () -> Iterator[MessageStreamEvent] + input_tokens = 0 + output_tokens = 0 + content_blocks = [] + with capture_internal_exceptions(): + for 
event in old_iterator: + if hasattr(event, "type"): + if ( + event.type == "message_start" + ): # type: MessageStartEvent + usage = event.message.usage + input_tokens += usage.input_tokens + output_tokens += usage.output_tokens + elif ( + event.type == "content_block_start" + ): # type: ContentBlockStartEvent + pass + elif ( + event.type == "content_block_delta" + ): # type: ContentBlockDeltaEvent + content_blocks.append(event.delta.text) + elif ( + event.type == "content_block_stop" + ): # type: ContentBlockStopEvent + pass + elif ( + event.type == "message_delta" + ): # type: MessageDeltaEvent + output_tokens += event.usage.output_tokens + elif ( + event.type == "message_stop" + ): # type: MessageStopEvent + continue + yield event + + if _should_send_default_pii() and integration.include_prompts: + complete_message = "".join(content_blocks) + span.set_data( + "ai.responses", + [{"type": "text", "text": complete_message}], + ) + span.set_data(TOTAL_TOKENS_USED, input_tokens + output_tokens) + span.set_data(PROMPT_TOKENS_USED, input_tokens) + span.set_data(COMPLETION_TOKENS_USED, output_tokens) + span.finish() + + result._iterator = new_iterator() + else: + span.set_data("unknown_response", True) + span.finish() + + return result + + return _sentry_patched_create diff --git a/setup.py b/setup.py index f17ee954b1..58c24d5cbe 100644 --- a/setup.py +++ b/setup.py @@ -45,6 +45,7 @@ def get_file_text(file_name): ], extras_require={ "aiohttp": ["aiohttp>=3.5"], + "anthropic": ["anthropic>=0.16"], "arq": ["arq>=0.23"], "asyncpg": ["asyncpg>=0.23"], "beam": ["apache-beam>=2.12"], diff --git a/tests/integrations/anthropic/__init__.py b/tests/integrations/anthropic/__init__.py new file mode 100644 index 0000000000..29ac4e6ff4 --- /dev/null +++ b/tests/integrations/anthropic/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("anthropic") diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py new file mode 100644 index 0000000000..8c96d9019c --- /dev/null +++ b/tests/integrations/anthropic/test_anthropic.py @@ -0,0 +1,213 @@ +import pytest +from unittest import mock +from anthropic import Anthropic, Stream, AnthropicError +from anthropic.types import Usage, ContentBlock, MessageDeltaUsage, TextDelta +from anthropic.types.message import Message +from anthropic.types.message_start_event import MessageStartEvent +from anthropic.types.content_block_start_event import ContentBlockStartEvent +from anthropic.types.content_block_delta_event import ContentBlockDeltaEvent +from anthropic.types.content_block_stop_event import ContentBlockStopEvent +from anthropic.types.message_delta_event import MessageDeltaEvent, Delta + +from sentry_sdk import start_transaction +from sentry_sdk.consts import OP +from sentry_sdk.integrations.anthropic import ( + AnthropicIntegration, + TOTAL_TOKENS_USED, + PROMPT_TOKENS_USED, + COMPLETION_TOKENS_USED, +) + + +EXAMPLE_MESSAGE = Message( + id="id", + model="model", + role="assistant", + content=[ContentBlock(type="text", text="Hi, I'm Claude.")], + type="message", + usage=Usage(input_tokens=10, output_tokens=20), +) + + +@pytest.mark.parametrize( + "send_default_pii, include_prompts", + [ + (True, True), + (True, False), + (False, True), + (False, False), + ], +) +def test_nonstreaming_create_message( + sentry_init, capture_events, send_default_pii, include_prompts +): + sentry_init( + integrations=[AnthropicIntegration(include_prompts=include_prompts)], + traces_sample_rate=1.0, + 
send_default_pii=send_default_pii, + ) + events = capture_events() + client = Anthropic(api_key="z") + client.messages._post = mock.Mock(return_value=EXAMPLE_MESSAGE) + + messages = [ + { + "role": "user", + "content": "Hello, Claude", + } + ] + + with start_transaction(name="anthropic"): + response = client.messages.create( + max_tokens=1024, messages=messages, model="model" + ) + + assert response == EXAMPLE_MESSAGE + usage = response.usage + + assert usage.input_tokens == 10 + assert usage.output_tokens == 20 + + assert len(events) == 1 + (event,) = events + + assert event["type"] == "transaction" + assert event["transaction"] == "anthropic" + + assert len(event["spans"]) == 1 + (span,) = event["spans"] + + assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE + assert span["description"] == "Anthropic messages create" + assert span["data"]["ai.model_id"] == "model" + + if send_default_pii and include_prompts: + assert span["data"]["ai.messages"] == messages + assert span["data"]["ai.responses"] == [ + {"type": "text", "text": "Hi, I'm Claude."} + ] + else: + assert "ai.messages" not in span["data"] + assert "ai.responses" not in span["data"] + + assert span["data"][PROMPT_TOKENS_USED] == 10 + assert span["data"][COMPLETION_TOKENS_USED] == 20 + assert span["data"][TOTAL_TOKENS_USED] == 30 + + +@pytest.mark.parametrize( + "send_default_pii, include_prompts", + [ + (True, True), + (True, False), + (False, True), + (False, False), + ], +) +def test_streaming_create_message( + sentry_init, capture_events, send_default_pii, include_prompts +): + client = Anthropic(api_key="z") + returned_stream = Stream(cast_to=None, response=None, client=client) + returned_stream._iterator = [ + MessageStartEvent( + message=EXAMPLE_MESSAGE, + type="message_start", + ), + ContentBlockStartEvent( + type="content_block_start", + index=0, + content_block=ContentBlock(type="text", text=""), + ), + ContentBlockDeltaEvent( + delta=TextDelta(text="Hi", type="text_delta"), + index=0, + type="content_block_delta", + ), + ContentBlockDeltaEvent( + delta=TextDelta(text="!", type="text_delta"), + index=0, + type="content_block_delta", + ), + ContentBlockDeltaEvent( + delta=TextDelta(text=" I'm Claude!", type="text_delta"), + index=0, + type="content_block_delta", + ), + ContentBlockStopEvent(type="content_block_stop", index=0), + MessageDeltaEvent( + delta=Delta(), + usage=MessageDeltaUsage(output_tokens=10), + type="message_delta", + ), + ] + + sentry_init( + integrations=[AnthropicIntegration(include_prompts=include_prompts)], + traces_sample_rate=1.0, + send_default_pii=send_default_pii, + ) + events = capture_events() + client.messages._post = mock.Mock(return_value=returned_stream) + + messages = [ + { + "role": "user", + "content": "Hello, Claude", + } + ] + + with start_transaction(name="anthropic"): + message = client.messages.create( + max_tokens=1024, messages=messages, model="model", stream=True + ) + + for _ in message: + pass + + assert message == returned_stream + assert len(events) == 1 + (event,) = events + + assert event["type"] == "transaction" + assert event["transaction"] == "anthropic" + + assert len(event["spans"]) == 1 + (span,) = event["spans"] + + assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE + assert span["description"] == "Anthropic messages create" + assert span["data"]["ai.model_id"] == "model" + + if send_default_pii and include_prompts: + assert span["data"]["ai.messages"] == messages + assert span["data"]["ai.responses"] == [ + {"type": "text", "text": "Hi! 
I'm Claude!"} + ] + + else: + assert "ai.messages" not in span["data"] + assert "ai.responses" not in span["data"] + + assert span["data"][PROMPT_TOKENS_USED] == 10 + assert span["data"][COMPLETION_TOKENS_USED] == 30 + assert span["data"][TOTAL_TOKENS_USED] == 40 + + +def test_exception_message_create(sentry_init, capture_events): + sentry_init(integrations=[AnthropicIntegration()], traces_sample_rate=1.0) + events = capture_events() + + client = Anthropic(api_key="z") + client.messages._post = mock.Mock( + side_effect=AnthropicError("API rate limit reached") + ) + with pytest.raises(AnthropicError): + client.messages.create( + model="some-model", + messages=[{"role": "system", "content": "I'm throwing an exception"}], + max_tokens=1024, + ) + + (event,) = events + assert event["level"] == "error" diff --git a/tox.ini b/tox.ini index 1e7ba06a00..91d872904e 100644 --- a/tox.ini +++ b/tox.ini @@ -29,6 +29,10 @@ envlist = {py3.7,py3.9,py3.11}-aiohttp-v{3.8} {py3.8,py3.11}-aiohttp-latest + # Anthropic + {py3.7,py3.11,py3.12}-anthropic-v{0.16,0.20} + {py3.7,py3.11,py3.12}-anthropic-latest + # Ariadne {py3.8,py3.11}-ariadne-v{0.20} {py3.8,py3.11,py3.12}-ariadne-latest @@ -268,6 +272,11 @@ deps = aiohttp-v3.8: pytest-asyncio<=0.21.1 aiohttp-latest: pytest-asyncio<=0.21.1 + # Anthropic + anthropic-v0.20: anthropic~=0.20.0 + anthropic-v0.16: anthropic~=0.16.0 + anthropic-latest: anthropic + # Ariadne ariadne-v0.20: ariadne~=0.20.0 ariadne-latest: ariadne @@ -587,6 +596,7 @@ setenv = common: TESTPATH=tests gevent: TESTPATH=tests aiohttp: TESTPATH=tests/integrations/aiohttp + anthropic: TESTPATH=tests/integrations/anthropic ariadne: TESTPATH=tests/integrations/ariadne arq: TESTPATH=tests/integrations/arq asgi: TESTPATH=tests/integrations/asgi From d04bc3cb4a48866ada90068dc0c8509235c275fa Mon Sep 17 00:00:00 2001 From: czyber Date: Sun, 17 Mar 2024 09:37:38 +0100 Subject: [PATCH 2/9] feat(integrations): refactor span data to adhere conventions --- sentry_sdk/integrations/anthropic.py | 4 +++- tests/integrations/anthropic/test_anthropic.py | 10 ++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index ec6e393275..5c8698c44e 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -117,8 +117,9 @@ def _sentry_patched_create(*args, **kwargs): with capture_internal_exceptions(): span.set_data("ai.model_id", model) + span.set_data("ai.streaming", False) if _should_send_default_pii() and integration.include_prompts: - span.set_data("ai.messages", messages) + span.set_data("ai.input_messages", messages) if hasattr(result, "content"): if _should_send_default_pii() and integration.include_prompts: span.set_data( @@ -183,6 +184,7 @@ def new_iterator(): span.set_data(TOTAL_TOKENS_USED, input_tokens + output_tokens) span.set_data(PROMPT_TOKENS_USED, input_tokens) span.set_data(COMPLETION_TOKENS_USED, output_tokens) + span.set_data("ai.streaming", True) span.finish() result._iterator = new_iterator() diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index 8c96d9019c..48085f065e 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -82,17 +82,18 @@ def test_nonstreaming_create_message( assert span["data"]["ai.model_id"] == "model" if send_default_pii and include_prompts: - assert span["data"]["ai.messages"] == messages + assert span["data"]["ai.input_messages"] 
== messages assert span["data"]["ai.responses"] == [ {"type": "text", "text": "Hi, I'm Claude."} ] else: - assert "ai.messages" not in span["data"] + assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] assert span["data"][PROMPT_TOKENS_USED] == 10 assert span["data"][COMPLETION_TOKENS_USED] == 20 assert span["data"][TOTAL_TOKENS_USED] == 30 + assert span["data"]["ai.streaming"] is False @pytest.mark.parametrize( @@ -180,18 +181,19 @@ def test_streaming_create_message( assert span["data"]["ai.model_id"] == "model" if send_default_pii and include_prompts: - assert span["data"]["ai.messages"] == messages + assert span["data"]["ai.input_messages"] == messages assert span["data"]["ai.responses"] == [ {"type": "text", "text": "Hi! I'm Claude!"} ] else: - assert "ai.messages" not in span["data"] + assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] assert span["data"][PROMPT_TOKENS_USED] == 10 assert span["data"][COMPLETION_TOKENS_USED] == 30 assert span["data"][TOTAL_TOKENS_USED] == 40 + assert span["data"]["ai.streaming"] is True def test_exception_message_create(sentry_init, capture_events): From f14afafd5b5c3405fd1f8eaf6af017d8ddf51e17 Mon Sep 17 00:00:00 2001 From: czyber Date: Wed, 24 Apr 2024 08:20:45 +0200 Subject: [PATCH 3/9] feat(integrations): fix typing errors --- sentry_sdk/integrations/anthropic.py | 40 +++++++++------------------- 1 file changed, 12 insertions(+), 28 deletions(-) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 5c8698c44e..8278c387f5 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -14,15 +14,8 @@ if TYPE_CHECKING: from typing import Any, Iterator - from anthropic.types import ( - MessageStartEvent, - MessageDeltaEvent, - MessageStopEvent, - ContentBlockStartEvent, - ContentBlockDeltaEvent, - ContentBlockStopEvent, - MessageStreamEvent, - ) + from anthropic.types import MessageStreamEvent + from sentry_sdk.tracing import Span COMPLETION_TOKENS_USED = "ai.completion_tоkens.used" PROMPT_TOKENS_USED = "ai.prompt_tоkens.used" @@ -63,6 +56,7 @@ def setup_once(): def _calculate_token_usage(result, span): + # type: (Messages, Span) -> None input_tokens = 0 output_tokens = 0 if hasattr(result, "usage"): @@ -89,7 +83,9 @@ def _sentry_patched_create(*args, **kwargs): if not hub: return f(*args, **kwargs) - integration = hub.get_integration(AnthropicIntegration) + integration = hub.get_integration( + AnthropicIntegration + ) # type: AnthropicIntegration if not integration: return f(*args, **kwargs) @@ -147,31 +143,19 @@ def new_iterator(): with capture_internal_exceptions(): for event in old_iterator: if hasattr(event, "type"): - if ( - event.type == "message_start" - ): # type: MessageStartEvent + if event.type == "message_start": usage = event.message.usage input_tokens += usage.input_tokens output_tokens += usage.output_tokens - elif ( - event.type == "content_block_start" - ): # type: ContentBlockStartEvent + elif event.type == "content_block_start": pass - elif ( - event.type == "content_block_delta" - ): # type: ContentBlockDeltaEvent + elif event.type == "content_block_delta": content_blocks.append(event.delta.text) - elif ( - event.type == "content_block_stop" - ): # type: ContentBlockStopEvent + elif event.type == "content_block_stop": pass - elif ( - event.type == "message_delta" - ): # type: MessageDeltaEvent + elif event.type == "message_delta": output_tokens += event.usage.output_tokens - elif ( - 
event.type == "message_stop" - ): # type: MessageStopEvent + elif event.type == "message_stop": continue yield event From ea41bda9a4703a8131b46851a536f783a53f7adf Mon Sep 17 00:00:00 2001 From: czyber Date: Thu, 25 Apr 2024 09:09:56 +0200 Subject: [PATCH 4/9] feat(integrations): fix typing errors --- sentry_sdk/integrations/anthropic.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 8278c387f5..4d578e20ed 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -77,8 +77,10 @@ def _calculate_token_usage(result, span): def _wrap_message_create(f): + # type: (Any) -> Any @wraps(f) def _sentry_patched_create(*args, **kwargs): + # type: (*Any, **Any) -> Any hub = Hub.current if not hub: return f(*args, **kwargs) From 79595d406c4c953de73850e9b2212af19936c3e4 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 25 Apr 2024 15:06:55 +0200 Subject: [PATCH 5/9] Updated wraps import --- sentry_sdk/integrations/anthropic.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 4d578e20ed..34dfb3cd6b 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -1,5 +1,6 @@ +from functools import wraps + from sentry_sdk.consts import OP -from sentry_sdk._functools import wraps from sentry_sdk.integrations import DidNotEnable, Integration import sentry_sdk from sentry_sdk.utils import ( From 3d54718365eeae342e8fb4bab3715a5c3211ad6f Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 25 Apr 2024 15:17:13 +0200 Subject: [PATCH 6/9] Make anthropic integration compatible with SDK 2.0 --- sentry_sdk/integrations/anthropic.py | 56 +++++++++++++--------------- 1 file changed, 25 insertions(+), 31 deletions(-) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 34dfb3cd6b..c0778c6761 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -1,40 +1,31 @@ from functools import wraps +import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable, Integration -import sentry_sdk +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( - package_version, - event_from_exception, capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + package_version, ) -from sentry_sdk.hub import Hub, _should_send_default_pii -from typing import TYPE_CHECKING from anthropic.resources import Messages +from typing import TYPE_CHECKING + if TYPE_CHECKING: from typing import Any, Iterator from anthropic.types import MessageStreamEvent from sentry_sdk.tracing import Span + COMPLETION_TOKENS_USED = "ai.completion_tоkens.used" PROMPT_TOKENS_USED = "ai.prompt_tоkens.used" TOTAL_TOKENS_USED = "ai.total_tоkens.used" -def _capture_exception(hub, exc): - # type: (Hub, Any) -> None - - if hub.client is not None: - event, hint = event_from_exception( - exc, - client_options=hub.client.options, - mechanism={"type": "anthropic", "handled": False}, - ) - hub.capture_event(event, hint=hint) - - class AnthropicIntegration(Integration): identifier = "anthropic" @@ -56,6 +47,16 @@ def setup_once(): Messages.create = _wrap_message_create(Messages.create) +def _capture_exception(exc): + # type: (Any) -> None + event, hint = event_from_exception( + exc, + client_options=sentry_sdk.get_client().options, + mechanism={"type": 
"anthropic", "handled": False}, + ) + sentry_sdk.capture_event(event, hint=hint) + + def _calculate_token_usage(result, span): # type: (Messages, Span) -> None input_tokens = 0 @@ -80,18 +81,9 @@ def _calculate_token_usage(result, span): def _wrap_message_create(f): # type: (Any) -> Any @wraps(f) + @ensure_integration_enabled(AnthropicIntegration, f) def _sentry_patched_create(*args, **kwargs): # type: (*Any, **Any) -> Any - hub = Hub.current - if not hub: - return f(*args, **kwargs) - - integration = hub.get_integration( - AnthropicIntegration - ) # type: AnthropicIntegration - if not integration: - return f(*args, **kwargs) - if "messages" not in kwargs: return f(*args, **kwargs) @@ -110,17 +102,19 @@ def _sentry_patched_create(*args, **kwargs): try: result = f(*args, **kwargs) except Exception as exc: - _capture_exception(hub, exc) + _capture_exception(exc) span.finish() raise exc from None + integration = sentry_sdk.get_client().get_integration(AnthropicIntegration) + with capture_internal_exceptions(): span.set_data("ai.model_id", model) span.set_data("ai.streaming", False) - if _should_send_default_pii() and integration.include_prompts: + if should_send_default_pii() and integration.include_prompts: span.set_data("ai.input_messages", messages) if hasattr(result, "content"): - if _should_send_default_pii() and integration.include_prompts: + if should_send_default_pii() and integration.include_prompts: span.set_data( "ai.responses", list( @@ -162,7 +156,7 @@ def new_iterator(): continue yield event - if _should_send_default_pii() and integration.include_prompts: + if should_send_default_pii() and integration.include_prompts: complete_message = "".join(content_blocks) span.set_data( "ai.responses", From 7c079b6273678e28b39df4ed970d4717d27e43a5 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 25 Apr 2024 15:50:01 +0200 Subject: [PATCH 7/9] Using __enter__ and __exit__ of the span to have it on the scope --- sentry_sdk/integrations/anthropic.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index c0778c6761..0149c3ed8c 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -98,12 +98,13 @@ def _sentry_patched_create(*args, **kwargs): span = sentry_sdk.start_span( op=OP.ANTHROPIC_MESSAGES_CREATE, description="Anthropic messages create" ) + span.__enter__() try: result = f(*args, **kwargs) except Exception as exc: _capture_exception(exc) - span.finish() + span.__exit__(None, None, None) raise exc from None integration = sentry_sdk.get_client().get_integration(AnthropicIntegration) @@ -128,7 +129,7 @@ def _sentry_patched_create(*args, **kwargs): ), ) _calculate_token_usage(result, span) - span.finish() + span.__exit__(None, None, None) elif hasattr(result, "_iterator"): old_iterator = result._iterator @@ -166,12 +167,12 @@ def new_iterator(): span.set_data(PROMPT_TOKENS_USED, input_tokens) span.set_data(COMPLETION_TOKENS_USED, output_tokens) span.set_data("ai.streaming", True) - span.finish() + span.__exit__(None, None, None) result._iterator = new_iterator() else: span.set_data("unknown_response", True) - span.finish() + span.__exit__(None, None, None) return result From c860e4372ae249ed92d8cb020383100379aec707 Mon Sep 17 00:00:00 2001 From: czyber Date: Wed, 1 May 2024 09:42:08 +0200 Subject: [PATCH 8/9] feat(integrations): resolve PR change requests This commit updates the anthropic integration to include the newest changes from the 
langchain integration --- sentry_sdk/integrations/anthropic.py | 35 ++++++---------- .../integrations/anthropic/test_anthropic.py | 41 ++++++++----------- 2 files changed, 31 insertions(+), 45 deletions(-) diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 0149c3ed8c..9d43093ac4 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -1,7 +1,8 @@ from functools import wraps import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.ai.monitoring import record_token_usage +from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( @@ -21,11 +22,6 @@ from sentry_sdk.tracing import Span -COMPLETION_TOKENS_USED = "ai.completion_tоkens.used" -PROMPT_TOKENS_USED = "ai.prompt_tоkens.used" -TOTAL_TOKENS_USED = "ai.total_tоkens.used" - - class AnthropicIntegration(Integration): identifier = "anthropic" @@ -69,13 +65,7 @@ def _calculate_token_usage(result, span): output_tokens = usage.output_tokens total_tokens = input_tokens + output_tokens - - if total_tokens != 0: - span.set_data(TOTAL_TOKENS_USED, total_tokens) - if input_tokens != 0: - span.set_data(PROMPT_TOKENS_USED, input_tokens) - if output_tokens != 0: - span.set_data(COMPLETION_TOKENS_USED, output_tokens) + record_token_usage(span, input_tokens, output_tokens, total_tokens) def _wrap_message_create(f): @@ -110,14 +100,14 @@ def _sentry_patched_create(*args, **kwargs): integration = sentry_sdk.get_client().get_integration(AnthropicIntegration) with capture_internal_exceptions(): - span.set_data("ai.model_id", model) - span.set_data("ai.streaming", False) + span.set_data(SPANDATA.AI_MODEL_ID, model) + span.set_data(SPANDATA.AI_STREAMING, False) if should_send_default_pii() and integration.include_prompts: - span.set_data("ai.input_messages", messages) + span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages) if hasattr(result, "content"): if should_send_default_pii() and integration.include_prompts: span.set_data( - "ai.responses", + SPANDATA.AI_RESPONSES, list( map( lambda message: { @@ -160,13 +150,14 @@ def new_iterator(): if should_send_default_pii() and integration.include_prompts: complete_message = "".join(content_blocks) span.set_data( - "ai.responses", + SPANDATA.AI_RESPONSES, [{"type": "text", "text": complete_message}], ) - span.set_data(TOTAL_TOKENS_USED, input_tokens + output_tokens) - span.set_data(PROMPT_TOKENS_USED, input_tokens) - span.set_data(COMPLETION_TOKENS_USED, output_tokens) - span.set_data("ai.streaming", True) + total_tokens = input_tokens + output_tokens + record_token_usage( + span, input_tokens, output_tokens, total_tokens + ) + span.set_data(SPANDATA.AI_STREAMING, True) span.__exit__(None, None, None) result._iterator = new_iterator() diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index 48085f065e..10424771b6 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -10,13 +10,8 @@ from anthropic.types.message_delta_event import MessageDeltaEvent, Delta from sentry_sdk import start_transaction -from sentry_sdk.consts import OP -from sentry_sdk.integrations.anthropic import ( - AnthropicIntegration, - TOTAL_TOKENS_USED, - PROMPT_TOKENS_USED, - COMPLETION_TOKENS_USED, -) +from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.integrations.anthropic import 
AnthropicIntegration EXAMPLE_MESSAGE = Message( @@ -79,20 +74,20 @@ def test_nonstreaming_create_message( assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE assert span["description"] == "Anthropic messages create" - assert span["data"]["ai.model_id"] == "model" + assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"]["ai.input_messages"] == messages - assert span["data"]["ai.responses"] == [ + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages + assert span["data"][SPANDATA.AI_RESPONSES] == [ {"type": "text", "text": "Hi, I'm Claude."} ] else: - assert "ai.input_messages" not in span["data"] - assert "ai.responses" not in span["data"] + assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] + assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["data"][PROMPT_TOKENS_USED] == 10 - assert span["data"][COMPLETION_TOKENS_USED] == 20 - assert span["data"][TOTAL_TOKENS_USED] == 30 + assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 + assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20 + assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 assert span["data"]["ai.streaming"] is False @@ -178,21 +173,21 @@ def test_streaming_create_message( assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE assert span["description"] == "Anthropic messages create" - assert span["data"]["ai.model_id"] == "model" + assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"]["ai.input_messages"] == messages - assert span["data"]["ai.responses"] == [ + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages + assert span["data"][SPANDATA.AI_RESPONSES] == [ {"type": "text", "text": "Hi! I'm Claude!"} ] else: - assert "ai.input_messages" not in span["data"] - assert "ai.responses" not in span["data"] + assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] + assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["data"][PROMPT_TOKENS_USED] == 10 - assert span["data"][COMPLETION_TOKENS_USED] == 30 - assert span["data"][TOTAL_TOKENS_USED] == 40 + assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 + assert span["measurements"]["ai_completion_tokens_used"]["value"] == 30 + assert span["measurements"]["ai_total_tokens_used"]["value"] == 40 assert span["data"]["ai.streaming"] is True From 5ed220c61d32af738e2e0c03dc93397c62080dda Mon Sep 17 00:00:00 2001 From: czyber Date: Wed, 1 May 2024 09:42:38 +0200 Subject: [PATCH 9/9] feat(integrations): update anthropic version --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 889c028bc3..47651c0faf 100644 --- a/tox.ini +++ b/tox.ini @@ -30,7 +30,7 @@ envlist = {py3.8,py3.11}-aiohttp-latest # Anthropic - {py3.7,py3.11,py3.12}-anthropic-v{0.16,0.20} + {py3.7,py3.11,py3.12}-anthropic-v{0.16,0.25} {py3.7,py3.11,py3.12}-anthropic-latest # Ariadne @@ -276,7 +276,7 @@ deps = aiohttp-latest: pytest-asyncio # Anthropic - anthropic-v0.20: anthropic~=0.20.0 + anthropic-v0.25: anthropic~=0.25.0 anthropic-v0.16: anthropic~=0.16.0 anthropic-latest: anthropic
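
A minimal usage sketch of the integration added in this patch series, for reference only; the DSN and model name below are placeholders, and the rest mirrors the API introduced above (`AnthropicIntegration(include_prompts=...)` plus the patched `Messages.create`). Prompts and responses are attached to the span only when both `send_default_pii=True` and `include_prompts=True` are set.

    import sentry_sdk
    from sentry_sdk.integrations.anthropic import AnthropicIntegration
    from anthropic import Anthropic

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,  # ensures the messages.create span is sampled
        send_default_pii=True,   # with include_prompts=True, records prompts/responses
        integrations=[AnthropicIntegration(include_prompts=True)],
    )

    client = Anthropic()  # reads ANTHROPIC_API_KEY from the environment

    with sentry_sdk.start_transaction(name="anthropic-demo"):
        message = client.messages.create(
            model="claude-3-haiku-20240307",  # placeholder model name
            max_tokens=1024,
            messages=[{"role": "user", "content": "Hello, Claude"}],
        )
        # Streaming calls (stream=True) are instrumented as well; the span then
        # carries ai.streaming=True and token counts are accumulated per event.

As of the final patch in the series, token counts are reported through `record_token_usage` (surfaced as the `ai_prompt_tokens_used`, `ai_completion_tokens_used`, and `ai_total_tokens_used` measurements), and prompts/responses are stored under the `SPANDATA.AI_INPUT_MESSAGES` and `SPANDATA.AI_RESPONSES` span data keys.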