Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use new scopes API in OpenAI integration. #2853

Merged
merged 1 commit into the base branch on Mar 20, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 26 additions & 35 deletions sentry_sdk/integrations/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,14 @@
from sentry_sdk.tracing import Span

import sentry_sdk
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.utils import logger, capture_internal_exceptions, event_from_exception
from sentry_sdk.utils import (
logger,
capture_internal_exceptions,
event_from_exception,
ensure_integration_enabled,
)

try:
from openai.resources.chat.completions import Completions
Expand Down Expand Up @@ -62,16 +67,14 @@
Embeddings.create = _wrap_embeddings_create(Embeddings.create)


def _capture_exception(hub, exc):
    # type: (Hub, Any) -> None
    """Send *exc* to Sentry through *hub* as an unhandled OpenAI error.

    Does nothing when the SDK has no client bound (i.e. not initialized).
    """
    client = hub.client
    if client is None:
        # No client means the SDK was never initialized; nowhere to report.
        return
    event, hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism={"type": "openai", "handled": False},
    )
    hub.capture_event(event, hint=hint)
def _capture_exception(exc):
    # type: (Any) -> None
    """Report *exc* to Sentry as an unhandled OpenAI-integration error.

    Uses the new scopes API: the event is built from the globally
    configured client's options and sent via ``sentry_sdk.capture_event``.
    """
    client = sentry_sdk.get_client()
    event, hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism={"type": "openai", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)


def _normalize_data(data):
Expand Down Expand Up @@ -145,16 +148,9 @@
def _wrap_chat_completion_create(f):
# type: (Callable[..., Any]) -> Callable[..., Any]
@wraps(f)
@ensure_integration_enabled(OpenAIIntegration, f)
def new_chat_completion(*args, **kwargs):
# type: (*Any, **Any) -> Any
hub = Hub.current
if not hub:
return f(*args, **kwargs)

integration = hub.get_integration(OpenAIIntegration) # type: OpenAIIntegration
if not integration:
return f(*args, **kwargs)

if "messages" not in kwargs:
# invalid call (in all versions of openai), let it return error
return f(*args, **kwargs)
Expand All @@ -177,19 +173,21 @@
try:
res = f(*args, **kwargs)
except Exception as e:
_capture_exception(Hub.current, e)
_capture_exception(e)
span.__exit__(None, None, None)
raise e from None

integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)

with capture_internal_exceptions():
if _should_send_default_pii() and integration.include_prompts:
if should_send_default_pii() and integration.include_prompts:
set_data_normalized(span, "ai.input_messages", messages)

set_data_normalized(span, "ai.model_id", model)
set_data_normalized(span, "ai.streaming", streaming)

if hasattr(res, "choices"):
if _should_send_default_pii() and integration.include_prompts:
if should_send_default_pii() and integration.include_prompts:
set_data_normalized(
span,
"ai.responses",
Expand Down Expand Up @@ -223,7 +221,7 @@
map(lambda chunk: "".join(chunk), data_buf)
)
if (
_should_send_default_pii()
should_send_default_pii()
and integration.include_prompts
):
set_data_normalized(span, "ai.responses", all_responses)
Expand All @@ -245,23 +243,16 @@
# type: (Callable[..., Any]) -> Callable[..., Any]

@wraps(f)
@ensure_integration_enabled(OpenAIIntegration, f)
def new_embeddings_create(*args, **kwargs):
# type: (*Any, **Any) -> Any

hub = Hub.current
if not hub:
return f(*args, **kwargs)

integration = hub.get_integration(OpenAIIntegration) # type: OpenAIIntegration
if not integration:
return f(*args, **kwargs)

with sentry_sdk.start_span(
op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
description="OpenAI Embedding Creation",
) as span:
integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
if "input" in kwargs and (
_should_send_default_pii() and integration.include_prompts
should_send_default_pii() and integration.include_prompts
):
if isinstance(kwargs["input"], str):
set_data_normalized(span, "ai.input_messages", [kwargs["input"]])
Expand All @@ -276,7 +267,7 @@
try:
response = f(*args, **kwargs)
except Exception as e:
_capture_exception(Hub.current, e)
_capture_exception(e)

Check warning (Codecov / codecov/patch) on line 270 in sentry_sdk/integrations/openai.py: added line #L270 was not covered by tests. (View check run for this annotation.)
raise e from None

prompt_tokens = 0
Expand Down
Loading