Release (#431)
* remove logs

* remove requirements

* Bump version

* Squash

* minor

* switch to http exporter

* fix: add usage_metadata None checks to prevent NoneType errors (#429)

Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: [email protected] <[email protected]>

* chore: bump version to 3.3.14 (#430)

Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: [email protected] <[email protected]>

---------

Co-authored-by: devin-ai-integration[bot] <158243242+devin-ai-integration[bot]@users.noreply.github.com>
karthikscale3 and devin-ai-integration[bot] authored Dec 10, 2024
1 parent 31fc432 commit 752d823
Showing 5 changed files with 7 additions and 7 deletions.
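
The substantive change in this release is the usage_metadata guard from #429, repeated across the instrumentation diffs below: hasattr(obj, "usage_metadata") is true even when the attribute exists but holds None, so the old code could still dereference None and raise a NoneType error. A minimal sketch of the failure mode and the fix, using a hypothetical response class rather than the SDK's real Gemini/VertexAI types:

# Hypothetical stand-in for a provider response; not the SDK's real class.
class FakeResponse:
    usage_metadata = None  # attribute exists, but carries no data

result = FakeResponse()

# Old guard: hasattr() passes, then the attribute access fails on None.
if hasattr(result, "usage_metadata"):
    try:
        result.usage_metadata.prompt_token_count
    except AttributeError as err:
        print(err)  # 'NoneType' object has no attribute 'prompt_token_count'

# New guard from this release: the block is skipped safely.
if hasattr(result, "usage_metadata") and result.usage_metadata is not None:
    print("not reached, since usage_metadata is None")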
6 changes: 3 additions & 3 deletions src/langtrace_python_sdk/instrumentation/gemini/patch.py
@@ -137,7 +137,7 @@ def set_response_attributes(
     if hasattr(result, "text"):
         set_event_completion(span, [{"role": "assistant", "content": result.text}])

-    if hasattr(result, "usage_metadata"):
+    if hasattr(result, "usage_metadata") and result.usage_metadata is not None:
         usage = result.usage_metadata
         input_tokens = usage.prompt_token_count
         output_tokens = usage.candidates_token_count
@@ -152,7 +152,7 @@ def build_streaming_response(span, response):
         item_to_yield = item
         complete_response += str(item.text)
         yield item_to_yield
-        if hasattr(item, "usage_metadata"):
+        if hasattr(item, "usage_metadata") and item.usage_metadata is not None:
             usage = item.usage_metadata
             input_tokens = usage.prompt_token_count
             output_tokens = usage.candidates_token_count
@@ -171,7 +171,7 @@ async def abuild_streaming_response(span, response):
         item_to_yield = item
         complete_response += str(item.text)
         yield item_to_yield
-        if hasattr(item, "usage_metadata"):
+        if hasattr(item, "usage_metadata") and item.usage_metadata is not None:
             usage = item.usage_metadata
             input_tokens = usage.prompt_token_count
             output_tokens = usage.candidates_token_count
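
In the two streaming hunks above, usage data typically arrives only on the final chunk of the stream, so most items yield None. A hedged sketch of the same guard inside a generator, with illustrative dummy chunk objects in place of the SDK's real stream types:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Chunk:
    # Illustrative stand-in for a streamed response chunk.
    text: str
    usage_metadata: Optional[dict] = None  # None on all but the last chunk

def stream_with_usage(chunks):
    complete_response = ""
    for item in chunks:
        complete_response += str(item.text)
        yield item
        # Guard both a missing attribute and an explicit None before
        # reading token counts off the chunk.
        if hasattr(item, "usage_metadata") and item.usage_metadata is not None:
            print("usage:", item.usage_metadata)

chunks = [Chunk("Hel"), Chunk("lo"), Chunk("!", {"prompt_token_count": 3})]
for _ in stream_with_usage(chunks):
    pass  # prints usage once, after the final chunk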
@@ -98,7 +98,7 @@ def traced_method(wrapped, instance, args, kwargs):
         result = wrapped(*args, **kwargs)
         if trace_output:
             span.set_attribute("langchain.outputs", to_json_string(result))
-        if hasattr(result, "usage_metadata"):
+        if hasattr(result, "usage_metadata") and result.usage_metadata is not None:
             span.set_attribute(
                 SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
                 result.usage_metadata["input_tokens"],
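
Note that this hunk reads usage_metadata as a mapping (result.usage_metadata["input_tokens"]) rather than as an object, so a None value fails with TypeError instead of AttributeError; the combined guard covers both shapes. A one-liner to confirm:

usage = None  # illustrative value standing in for result.usage_metadata
try:
    usage["input_tokens"]
except TypeError as err:
    print(err)  # 'NoneType' object is not subscriptable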
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/instrumentation/vertexai/patch.py
@@ -77,7 +77,7 @@ def set_response_attributes(span: Span, result):
     if hasattr(result, "text"):
         set_event_completion(span, [{"role": "assistant", "content": result.text}])

-    if hasattr(result, "usage_metadata"):
+    if hasattr(result, "usage_metadata") and result.usage_metadata is not None:
         usage = result.usage_metadata
         input_tokens = usage.prompt_token_count
         output_tokens = usage.candidates_token_count
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/utils/llm.py
@@ -421,7 +421,7 @@ def set_usage_attributes(self, chunk):
             self.completion_tokens = chunk.usage.completion_tokens

         # VertexAI
-        if hasattr(chunk, "usage_metadata"):
+        if hasattr(chunk, "usage_metadata") and chunk.usage_metadata is not None:
             self.completion_tokens = chunk.usage_metadata.candidates_token_count
             self.prompt_tokens = chunk.usage_metadata.prompt_token_count
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/version.py
@@ -1 +1 @@
-__version__ = "3.3.13"
+__version__ = "3.3.14"
