diff --git a/README.md b/README.md
index b256306a..43c3a4ba 100644
--- a/README.md
+++ b/README.md
@@ -139,14 +139,14 @@ langtrace.init(custom_remote_exporter=, batch=)
 
 ### Configure Langtrace
 
-| Parameter | Type | Default Value | Description |
-| -------------------------- | ----------------------------------- | ----------------------------- | ------------------------------------------------------------------------------ |
-| `api_key` | `str` | `LANGTRACE_API_KEY` or `None` | The API key for authentication. |
-| `batch` | `bool` | `True` | Whether to batch spans before sending them. |
-| `write_spans_to_console` | `bool` | `False` | Whether to write spans to the console. |
-| `custom_remote_exporter` | `Optional[Exporter]` | `None` | Custom remote exporter. If `None`, a default `LangTraceExporter` will be used. |
-| `api_host` | `Optional[str]` | `https://langtrace.ai/` | The API host for the remote exporter. |
-| `disable_instrumentations` | `Optional[DisableInstrumentations]` | `None` | You can pass an object to disable instrumentation for specific vendors ex: `{'only': ['openai']}` or `{'all_except': ['openai']}`
+| Parameter | Type | Default Value | Description |
+| -------------------------- | ----------------------------------- | ----------------------------- | --------------------------------------------------------------------------------------------------------------------------------- |
+| `api_key` | `str` | `LANGTRACE_API_KEY` or `None` | The API key for authentication. |
+| `batch` | `bool` | `True` | Whether to batch spans before sending them. |
+| `write_spans_to_console` | `bool` | `False` | Whether to write spans to the console. |
+| `custom_remote_exporter` | `Optional[Exporter]` | `None` | Custom remote exporter. If `None`, a default `LangTraceExporter` will be used. |
+| `api_host` | `Optional[str]` | `https://langtrace.ai/` | The API host for the remote exporter. |
+| `disable_instrumentations` | `Optional[DisableInstrumentations]` | `None` | You can pass an object to disable instrumentation for specific vendors ex: `{'only': ['openai']}` or `{'all_except': ['openai']}` |
 
 ### Additional Customization
 
@@ -165,7 +165,30 @@ def example():
     return response
 ```
 
-- `with_additional_attributes` - this function is designed to enhance the traces by adding custom attributes to the current context. These custom attributes provide extra details about the operations being performed, making it easier to analyze and understand their behavior.
+- `inject_additional_attributes` - this function is designed to enhance the traces by adding custom attributes to the current context. These custom attributes provide extra details about the operations being performed, making it easier to analyze and understand their behavior.
+
+```python
+from langtrace_python_sdk import inject_additional_attributes
+
+
+
+def do_llm_stuff(name=""):
+    response = client.chat.completions.create(
+        model="gpt-4",
+        messages=[{"role": "user", "content": "Say this is a test three times"}],
+        stream=False,
+    )
+    return response
+
+
+def main():
+    response = inject_additional_attributes(lambda: do_llm_stuff(name="llm"), {'user.id': 'userId'})
+
+    # if the function does not take arguments, this syntax will work
+    response = inject_additional_attributes(do_llm_stuff, {'user.id': 'userId'})
+```
+
+- `with_additional_attributes` - behaves the same as `inject_additional_attributes`, but as a decorator; it will be deprecated soon.
 
 ```python
 from langtrace_python_sdk import with_langtrace_root_span, with_additional_attributes
@@ -237,7 +260,6 @@ We welcome contributions to this project. To get started, fork this repository a
 
 If you want to run any of the examples go to `run_example.py` file, you will find `ENABLED_EXAMPLES`. choose the example you want to run and just toggle the flag to `True` and run the file using `python src/run_example.py`
 
-
 ---
 
 ## Security
diff --git a/src/examples/langchain_example/basic.py b/src/examples/langchain_example/basic.py
index e91677d8..cced1030 100644
--- a/src/examples/langchain_example/basic.py
+++ b/src/examples/langchain_example/basic.py
@@ -18,7 +18,6 @@
 langtrace.init()
 
 
-# @with_additional_attributes({"user.id": "1234", "user.feedback.rating": 1})
 def api_call_1():
     llm = ChatOpenAI()
     prompt = ChatPromptTemplate.from_messages(
@@ -33,7 +32,6 @@
     print(res)
 
 
-# @with_additional_attributes({"user.id": "37373", "user.feedback.rating": 1})
 def api_call_2():
     llm = ChatOpenAI()
     prompt = ChatPromptTemplate.from_messages(
diff --git a/src/examples/openai_example/chat_completion.py b/src/examples/openai_example/chat_completion.py
index c5260d17..2bc11dcd 100644
--- a/src/examples/openai_example/chat_completion.py
+++ b/src/examples/openai_example/chat_completion.py
@@ -13,7 +13,6 @@
 client = OpenAI()
 
 
-@with_additional_attributes({"user.id": "1234", "user.feedback.rating": 1})
 def api():
     response = client.chat.completions.create(
         model="gpt-4",
diff --git a/src/examples/pinecone_example/__init__.py b/src/examples/pinecone_example/__init__.py
index 0b137678..73d839b1 100644
--- a/src/examples/pinecone_example/__init__.py
+++ b/src/examples/pinecone_example/__init__.py
@@ -1,9 +1,16 @@
-from langtrace_python_sdk import with_langtrace_root_span
+from langtrace_python_sdk import (
+    get_prompt_from_registry,
+    with_langtrace_root_span,
+    with_additional_attributes,
+    inject_additional_attributes,
+)
 
 
 class PineconeRunner:
     @with_langtrace_root_span("Pinecone")
     def run(self):
-        from .basic import basic as basic_app
+        from .basic import basic as do_llm_stuff
 
-        basic_app()
+        response = inject_additional_attributes(do_llm_stuff, {"user.id": 1234})
+        print(response)
+        return response
diff --git a/src/examples/pinecone_example/basic.py b/src/examples/pinecone_example/basic.py
index 621f03a8..a6c78044 100644
--- a/src/examples/pinecone_example/basic.py
+++ b/src/examples/pinecone_example/basic.py
@@ -7,6 +7,7 @@
 from pinecone import Pinecone, ServerlessSpec
 
 from langtrace_python_sdk import (
+    get_prompt_from_registry,
     langtrace,
     with_langtrace_root_span,
     with_additional_attributes,
@@ -14,7 +15,6 @@
 from langtrace_python_sdk.utils.with_root_span import SendUserFeedback
 
 _ = load_dotenv(find_dotenv())
-
 langtrace.init()
 
 client = OpenAI()
@@ -32,10 +32,8 @@ def create_index():
     )
 
 
-@with_additional_attributes({"db.embedding_model": "text-embedding-ada-002"})
 @with_langtrace_root_span("Pinecone Basic")
-def basic(span_id=None, trace_id=None):
-
+def basic():
     result = client.embeddings.create(
         model="text-embedding-ada-002",
         input="Some random text string goes here",
@@ -53,10 +51,7 @@
     resp = index.query(
         vector=embedding, top_k=1, include_values=False, namespace="test-namespace"
    )
-    SendUserFeedback().evaluate(
-        {"spanId": span_id, "traceId": trace_id, "userScore": 1, "userId": "123"}
-    )
+    # SendUserFeedback().evaluate(
+    #     {"spanId": span_id, "traceId": trace_id, "userScore": 1, "userId": "123"}
+    # )
     return [res, resp]
-
-
-# create_index()
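Note on the example changes above: each `@with_additional_attributes` decorator is dropped in favour of wrapping the call in `inject_additional_attributes`. A minimal migration sketch, assuming an `OpenAI()` client and reusing the `db.embedding_model` attribute from the Pinecone example (the `embed` helper is illustrative, not part of this patch):

```python
from openai import OpenAI
from langtrace_python_sdk import langtrace, inject_additional_attributes

langtrace.init()
client = OpenAI()


def embed():
    # previously: @with_additional_attributes({"db.embedding_model": "text-embedding-ada-002"})
    return client.embeddings.create(
        model="text-embedding-ada-002",
        input="Some random text string goes here",
        encoding_format="float",
    )


# new style: the attributes are attached to the current context before the callable runs
result = inject_additional_attributes(embed, {"db.embedding_model": "text-embedding-ada-002"})
```

For functions that take arguments, the README change in this patch wraps the call in a lambda so nothing executes before the attributes are attached.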
diff --git a/src/langtrace_python_sdk/__init__.py b/src/langtrace_python_sdk/__init__.py
index bdca4653..74f2623f 100644
--- a/src/langtrace_python_sdk/__init__.py
+++ b/src/langtrace_python_sdk/__init__.py
@@ -21,12 +21,16 @@
 )
 
 from langtrace_python_sdk.utils.prompt_registry import get_prompt_from_registry
-from langtrace_python_sdk.utils.with_root_span import SendUserFeedback
+from langtrace_python_sdk.utils.with_root_span import (
+    SendUserFeedback,
+    inject_additional_attributes,
+)
 
 __all__ = [
     "langtrace",
     "with_langtrace_root_span",
     "with_additional_attributes",
+    "inject_additional_attributes",
     "get_prompt_from_registry",
     "SendUserFeedback",
 ]
diff --git a/src/langtrace_python_sdk/utils/with_root_span.py b/src/langtrace_python_sdk/utils/with_root_span.py
index 8dde43de..010ce0c9 100644
--- a/src/langtrace_python_sdk/utils/with_root_span.py
+++ b/src/langtrace_python_sdk/utils/with_root_span.py
@@ -16,6 +16,7 @@
 
 import asyncio
 import os
+from deprecated import deprecated
 from functools import wraps
 from typing import Optional
 
@@ -23,9 +24,6 @@
 from opentelemetry import baggage, context, trace
 from opentelemetry.trace import SpanKind
 
-from langtrace_python_sdk.constants.exporter.langtrace_exporter import (
-    LANGTRACE_REMOTE_URL,
-)
 from langtrace_python_sdk.constants.instrumentation.common import (
     LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY,
 )
@@ -87,7 +85,14 @@ async def async_wrapper(*args, **kwargs):
     return decorator
 
 
+@deprecated(reason="Use inject_additional_attributes instead")
 def with_additional_attributes(attributes={}):
+    print(
+        Fore.YELLOW
+        + "with_additional_attributes is deprecated, use inject_additional_attributes instead"
+        + Fore.RESET
+    )
+
     def decorator(func):
         @wraps(func)
         def sync_wrapper(*args, **kwargs):
@@ -113,6 +118,16 @@ async def async_wrapper(*args, **kwargs):
     return decorator
 
 
+def inject_additional_attributes(fn, attributes=None):
+    if attributes:
+        new_ctx = baggage.set_baggage(
+            LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY, attributes
+        )
+        context.attach(new_ctx)
+
+    return fn()
+
+
 class SendUserFeedback:
     _langtrace_host: str
     _langtrace_api_key: str
diff --git a/src/langtrace_python_sdk/version.py b/src/langtrace_python_sdk/version.py
index 1fde5cdb..a2586c2c 100644
--- a/src/langtrace_python_sdk/version.py
+++ b/src/langtrace_python_sdk/version.py
@@ -1 +1 @@
-__version__ = "2.1.15"
+__version__ = "2.1.16"
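For context on the new helper: `inject_additional_attributes` stores the attributes as OpenTelemetry baggage on the current context and then invokes the callable, so instrumentation running inside that call can read the baggage back and copy it onto spans. A self-contained sketch of that flow, assuming a stand-in `ATTRIBUTES_KEY` string and an illustrative consumer (neither the real value of `LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY` nor the SDK's actual instrumentation code appears in this diff):

```python
from opentelemetry import baggage, context, trace

# stand-in for LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY; the real constant lives in
# langtrace_python_sdk.constants.instrumentation.common
ATTRIBUTES_KEY = "langtrace_additional_attributes"


def inject(fn, attributes=None):
    # mirrors inject_additional_attributes: attach the attributes as baggage, then call fn
    if attributes:
        context.attach(baggage.set_baggage(ATTRIBUTES_KEY, attributes))
    return fn()


def instrumented_call():
    # illustrative consumer: read the baggage back and copy it onto the active span
    tracer = trace.get_tracer(__name__)
    with tracer.start_as_current_span("llm.call") as span:
        for key, value in (baggage.get_baggage(ATTRIBUTES_KEY) or {}).items():
            span.set_attribute(key, value)
        return "ok"


print(inject(instrumented_call, {"user.id": "userId"}))
```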