Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

logging related changes #612

Merged
merged 2 commits into from
Feb 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions llm-server/models/repository/action_repo.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def create_actions(chatbot_id: str, data: List[ActionDTO]) -> List[dict]:
return actions
except Exception as e:
session.rollback()
logger.error("An exception occurred", error=str(e))
logger.error("An exception occurred", error=e)
raise


Expand Down Expand Up @@ -71,7 +71,7 @@ def create_action(chatbot_id: str, data: ActionDTO) -> dict:
return new_action
except Exception as e:
session.rollback()
logger.error("An exception occurred", error=str(e))
logger.error("An exception occurred", error=e)
raise


Expand Down Expand Up @@ -99,7 +99,7 @@ def update_action(action_id: str, data: ActionDTO) -> Action:
return action
except Exception as e:
session.rollback()
logger.error("An exception occurred", error=str(e))
logger.error("An exception occurred", error=e)
raise


Expand Down
6 changes: 3 additions & 3 deletions llm-server/models/repository/copilot_repo.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ def create_copilot(
logger.error(
"An exception occurred",
app="OPENCOPILOT",
error=str(e),
error=e,
incident="swagger",
)
raise e
Expand Down Expand Up @@ -266,7 +266,7 @@ def store_copilot_global_variables(copilot_id: str, new_variables: dict):
logger.error(
"An exception occurred",
app="OPENCOPILOT",
error=str(e),
error=e,
incident="update_global_variables",
)

Expand Down Expand Up @@ -331,7 +331,7 @@ def update_copilot(
logger.error(
"An exception occurred",
app="OPENCOPILOT",
error=str(e),
error=e,
incident="update_copilot",
)
finally:
Expand Down
4 changes: 3 additions & 1 deletion llm-server/routes/_swagger/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,9 @@ def save_swagger_paths_to_qdrant(swagger_doc: ResolvingParser, bot_id: str):
except KeyError as e:
# Handle the specific key error, log, or take necessary action
logger.error(
f"KeyError in processing document: {str(e)}", bot_id=bot_id
f"KeyError in processing document: {str(e)}",
bot_id=bot_id,
error=e,
)

point_ids = vector_store.add_documents(documents)
Expand Down
16 changes: 15 additions & 1 deletion llm-server/routes/chat/chat_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,14 @@ async def handle_chat_send_common(
if not bot_token:
return Response(response="bot token is required", status=400)

logger.error(
"chat/send",
error=Exception("Something went wrong"),
bot_token=bot_token,
x_message=message,
session_id=session_id,
)

try:
bot = find_one_or_fail_by_token(bot_token)
base_prompt = bot.prompt_message
Expand Down Expand Up @@ -229,6 +237,12 @@ async def handle_chat_send_common(
)
create_chat_histories(str(bot.id), chat_records)

logger.error(
"An exception occurred",
incident="chat/send",
bot_token=bot_token,
)

if result.error:
logger.error("chat_conversation_error", message=result.error)

Expand All @@ -237,7 +251,7 @@ async def handle_chat_send_common(
logger.error(
"An exception occurred",
incident="chat/send",
error=str(e),
error=e,
bot_token=bot_token,
)
emit(session_id, str(e))
Expand Down
2 changes: 1 addition & 1 deletion llm-server/routes/flow/utils/api_retrievers.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ async def get_relevant_documents(
incident=f"get_relevant_{collection_name}",
bot_id=bot_id,
payload=text,
error=str(e),
error=e,
)
return []

Expand Down
4 changes: 2 additions & 2 deletions llm-server/routes/flow/utils/run_openapi_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ async def run_actions(
text=text,
headers=headers,
app=app,
error=str(e),
error=e,
)

formatted_error = convert_json_error_to_text(
Expand All @@ -116,6 +116,6 @@ async def run_actions(
error_message = (
f"{str(e)}: {api_payload.endpoint}" if api_payload is not None else ""
)
logger.error("OpenAI exception", bot_id=bot_id, error=str(e))
logger.error("OpenAI exception", bot_id=bot_id, error=e)
emit(session_id, error_message) if is_streaming else None
return error_message, api_request_data
2 changes: 1 addition & 1 deletion llm-server/routes/flow/utils/run_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ async def run_flow(
"An exception occurred",
bot_id=bot_id,
payload=json.dumps(payload_data),
error=str(e),
error=e,
)

output = {"response": result if not error else "", "error": error}
Expand Down
49 changes: 22 additions & 27 deletions llm-server/utils/get_logger.py
Original file line number Diff line number Diff line change
@@ -1,49 +1,44 @@
import structlog
import os
import logging
import sentry_sdk
import os

sentry_sdk.init(
traces_sample_rate=1.0,
profiles_sample_rate=1.0,
)

dsn = os.getenv("SENTRY_DSN")
structlog.configure(
processors=[
structlog.processors.add_log_level,
structlog.processors.StackInfoRenderer(),
structlog.dev.set_exc_info,
structlog.processors.TimeStamper(fmt="iso"),
structlog.processors.dict_tracebacks,
structlog.processors.JSONRenderer(),
],
logger_factory=structlog.stdlib.LoggerFactory(),
cache_logger_on_first_use=True,
)

logging.basicConfig(level=logging.INFO, format="%(message)s")


class CustomLogger:
def __init__(self, module_name: str = __name__, level: int = logging.INFO):
self.logger = structlog.get_logger(module_name)
logging.basicConfig(level=level, format="%(message)s")
self.logger = logging.getLogger(module_name)
self.logger.setLevel(level)

def log(self, level, event, error=None, **kwargs):
with sentry_sdk.configure_scope() as scope:
exc_info = kwargs.pop("exc_info", None)
scope.set_extra("extra_info", kwargs)
# Log to Sentry if configured
if dsn is not None and error is not None:
sentry_sdk.capture_exception(error)
elif dsn is not None:
sentry_sdk.capture_message(event, level=level, scope=scope)

def log(self, level, event, **kwargs):
self.logger.log(level, event=event, **kwargs)
scope.clear()

# only enable this for prod environment
sentry_sdk.capture_message(
event, level=level, scope=None
) if dsn is not None else None
self.logger.log(level, event, exc_info=exc_info, extra=kwargs)

def info(self, event, **kwargs):
self.log(logging.INFO, event, **kwargs)
self.log(logging.INFO, event, error=None, **kwargs)

def warn(self, event, **kwargs):
self.log(logging.WARNING, event, **kwargs)
self.log(logging.WARNING, event, error=None, **kwargs)

def error(self, event, **kwargs):
self.log(logging.ERROR, event, exc_info=True, **kwargs)
def error(self, event, error=Exception("custom_error"), **kwargs):
self.log(logging.ERROR, event, error, **kwargs)

def debug(self, event, **kwargs):
self.log(logging.DEBUG, event, **kwargs)
self.log(logging.DEBUG, event, error=None, **kwargs)
2 changes: 1 addition & 1 deletion llm-server/utils/make_api_call.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ async def make_api_request(
except aiohttp.ClientError as e:
logger.error(
"API request failed",
e=str(e),
error=e,
headers=headers,
url=url,
params=path_params,
Expand Down
Loading