Merge pull request #22 from plasma-umass/fix-21
Fix #21, remove dependencies
emeryberger authored Oct 3, 2023
2 parents bf91ceb + f9c3429 commit 680964f
Showing 5 changed files with 28 additions and 110 deletions.
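
In substance, the commit drops the `openai_async` and `aiohttp` dependencies, removes the `async` plumbing from the debugger commands, and calls the chat-completion endpoint synchronously through the pinned `openai>=0.27.0` client. A minimal sketch of the call pattern the new code adopts; the model name and prompt below are placeholders, while in ChatDBG they come from `get_model()` and the generated debugger prompt:

```python
# Sketch of the synchronous pattern used after this change (openai 0.27.x,
# pre-1.0 API). "gpt-3.5-turbo" and the message content are placeholders.
import openai

completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    request_timeout=30,  # seconds; mirrors the timeout passed in the diff below
    messages=[{"role": "user", "content": "Explain this stack trace ..."}],
)
print(completion.choices[0].message.content)
```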
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -8,7 +8,7 @@ version = "0.1.0"
authors = [
{ name="Emery Berger", email="[email protected]" },
]
dependencies = ["openai>=0.27.0", "openai_async>=0.0.3", "aiohttp>=3.8.3"]
dependencies = ["openai>=0.27.0"]
description = "ChatDBG."
readme = "README.md"
requires-python = ">=3.7"
7 changes: 3 additions & 4 deletions src/chatdbg/chatdbg_gdb.py
@@ -1,14 +1,13 @@
# Add 'source <path to chatdbg>/chatdbg_gdb.py' to ~/.gdbinit

import asyncio
import gdb
import os
import openai
import openai_async
import pathlib
import sys
import textwrap

import pathlib
import gdb
import openai

the_path = pathlib.Path(__file__).parent.resolve()

9 changes: 5 additions & 4 deletions src/chatdbg/chatdbg_lldb.py
@@ -1,11 +1,11 @@
#!env python3
import lldb
import asyncio
import os
import pathlib
import re
import sys

import pathlib
import lldb

the_path = pathlib.Path(__file__).parent.resolve()

@@ -14,10 +14,10 @@

sys.path.append(os.path.abspath(the_path))

import chatdbg_utils

from typing import Tuple, Union

import chatdbg_utils


def __lldb_init_module(debugger: lldb.SBDebugger, internal_dict: dict) -> None:
# Update the prompt.
@@ -267,4 +267,5 @@ def why(
def why_prompt(
debugger: lldb.SBDebugger, command: str, result: str, internal_dict: dict
) -> None:
"""Output the prompt that `why` would generate (for debugging purposes only)."""
why(debugger, command, result, internal_dict, really_run=False)
84 changes: 9 additions & 75 deletions src/chatdbg/chatdbg_utils.py
@@ -1,9 +1,9 @@
import openai
import openai_async
import os
import sys
import textwrap

import openai


def get_model() -> str:
all_models = ["gpt-4", "gpt-3.5-turbo"]
@@ -69,56 +69,9 @@ def word_wrap_except_code_blocks(text: str) -> str:
return wrapped_text


def word_wrap_except_code_blocks_previous(text: str) -> str:
"""Wraps text except for code blocks.
Splits the text into paragraphs and wraps each paragraph,
except for paragraphs that are inside of code blocks denoted
by ` ``` `. Returns the updated text.
Args:
text: The text to wrap.
Returns:
The wrapped text.
"""
# Split text into paragraphs
paragraphs = text.split("\n\n")
wrapped_paragraphs = []
# Check if currently in a code block.
in_code_block = False
# Loop through each paragraph and apply appropriate wrapping.
for paragraph in paragraphs:
# If this paragraph starts and ends with a code block, add it as is.
if paragraph.startswith("```") and paragraph.endswith("```"):
wrapped_paragraphs.append(paragraph)
continue
# If this is the beginning of a code block add it as is.
if paragraph.startswith("```"):
in_code_block = True
wrapped_paragraphs.append(paragraph)
continue
# If this is the end of a code block stop skipping text.
if paragraph.endswith("```"):
in_code_block = False
wrapped_paragraphs.append(paragraph)
continue
# If we are currently in a code block add the paragraph as is.
if in_code_block:
wrapped_paragraphs.append(paragraph)
else:
# Otherwise, apply text wrapping to the paragraph.
wrapped_paragraph = textwrap.fill(paragraph)
wrapped_paragraphs.append(wrapped_paragraph)
# Join all paragraphs into a single string
wrapped_text = "\n\n".join(wrapped_paragraphs)
return wrapped_text


def read_lines_width() -> int:
return 10


def read_lines(file_path: str, start_line: int, end_line: int) -> str:
"""
Read lines from a file and return a string containing the lines between start_line and end_line.
@@ -165,39 +118,20 @@ async def explain(
print(user_prompt)
return

if not "OPENAI_API_KEY" in os.environ:
print(
"You need a valid OpenAI key to use ChatDBG. You can get a key here: https://openai.com/api/"
)
print("Set the environment variable OPENAI_API_KEY to your key value.")
return

model = get_model()
if not model:
return

try:
completion = await openai_async.chat_complete(
openai.api_key,
timeout=30,
payload={
"model": f"{model}",
"messages": [{"role": "user", "content": user_prompt}],
},
completion = openai.ChatCompletion.create(
model=model,
request_timeout=30,
messages=[{"role": "user", "content": user_prompt}],
)
json_payload = completion.json()
text = json_payload["choices"][0]["message"]["content"]
except (openai.error.AuthenticationError, httpx.LocalProtocolError, KeyError):
# Something went wrong.
print()
text = completion.choices[0].message.content
print(word_wrap_except_code_blocks(text))
except openai.error.AuthenticationError:
print(
"You need a valid OpenAI key to use ChatDBG. You can get a key here: https://openai.com/api/"
)
print("Set the environment variable OPENAI_API_KEY to your key value.")
import sys

sys.exit(1)
except Exception as e:
print(f"EXCEPTION {e}, {type(e)}")
pass
print(word_wrap_except_code_blocks(text))
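
A note on the retained `word_wrap_except_code_blocks` in this file (its body sits outside the hunk shown above): the docstring of the deleted `_previous` variant spells out the contract, namely that prose paragraphs are wrapped while triple-backtick code blocks are passed through untouched. A small usage sketch of that contract, assuming `chatdbg_utils` is importable (the debugger scripts arrange this by appending their own directory to `sys.path`); the sample text is made up:

```python
import chatdbg_utils

fence = "`" * 3  # avoid writing a literal code fence inside this example
sample = (
    "This explanatory paragraph is long enough that it gets re-wrapped "
    "to a fixed width for readability in the debugger console.\n\n"
    f"{fence}\nfor i in range(3):\n    print(i)\n{fence}"
)

# Prose is wrapped; the fenced block is passed through unchanged.
print(chatdbg_utils.word_wrap_except_code_blocks(sample))
```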
36 changes: 10 additions & 26 deletions src/chatdbg/chatdbg_why.py
@@ -1,13 +1,12 @@
import openai
import openai_async
import os
import sys
import textwrap

import chatdbg_utils
import openai


async def why(self, arg):
def why(self, arg):
user_prompt = "Explain what the root cause of this error is, given the following source code and traceback, and generate code that fixes the error."
user_prompt += "\n"
user_prompt += "source code:\n```\n"
@@ -64,35 +63,20 @@ async def why(self, arg):
user_prompt += f"```\n{stack_trace}```\n"
user_prompt += f"Exception: {exception_name} ({exception_value})\n"

# print(user_prompt)
# return

import httpx

model = chatdbg_utils.get_model()
if not model:
return

text = ""
try:
completion = await openai_async.chat_complete(
openai.api_key or "",
timeout=30,
payload={
"model": model,
"messages": [{"role": "user", "content": user_prompt}],
},
completion = openai.ChatCompletion.create(
model=model,
request_timeout=30,
messages=[{"role": "user", "content": user_prompt}],
)
json_payload = completion.json()
if not "choices" in json_payload:
raise openai.error.AuthenticationError
text = json_payload["choices"][0]["message"]["content"]
except (openai.error.AuthenticationError, httpx.LocalProtocolError):
text = completion.choices[0].message.content
print(chatdbg_utils.word_wrap_except_code_blocks(text))
except openai.error.AuthenticationError:
print(
"You need a valid OpenAI key to use commentator. You can get a key here: https://openai.com/api/"
"You need a valid OpenAI key to use ChatDBG. You can get a key here: https://openai.com/api/"
)
print("Set the environment variable OPENAI_API_KEY to your key value.")
except Exception as e:
print(f"EXCEPTION {e}")
pass
print(chatdbg_utils.word_wrap_except_code_blocks(text))
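
Both `why` here and `explain` in chatdbg_utils.py now handle a missing or invalid key by catching `openai.error.AuthenticationError` raised by the client. A sketch of that failure path under the 0.27.x client; the model name is a placeholder and the cleared key is for illustration only:

```python
# With openai 0.27.x, create() raises AuthenticationError when no valid key is
# configured; ChatDBG catches it and prints setup instructions instead.
import os
import openai

os.environ.pop("OPENAI_API_KEY", None)  # simulate an unconfigured environment
openai.api_key = None
try:
    openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "ping"}],
    )
except openai.error.AuthenticationError:
    print("Set the environment variable OPENAI_API_KEY to your key value.")
```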
