
Commit

Create utils.codex
Refactor functions/codex
Minor improvements
montyly committed Dec 7, 2022
1 parent 00d33c6 commit f62433b
Showing 5 changed files with 187 additions and 105 deletions.
80 changes: 43 additions & 37 deletions slither/__main__.py
@@ -166,7 +166,6 @@ def process_from_asts(
def get_detectors_and_printers() -> Tuple[
List[Type[AbstractDetector]], List[Type[AbstractPrinter]]
]:

detectors_ = [getattr(all_detectors, name) for name in dir(all_detectors)]
detectors = [d for d in detectors_ if inspect.isclass(d) and issubclass(d, AbstractDetector)]

@@ -286,7 +285,6 @@ def parse_filter_paths(args: argparse.Namespace) -> List[str]:
def parse_args(
detector_classes: List[Type[AbstractDetector]], printer_classes: List[Type[AbstractPrinter]]
) -> argparse.Namespace:

usage = "slither target [flag]\n"
usage += "\ntarget can be:\n"
usage += "\t- file.sol // a Solidity file\n"
@@ -301,41 +299,6 @@ def parse_args(

parser.add_argument("filename", help=argparse.SUPPRESS)

parser.add_argument(
"--codex",
help="Enable codex (require an OpenAI API Key)",
action="store_true",
default=defaults_flag_in_config["codex"],
)

parser.add_argument(
"--codex-contracts",
help="Comma separated list of contracts to submit to OpenAI Codex",
action="store",
default=defaults_flag_in_config["codex_contracts"],
)

parser.add_argument(
"--codex-model",
help="Name of the Codex model to use (affects pricing). Defaults to 'text-davinci-003'",
action="store",
default=defaults_flag_in_config["codex_model"],
)

parser.add_argument(
"--codex-temperature",
help="Temperature to use with Codex. Lower number indicates a more precise answer while higher numbers return more creative answers. Defaults to 0",
action="store",
default=defaults_flag_in_config["codex_temperature"],
)

parser.add_argument(
"--codex-max-tokens",
help="Maximum amount of tokens to use on the response. This number plus the size of the prompt can be no larger than the limit (4097 for text-davinci-003)",
action="store",
default=defaults_flag_in_config["codex_max_tokens"],
)

cryticparser.init(parser)

parser.add_argument(
Expand All @@ -351,6 +314,7 @@ def parse_args(
"Checklist (consider using https://github.com/crytic/slither-action)"
)
group_misc = parser.add_argument_group("Additional options")
group_codex = parser.add_argument_group("Codex (https://beta.openai.com/docs/guides/code)")

group_detector.add_argument(
"--detect",
@@ -591,6 +555,48 @@ def parse_args(
default=False,
)

group_codex.add_argument(
"--codex",
help="Enable codex (require an OpenAI API Key)",
action="store_true",
default=defaults_flag_in_config["codex"],
)

group_codex.add_argument(
"--codex-log",
help="Log codex queries (in crytic_export/codex/)",
action="store_true",
default=False,
)

group_codex.add_argument(
"--codex-contracts",
help="Comma separated list of contracts to submit to OpenAI Codex",
action="store",
default=defaults_flag_in_config["codex_contracts"],
)

group_codex.add_argument(
"--codex-model",
help="Name of the Codex model to use (affects pricing). Defaults to 'text-davinci-003'",
action="store",
default=defaults_flag_in_config["codex_model"],
)

group_codex.add_argument(
"--codex-temperature",
help="Temperature to use with Codex. Lower number indicates a more precise answer while higher numbers return more creative answers. Defaults to 0",
action="store",
default=defaults_flag_in_config["codex_temperature"],
)

group_codex.add_argument(
"--codex-max-tokens",
help="Maximum amount of tokens to use on the response. This number plus the size of the prompt can be no larger than the limit (4097 for text-davinci-003)",
action="store",
default=defaults_flag_in_config["codex_max_tokens"],
)

# debugger command
parser.add_argument("--debug", help=argparse.SUPPRESS, action="store_true", default=False)

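The Codex flags now live in their own argparse group instead of sitting among the top-level options. A minimal, self-contained sketch of the same pattern — the flag names mirror this commit, but the defaults are hard-coded here rather than pulled from defaults_flag_in_config, and the type=int coercion is purely illustrative:

import argparse

parser = argparse.ArgumentParser(prog="slither")
group_codex = parser.add_argument_group("Codex (https://beta.openai.com/docs/guides/code)")

# Boolean switch: False unless --codex is passed on the command line
group_codex.add_argument("--codex", action="store_true", default=False)
# Value-taking flags fall back to their defaults when omitted
group_codex.add_argument("--codex-model", action="store", default="text-davinci-003")
group_codex.add_argument("--codex-max-tokens", action="store", type=int, default=300)

args = parser.parse_args(["--codex"])
print(args.codex, args.codex_model, args.codex_max_tokens)  # True text-davinci-003 300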
149 changes: 85 additions & 64 deletions slither/detectors/functions/codex.py
@@ -1,14 +1,16 @@
import logging
import os
from typing import List
import uuid
from typing import List, Union

from slither.detectors.abstract_detector import AbstractDetector, DetectorClassification
from slither.utils.output import Output
from slither.utils import codex
from slither.utils.output import Output, SupportedOutput

logger = logging.getLogger("Slither")

VULN_FOUND = "VULN_FOUND"


class Codex(AbstractDetector):
"""
Use codex to detect vulnerability
@@ -30,86 +32,105 @@ class Codex(AbstractDetector):

WIKI_RECOMMENDATION = "Review codex's message."

def _run_codex(self, logging_file: str, prompt: str) -> str:
"""
Handle the codex logic
Args:
logging_file (str): file where to log the queries
prompt (str): prompt to send to codex
Returns:
codex answer (str)
"""
openai_module = codex.openai_module() # type: ignore
if openai_module is None:
return ""

if self.slither.codex_log:
codex.log_codex(logging_file, "Q: " + prompt)

answer = ""
res = {}
try:
res = openai_module.Completion.create(
prompt=prompt,
model=self.slither.codex_model,
temperature=self.slither.codex_temperature,
max_tokens=self.slither.codex_max_tokens,
)
except Exception as e: # pylint: disable=broad-except
logger.info("OpenAI request failed: " + str(e))

# """ OpenAI completion response shape example:
# {
# "choices": [
# {
# "finish_reason": "stop",
# "index": 0,
# "logprobs": null,
# "text": "VULNERABILITIES:. The withdraw() function does not check..."
# }
# ],
# "created": 1670357537,
# "id": "cmpl-6KYaXdA6QIisHlTMM7RCJ1nR5wTKx",
# "model": "text-davinci-003",
# "object": "text_completion",
# "usage": {
# "completion_tokens": 80,
# "prompt_tokens": 249,
# "total_tokens": 329
# }
# } """

if res:
if self.slither.codex_log:
codex.log_codex(logging_file, "A: " + str(res))
else:
codex.log_codex(logging_file, "A: Codex failed")

if res.get("choices", []) and VULN_FOUND in res["choices"][0].get("text", ""):
# remove VULN_FOUND keyword and cleanup
answer = (
res["choices"][0]["text"]
.replace(VULN_FOUND, "")
.replace("\n", "")
.replace(": ", "")
)
return answer

def _detect(self) -> List[Output]:
results: List[Output] = []

if not self.slither.codex_enabled:
return []

try:
# pylint: disable=import-outside-toplevel
import openai
except ImportError:
logging.info("OpenAI was not installed")
logging.info('run "pip install openai"')
return []

api_key = os.getenv("OPENAI_API_KEY")
if api_key is None:
logging.info(
"Please provide an Open API Key in OPENAI_API_KEY (https://beta.openai.com/account/api-keys)"
)
return []
openai.api_key = api_key
logging_file = str(uuid.uuid4())

for contract in self.compilation_unit.contracts:
if self.slither.codex_contracts != "all" and contract.name not in self.slither.codex_contracts.split(","):
if (
self.slither.codex_contracts != "all"
and contract.name not in self.slither.codex_contracts.split(",")
):
continue
prompt = "Analyze this Solidity contract and find the vulnerabilities. If you find any vulnerabilities, begin the response with {}".format(VULN_FOUND)
prompt = f"Analyze this Solidity contract and find the vulnerabilities. If you find any vulnerabilities, begin the response with {VULN_FOUND}\n"
src_mapping = contract.source_mapping
content = contract.compilation_unit.core.source_code[src_mapping.filename.absolute]
start = src_mapping.start
end = src_mapping.start + src_mapping.length
prompt += content[start:end]
logging.info("Querying OpenAI")
print("Querying OpenAI")
answer = ""
res = {}
try:
res = openai.Completion.create( # type: ignore
prompt=prompt,
model=self.slither.codex_model,
temperature=self.slither.codex_temperature,
max_tokens=self.slither.codex_max_tokens,
)
except Exception as e:
print("OpenAI request failed: " + str(e))
logging.info("OpenAI request failed: " + str(e))

""" OpenAI completion response shape example:
{
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"text": "VULNERABILITIES:. The withdraw() function does not check..."
}
],
"created": 1670357537,
"id": "cmpl-6KYaXdA6QIisHlTMM7RCJ1nR5wTKx",
"model": "text-davinci-003",
"object": "text_completion",
"usage": {
"completion_tokens": 80,
"prompt_tokens": 249,
"total_tokens": 329
}
} """

if len(res.get("choices", [])) and VULN_FOUND in res["choices"][0].get("text", ""):
# remove VULN_FOUND keyword and cleanup
answer = res["choices"][0]["text"].replace(VULN_FOUND, "").replace("\n", "").replace(": ", "")

if len(answer):
info = [

answer = self._run_codex(logging_file, prompt)

if answer:
info: List[Union[str, SupportedOutput]] = [
"Codex detected a potential bug in ",
contract,
"\n",
answer,
"\n",
]

res = self.generate_result(info)
results.append(res)
new_result = self.generate_result(info)
results.append(new_result)
return results
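The detector now routes every query through _run_codex, which expects the pre-1.0 openai.Completion response shape shown in the comment above and keeps an answer only when the model echoes the VULN_FOUND marker requested by the prompt. A minimal sketch of just that parsing step, using a hand-written response dict in place of a real API call:

VULN_FOUND = "VULN_FOUND"

# Hand-written stand-in for the dict returned by openai.Completion.create(...)
res = {
    "choices": [
        {"text": "VULN_FOUND: The withdraw() function does not check the return value\n"}
    ]
}

answer = ""
if res.get("choices", []) and VULN_FOUND in res["choices"][0].get("text", ""):
    # Strip the VULN_FOUND keyword and clean up, as _run_codex does
    answer = (
        res["choices"][0]["text"]
        .replace(VULN_FOUND, "")
        .replace("\n", "")
        .replace(": ", "")
    )

print(answer)  # The withdraw() function does not check the return value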
9 changes: 5 additions & 4 deletions slither/slither.py
@@ -85,10 +85,11 @@ def __init__(self, target: Union[str, CryticCompile], **kwargs):

# Indicate if Codex related features should be used
self.codex_enabled = kwargs.get("codex", False)
self.codex_contracts = kwargs.get("codex_contracts")
self.codex_model = kwargs.get("codex_model")
self.codex_temperature = kwargs.get("codex_temperature")
self.codex_max_tokens = kwargs.get("codex_max_tokens")
self.codex_contracts = kwargs.get("codex_contracts", "all")
self.codex_model = kwargs.get("codex_model", "text-davinci-003")
self.codex_temperature = kwargs.get("codex_temperature", 0)
self.codex_max_tokens = kwargs.get("codex_max_tokens", 300)
self.codex_log = kwargs.get("codex_log", False)

self._parsers: List[SlitherCompilationUnitSolc] = []
try:
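With these defaults in place, a programmatic caller only has to opt in to Codex; any option left out falls back to the same value the CLI would use. A hedged usage sketch — the target file and contract name are placeholders, not part of this commit:

from slither.slither import Slither

# "Token.sol" and "Token" are hypothetical; any compilable target and contract work
sl = Slither("Token.sol", codex=True, codex_contracts="Token", codex_log=True)

print(sl.codex_model)        # text-davinci-003
print(sl.codex_temperature)  # 0
print(sl.codex_max_tokens)   # 300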
53 changes: 53 additions & 0 deletions slither/utils/codex.py
@@ -0,0 +1,53 @@
import logging
import os
from pathlib import Path

logger = logging.getLogger("Slither")


# TODO: investigate how to set the correct return type
# So that the other modules can work with openai
def openai_module(): # type: ignore
"""
Return the openai module
Consider checking the usage of open (slither.codex_enabled) before using this function
Returns:
Optional[the openai module]
"""
try:
# pylint: disable=import-outside-toplevel
import openai

api_key = os.getenv("OPENAI_API_KEY")
if api_key is None:
logger.info(
"Please provide an Open API Key in OPENAI_API_KEY (https://beta.openai.com/account/api-keys)"
)
return None
openai.api_key = api_key
except ImportError:
logger.info("OpenAI was not installed") # type: ignore
logger.info('run "pip install openai"')
return None
return openai


def log_codex(filename: str, prompt: str) -> None:
"""
Log the prompt in crytic/export/codex/filename
Append to the file
Args:
filename: filename to write to
prompt: prompt to write
Returns:
None
"""

Path("crytic_export/codex").mkdir(parents=True, exist_ok=True)

with open(Path("crytic_export/codex", filename), "a", encoding="utf8") as file:
file.write(prompt)
file.write("\n")
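Together, the two helpers give callers a configured openai module and an append-only query log. A short sketch of how a caller such as the Codex detector might use them — it assumes the openai package is installed and OPENAI_API_KEY is exported, and the prompt text is illustrative:

import uuid

from slither.utils import codex

openai = codex.openai_module()  # None if openai is missing or no API key is set
if openai is not None:
    logging_file = str(uuid.uuid4())
    prompt = "Analyze this Solidity contract and find the vulnerabilities."
    codex.log_codex(logging_file, "Q: " + prompt)  # appended under crytic_export/codex/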
1 change: 1 addition & 0 deletions slither/utils/command_line.py
@@ -34,6 +34,7 @@
"codex_model": "text-davinci-003",
"codex_temperature": 0,
"codex_max_tokens": 300,
"codex_log": False,
"detectors_to_run": "all",
"printers_to_run": None,
"detectors_to_exclude": None,
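These entries are the fallback values behind the CLI flags, so adding codex_log here keeps the new flag overridable like the others. Assuming Slither's JSON config-file mechanism (keys matching defaults_flag_in_config, typically in slither.config.json) also covers the new key, a hypothetical config enabling Codex with logging might look like:

{
    "codex": true,
    "codex_log": true,
    "codex_contracts": "all",
    "codex_model": "text-davinci-003",
    "codex_temperature": 0,
    "codex_max_tokens": 300
}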
