Commit

Merge branch 'dev' into dominator-deadcode-problem-fix

Tiko7454 authored Jun 26, 2023
2 parents cee0c6b + 273cca4 commit 7a35d8f
Showing 23 changed files with 841 additions and 191 deletions.
7 changes: 4 additions & 3 deletions CONTRIBUTING.md
@@ -81,7 +81,7 @@ For each new detector, at least one regression tests must be present.

1. Create a folder in `tests/e2e/detectors/test_data` with the detector's argument name.
2. Create a test contract in `tests/e2e/detectors/test_data/<detector_name>/`.
3. Update `ALL_TEST` in `tests/e2e/detectors/test_detectors.py`
3. Update `ALL_TESTS` in `tests/e2e/detectors/test_detectors.py`.
4. Run `python tests/e2e/detectors/test_detectors.py --compile` to create a zip file of the compilation artifacts.
5. `pytest tests/e2e/detectors/test_detectors.py --insta update-new`. This will generate a snapshot of the detector output in `tests/e2e/detectors/snapshots/`. If updating an existing detector, run `pytest tests/e2e/detectors/test_detectors.py --insta review` and accept or reject the updates.
6. Run `pytest tests/e2e/detectors/test_detectors.py` to ensure everything worked. Then, add and commit the files to git.
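
Step 3 above registers the detector with the regression harness. A hedged sketch of what such an `ALL_TESTS` entry can look like is below; the `Test` record, the contract file name, and the solc version are illustrative stand-ins, not taken from this diff.

```python
# Hypothetical excerpt in the spirit of tests/e2e/detectors/test_detectors.py;
# the real Test helper may carry additional fields (e.g. extra source files).
from collections import namedtuple

from slither.detectors import all_detectors

# Stand-in record: detector class, test contract, and the solc version used to compile it.
Test = namedtuple("Test", ["detector", "test_file", "solc_ver"])

ALL_TESTS = [
    # ... existing entries ...
    Test(all_detectors.CacheArrayLength, "CacheArrayLength.sol", "0.8.17"),
]
```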
@@ -97,8 +97,9 @@ For each new detector, at least one regression tests must be present.

1. Create a test in `tests/e2e/solc_parsing/`
2. Run `python tests/e2e/solc_parsing/test_ast_parsing.py --compile`. This will compile the artifact in `tests/e2e/solc_parsing/compile`. Add the compiled artifact to git.
3. Run `python tests/e2e/solc_parsing/test_ast_parsing.py --generate`. This will generate the json artifacts in `tests/e2e/solc_parsing/expected_json`. Add the generated files to git.
4. Run `pytest tests/e2e/solc_parsing/test_ast_parsing.py` and check that everything worked.
3. Update `ALL_TESTS` in `tests/e2e/solc_parsing/test_ast_parsing.py`.
4. Run `python tests/e2e/solc_parsing/test_ast_parsing.py --generate`. This will generate the json artifacts in `tests/e2e/solc_parsing/expected_json`. Add the generated files to git.
5. Run `pytest tests/e2e/solc_parsing/test_ast_parsing.py` and check that everything worked.

> ##### Helpful commands for parsing tests
>
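The parsing workflow above gains the same registration step: the new test must be listed in `ALL_TESTS` before the `--compile` and `--generate` artifacts are produced. A rough, hypothetical sketch of such an entry (the `Test` record and the version list are assumptions, not shown in this diff):

```python
# Hypothetical excerpt in the spirit of tests/e2e/solc_parsing/test_ast_parsing.py;
# the real harness may associate contracts and solc versions differently.
from collections import namedtuple

# Stand-in record: test contract plus the solc versions it should be compiled with.
Test = namedtuple("Test", ["test_file", "solc_versions"])

ALL_TESTS = [
    # ... existing entries ...
    Test("user_defined_operators-0.8.19.sol", ["0.8.19"]),
]
```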
212 changes: 113 additions & 99 deletions README.md

Large diffs are not rendered by default.

77 changes: 41 additions & 36 deletions slither/__main__.py
@@ -35,6 +35,7 @@
from slither.utils.output_capture import StandardOutputCapture
from slither.utils.colors import red, set_colorization_enabled
from slither.utils.command_line import (
FailOnLevel,
output_detectors,
output_results_to_markdown,
output_detectors_json,
@@ -206,22 +207,22 @@ def choose_detectors(
detectors_to_run = sorted(detectors_to_run, key=lambda x: x.IMPACT)
return detectors_to_run

if args.exclude_optimization and not args.fail_pedantic:
if args.exclude_optimization:
detectors_to_run = [
d for d in detectors_to_run if d.IMPACT != DetectorClassification.OPTIMIZATION
]

if args.exclude_informational and not args.fail_pedantic:
if args.exclude_informational:
detectors_to_run = [
d for d in detectors_to_run if d.IMPACT != DetectorClassification.INFORMATIONAL
]
if args.exclude_low and not args.fail_low:
if args.exclude_low:
detectors_to_run = [d for d in detectors_to_run if d.IMPACT != DetectorClassification.LOW]
if args.exclude_medium and not args.fail_medium:
if args.exclude_medium:
detectors_to_run = [
d for d in detectors_to_run if d.IMPACT != DetectorClassification.MEDIUM
]
if args.exclude_high and not args.fail_high:
if args.exclude_high:
detectors_to_run = [d for d in detectors_to_run if d.IMPACT != DetectorClassification.HIGH]
if args.detectors_to_exclude:
detectors_to_run = [
@@ -386,41 +387,44 @@ def parse_args(
default=defaults_flag_in_config["exclude_high"],
)

group_detector.add_argument(
fail_on_group = group_detector.add_mutually_exclusive_group()
fail_on_group.add_argument(
"--fail-pedantic",
help="Return the number of findings in the exit code",
action="store_true",
default=defaults_flag_in_config["fail_pedantic"],
help="Fail if any findings are detected",
action="store_const",
dest="fail_on",
const=FailOnLevel.PEDANTIC,
)

group_detector.add_argument(
"--no-fail-pedantic",
help="Do not return the number of findings in the exit code. Opposite of --fail-pedantic",
dest="fail_pedantic",
action="store_false",
required=False,
)

group_detector.add_argument(
fail_on_group.add_argument(
"--fail-low",
help="Fail if low or greater impact finding is detected",
action="store_true",
default=defaults_flag_in_config["fail_low"],
help="Fail if any low or greater impact findings are detected",
action="store_const",
dest="fail_on",
const=FailOnLevel.LOW,
)

group_detector.add_argument(
fail_on_group.add_argument(
"--fail-medium",
help="Fail if medium or greater impact finding is detected",
action="store_true",
default=defaults_flag_in_config["fail_medium"],
help="Fail if any medium or greater impact findings are detected",
action="store_const",
dest="fail_on",
const=FailOnLevel.MEDIUM,
)

group_detector.add_argument(
fail_on_group.add_argument(
"--fail-high",
help="Fail if high impact finding is detected",
action="store_true",
default=defaults_flag_in_config["fail_high"],
help="Fail if any high impact findings are detected",
action="store_const",
dest="fail_on",
const=FailOnLevel.HIGH,
)
fail_on_group.add_argument(
"--fail-none",
"--no-fail-pedantic",
help="Do not return the number of findings in the exit code",
action="store_const",
dest="fail_on",
const=FailOnLevel.NONE,
)
fail_on_group.set_defaults(fail_on=FailOnLevel.PEDANTIC)

group_detector.add_argument(
"--show-ignored-findings",
@@ -896,17 +900,18 @@ def main_impl(
stats = pstats.Stats(cp).sort_stats("cumtime")
stats.print_stats()

if args.fail_high:
fail_on = FailOnLevel(args.fail_on)
if fail_on == FailOnLevel.HIGH:
fail_on_detection = any(result["impact"] == "High" for result in results_detectors)
elif args.fail_medium:
elif fail_on == FailOnLevel.MEDIUM:
fail_on_detection = any(
result["impact"] in ["Medium", "High"] for result in results_detectors
)
elif args.fail_low:
elif fail_on == FailOnLevel.LOW:
fail_on_detection = any(
result["impact"] in ["Low", "Medium", "High"] for result in results_detectors
)
elif args.fail_pedantic:
elif fail_on == FailOnLevel.PEDANTIC:
fail_on_detection = bool(results_detectors)
else:
fail_on_detection = False
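The `parse_args` and `main_impl` changes above replace the four independent `--fail-*` booleans with a single `fail_on` destination filled by a mutually exclusive group of `store_const` flags, which is also why `choose_detectors` no longer ties the `--exclude-*` options to the fail flags. A minimal, self-contained sketch of that argparse pattern follows; the flag names and `FailOnLevel` members mirror the diff, but the enum's string values and everything around them are assumptions, not Slither's actual code.

```python
# Minimal sketch of the "one dest, many store_const flags" pattern used in parse_args,
# plus the threshold check from main_impl. Names mirror the diff; details are assumed.
import argparse
from enum import Enum


class FailOnLevel(Enum):
    PEDANTIC = "pedantic"
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    NONE = "none"


parser = argparse.ArgumentParser()
fail_on_group = parser.add_mutually_exclusive_group()
for flag, level in [
    ("--fail-pedantic", FailOnLevel.PEDANTIC),
    ("--fail-low", FailOnLevel.LOW),
    ("--fail-medium", FailOnLevel.MEDIUM),
    ("--fail-high", FailOnLevel.HIGH),
]:
    # Every flag writes its own constant into the shared "fail_on" destination.
    fail_on_group.add_argument(flag, action="store_const", dest="fail_on", const=level)
fail_on_group.add_argument(
    "--fail-none", "--no-fail-pedantic", action="store_const", dest="fail_on",
    const=FailOnLevel.NONE,
)
fail_on_group.set_defaults(fail_on=FailOnLevel.PEDANTIC)

args = parser.parse_args(["--fail-medium"])

# Decide the exit status from the selected level, as main_impl does with detector impacts.
findings = [{"impact": "Low"}, {"impact": "Medium"}]  # stand-in for results_detectors
thresholds = {
    FailOnLevel.HIGH: ["High"],
    FailOnLevel.MEDIUM: ["Medium", "High"],
    FailOnLevel.LOW: ["Low", "Medium", "High"],
}
if args.fail_on == FailOnLevel.PEDANTIC:
    fail_on_detection = bool(findings)
elif args.fail_on == FailOnLevel.NONE:
    fail_on_detection = False
else:
    fail_on_detection = any(f["impact"] in thresholds[args.fail_on] for f in findings)

print(args.fail_on, fail_on_detection)  # FailOnLevel.MEDIUM True
```

Because all five flags share one destination inside a mutually exclusive group, passing two of them (for example `--fail-low --fail-high`) now produces an argparse error instead of silently combining, and the default remains the pedantic behavior.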
5 changes: 4 additions & 1 deletion slither/core/solidity_types/type_alias.py
@@ -1,10 +1,11 @@
from typing import TYPE_CHECKING, Tuple
from typing import TYPE_CHECKING, Tuple, Dict

from slither.core.declarations.top_level import TopLevel
from slither.core.declarations.contract_level import ContractLevel
from slither.core.solidity_types import Type, ElementaryType

if TYPE_CHECKING:
from slither.core.declarations.function_top_level import FunctionTopLevel
from slither.core.declarations import Contract
from slither.core.scope.scope import FileScope

@@ -43,6 +44,8 @@ class TypeAliasTopLevel(TypeAlias, TopLevel):
def __init__(self, underlying_type: ElementaryType, name: str, scope: "FileScope") -> None:
super().__init__(underlying_type, name)
self.file_scope: "FileScope" = scope
# operators redefined
self.operators: Dict[str, "FunctionTopLevel"] = {}

def __str__(self) -> str:
return self.name
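The new `operators` mapping on `TypeAliasTopLevel` records operators redefined for a user-defined value type (Solidity's `using {f as +} for T global;`). A small hedged sketch of reading it is below; it assumes `alias` is a `TypeAliasTopLevel` from an already-parsed compilation unit and that the dictionary is keyed by the operator, neither of which this diff spells out.

```python
# Hedged sketch: list the operators redefined on a user-defined value type.
# `alias` is assumed to be a slither TypeAliasTopLevel obtained elsewhere; the key
# format of `operators` is an assumption based on its Dict[str, "FunctionTopLevel"] type.
def print_redefined_operators(alias) -> None:
    for operator, function in alias.operators.items():
        print(f"{alias.name}: operator {operator!r} is implemented by {function.name}")
```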
1 change: 1 addition & 0 deletions slither/detectors/all_detectors.py
@@ -89,5 +89,6 @@
from .functions.permit_domain_signature_collision import DomainSeparatorCollision
from .functions.codex import Codex
from .functions.cyclomatic_complexity import CyclomaticComplexity
from .operations.cache_array_length import CacheArrayLength
from .statements.incorrect_using_for import IncorrectUsingFor
from .operations.encode_packed import EncodePackedCollision
