Ruff: Add and fix S110 (+ merge all S1 rules)
kiblik committed Nov 13, 2024
1 parent 7695c12 commit 586b87c
Showing 13 changed files with 41 additions and 70 deletions.
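
Background for the change (an editorial note, not part of the commit): Ruff's S110 rule, inherited from flake8-bandit, flags the try/except/pass pattern ("try-except-pass") because silently swallowing exceptions hides real failures. A minimal sketch of the flagged pattern and two common remediations, using a placeholder do_work() function:

import contextlib
import logging

logger = logging.getLogger(__name__)


def do_work():
    """Placeholder for any operation that may raise."""
    raise RuntimeError("example failure")


# Flagged by S110: the exception vanishes without a trace.
try:
    do_work()
except Exception:
    pass

# One alternative: state the intent explicitly with contextlib.suppress.
with contextlib.suppress(RuntimeError):
    do_work()

# Another: keep a broad handler but record what happened.
try:
    do_work()
except Exception:
    logger.exception("do_work failed, continuing without its result")

Most hunks in this commit take a third route: drop the handler entirely and leave a TODO, so any failure now propagates instead of being ignored.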
6 changes: 2 additions & 4 deletions dojo/benchmark/views.py
@@ -37,10 +37,8 @@ def add_benchmark(queryset, product):
         benchmark_product.control = requirement
         requirements.append(benchmark_product)

-    try:
-        Benchmark_Product.objects.bulk_create(requirements)
-    except Exception:
-        pass
+    # TODO: This code might need better exception handling or data processing
+    Benchmark_Product.objects.bulk_create(requirements)


 @user_is_authorized(Product, Permissions.Benchmark_Edit, "pid")
6 changes: 2 additions & 4 deletions dojo/cred/views.py
@@ -585,10 +585,8 @@ def new_cred_finding(request, fid):
 @user_is_authorized(Cred_User, Permissions.Credential_Delete, "ttid")
 def delete_cred_controller(request, destination_url, id, ttid):
     cred = None
-    try:
-        cred = Cred_Mapping.objects.get(pk=ttid)
-    except:
-        pass
+    # TODO: This code might need better exception handling or data processing
+    cred = Cred_Mapping.objects.get(pk=ttid)
     if request.method == "POST":
         tform = CredMappingForm(request.POST, instance=cred)
         message = ""
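
A side effect worth noting: objects.get() raises Cred_Mapping.DoesNotExist for an unknown ttid, so the lookup now fails loudly where it used to leave cred as None. If the old tolerant behaviour is ever wanted back, Django can express it without a bare except; a sketch reusing the view's existing names:

# Returns the matching Cred_Mapping or None; nothing to catch.
cred = Cred_Mapping.objects.filter(pk=ttid).first()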
14 changes: 6 additions & 8 deletions dojo/finding/helper.py
@@ -171,14 +171,12 @@ def create_finding_group(finds, finding_group_name):

     # if user provided a name, we use that, else:
     # if we have components, we may set a nice name but catch 'name already exist' exceptions
-    try:
-        if finding_group_name:
-            finding_group.name = finding_group_name
-        elif finding_group.components:
-            finding_group.name = finding_group.components
-        finding_group.save()
-    except:
-        pass
+    # TODO: This code might need better exception handling or data processing
+    if finding_group_name:
+        finding_group.name = finding_group_name
+    elif finding_group.components:
+        finding_group.name = finding_group.components
+    finding_group.save()

     added = len(available_findings)
     skipped = len(finds) - added
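
The comment above the removed block mentions catching "name already exist" errors. If that protection is reinstated, a narrower handler than the old bare except could target only the duplicate-name case; a sketch, assuming the group name is backed by a unique constraint (so a duplicate save raises django.db.IntegrityError) and reusing the module's existing logger:

from django.db import IntegrityError

if finding_group_name:
    finding_group.name = finding_group_name
elif finding_group.components:
    finding_group.name = finding_group.components
try:
    finding_group.save()
except IntegrityError:
    # Duplicate name: keep the auto-generated one rather than failing the grouping.
    logger.debug("finding group name already exists, keeping default name")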
6 changes: 2 additions & 4 deletions dojo/middleware.py
@@ -1,5 +1,6 @@
 import logging
 import re
+from contextlib import suppress
 from threading import local
 from urllib.parse import quote

@@ -56,13 +57,10 @@ def __call__(self, request):

         if request.user.is_authenticated:
             logger.debug("Authenticated user: %s", str(request.user))
-            try:
+            with suppress(ModuleNotFoundError):  # to avoid unittests to fail
                 uwsgi = __import__("uwsgi", globals(), locals(), ["set_logvar"], 0)
                 # this populates dd_user log var, so can appear in the uwsgi logs
                 uwsgi.set_logvar("dd_user", str(request.user))
-            except:
-                # to avoid unittests to fail
-                pass
         path = request.path_info.lstrip("/")
         from dojo.models import Dojo_User
         if Dojo_User.force_password_reset(request.user) and path != "change_password":
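
This is the one spot where the commit keeps the suppression but makes it explicit: the uwsgi module only exists inside a uWSGI worker, so the dynamic import is allowed to fail quietly during unit tests. The same pattern reduced to a rough standalone sketch (the "admin" value stands in for str(request.user)):

from contextlib import suppress

user_name = "admin"  # stand-in for str(request.user)

with suppress(ModuleNotFoundError):
    # Succeeds only inside a uWSGI worker; anywhere else the block is skipped.
    uwsgi = __import__("uwsgi", globals(), locals(), ["set_logvar"], 0)
    uwsgi.set_logvar("dd_user", user_name)

Unlike the old bare except, only the missing-module case is silenced; any other error inside the block now propagates.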
7 changes: 2 additions & 5 deletions dojo/product/views.py
@@ -964,11 +964,8 @@ def edit_product(request, pid):

         if get_system_setting("enable_github") and github_inst:
             gform = GITHUB_Product_Form(request.POST, instance=github_inst)
-            # need to handle delete
-            try:
-                gform.save()
-            except:
-                pass
+            # TODO: This code might need better exception handling or data processing
+            gform.save()
         elif get_system_setting("enable_github"):
             gform = GITHUB_Product_Form(request.POST)
             if gform.is_valid():
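
An observation on this hunk (not part of the commit): Django's ModelForm.save() raises ValueError when the bound data does not validate, so the unguarded gform.save() now fails loudly where the old bare except hid the problem. The validated pattern already used in the elif branch below avoids that; a generic sketch, assuming the module's existing logger:

gform = GITHUB_Product_Form(request.POST, instance=github_inst)
if gform.is_valid():
    gform.save()
else:
    logger.warning("GITHUB_Product_Form invalid: %s", gform.errors)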
12 changes: 5 additions & 7 deletions dojo/templatetags/display_tags.py
@@ -429,13 +429,11 @@ def pic_token(context, image, size):

 @register.filter
 def inline_image(image_file):
-    try:
-        if img_type := mimetypes.guess_type(image_file.file.name)[0]:
-            if img_type.startswith("image/"):
-                img_data = base64.b64encode(image_file.file.read())
-                return f"data:{img_type};base64, {img_data.decode('utf-8')}"
-    except:
-        pass
+    # TODO: This code might need better exception handling or data processing
+    if img_type := mimetypes.guess_type(image_file.file.name)[0]:
+        if img_type.startswith("image/"):
+            img_data = base64.b64encode(image_file.file.read())
+            return f"data:{img_type};base64, {img_data.decode('utf-8')}"
     return ""

10 changes: 4 additions & 6 deletions dojo/tools/gitlab_api_fuzzing/parser.py
@@ -28,12 +28,10 @@ def get_findings(self, file, test):
             title = vulnerability["name"]
             severity = self.normalise_severity(vulnerability["severity"])
             description = vulnerability.get("category", "")
-            try:
-                location = vulnerability["location"]
-                description += "\n" + location["crash_type"]
-                description += "\n" + location["crash_state"]
-            except:
-                pass
+            # TODO: This code might need better exception handling or data processing
+            location = vulnerability["location"]
+            description += "\n" + location["crash_type"]
+            description += "\n" + location["crash_state"]
             findings.append(
                 Finding(
                     title=title,
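
With the try removed, a report entry without a "location" block now raises KeyError and aborts the whole import. If tolerating such entries matters, dictionary lookups with defaults express that without a blanket except; a sketch reusing the hunk's variables:

if location := vulnerability.get("location"):
    description += "\n" + location.get("crash_type", "")
    description += "\n" + location.get("crash_state", "")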
18 changes: 7 additions & 11 deletions dojo/tools/h1/parser.py
@@ -121,24 +121,20 @@ def build_description(self, content):
             description += f"Triaged: {triaged_date}\n"

         # Try to grab CVSS
-        try:
-            cvss = content["relationships"]["severity"]["data"]["attributes"]["score"]
-            description += f"CVSS: {cvss}\n"
-        except Exception:
-            pass
+        # TODO: This code might need better exception handling or data processing
+        cvss = content["relationships"]["severity"]["data"]["attributes"]["score"]
+        description += f"CVSS: {cvss}\n"

         # Build rest of description meat
         description += "##Report: \n{}\n".format(
             content["attributes"]["vulnerability_information"],
         )

         # Try to grab weakness if it's there
-        try:
-            weakness_title = content["relationships"]["weakness"]["data"]["attributes"]["name"]
-            weakness_desc = content["relationships"]["weakness"]["data"]["attributes"]["description"]
-            description += f"\n##Weakness: {weakness_title}\n{weakness_desc}"
-        except Exception:
-            pass
+        # TODO: This code might need better exception handling or data processing
+        weakness_title = content["relationships"]["weakness"]["data"]["attributes"]["name"]
+        weakness_desc = content["relationships"]["weakness"]["data"]["attributes"]["description"]
+        description += f"\n##Weakness: {weakness_title}\n{weakness_desc}"

         return description

4 changes: 1 addition & 3 deletions dojo/tools/kiuwan/parser.py
@@ -104,10 +104,8 @@ def get_findings(self, filename, test):
             finding.mitigation = "Not provided!"
             finding.severity = findingdict["severity"]
             finding.static_finding = True
-            try:
                 if "CWE" in row:
                     finding.cwe = int(row["CWE"])
-            except Exception:
-                pass

             if finding is not None:
                 if finding.title is None:
10 changes: 5 additions & 5 deletions dojo/tools/veracode/json_parser.py
@@ -85,13 +85,13 @@ def get_items(self, tree, test):
             if not finding:
                 continue
             # Set the date of the finding from the report if it is present
-            try:
+            if finding_status := vuln.get("finding_status"):
                 if settings.USE_FIRST_SEEN:
-                    finding.date = parser.parse(vuln.get("finding_status", {}).get("first_found_date", ""))
+                    if first_found_date := finding_status.get("first_found_date"):
+                        finding.date = parser.parse(first_found_date)
                 else:
-                    finding.date = parser.parse(vuln.get("finding_status", {}).get("last_found_date", ""))
-            except Exception:
-                pass
+                    if last_found_date := finding_status.get("last_found_date"):
+                        finding.date = parser.parse(last_found_date)
             # Generate the description
             finding = self.parse_description(finding, vuln.get("description"), scan_type)
             finding.nb_occurences = vuln.get("count", 1)
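
This hunk is the one place the commit replaces the swallowed exception with real control flow: each optional key is checked with an assignment expression before use, so a report without finding_status or the date fields simply leaves finding.date unset instead of raising. The same idea reduced to a standalone sketch with sample input:

from dateutil import parser

vuln = {"finding_status": {"first_found_date": "2024-11-13"}}  # sample input

if finding_status := vuln.get("finding_status"):
    if first_found_date := finding_status.get("first_found_date"):
        date = parser.parse(first_found_date)  # runs only when the key exists and is non-empty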
2 changes: 1 addition & 1 deletion ruff.toml
@@ -41,7 +41,7 @@ select = [
     "UP",
     "YTT",
     "ASYNC",
-    "S2", "S5", "S7", "S101", "S104", "S105", "S106", "S108", "S311", "S112", "S113",
+    "S1", "S2", "S5", "S7", "S311",
     "FBT001", "FBT003",
     "A003", "A004", "A006",
     "COM",
8 changes: 3 additions & 5 deletions tests/Import_scanner_test.py
@@ -172,11 +172,9 @@ def test_engagement_import_scan_result(self):
                 index = list(found_matches.keys())[0]
                 scan_map[test] = options_text[index]
             elif len(found_matches) > 1:
-                try:
-                    index = list(found_matches.values()).index(temp_test)
-                    scan_map[test] = options_text[list(found_matches.keys())[index]]
-                except:
-                    pass
+                # TODO: This code might need better exception handling or data processing
+                index = list(found_matches.values()).index(temp_test)
+                scan_map[test] = options_text[list(found_matches.keys())[index]]

         failed_tests = []
         for test in self.tests:
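
list.index() raises ValueError when the value is absent, which the old bare except papered over. A membership test keeps the new, handler-free shape while still skipping the miss case; a sketch under the same variable names:

values = list(found_matches.values())
if temp_test in values:
    index = values.index(temp_test)
    scan_map[test] = options_text[list(found_matches.keys())[index]]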
8 changes: 1 addition & 7 deletions tests/base_test_class.py
@@ -237,13 +237,7 @@ def goto_all_findings_list(self, driver):
         return driver

     def wait_for_datatable_if_content(self, no_content_id, wrapper_id):
-        no_content = None
-        try:
-            no_content = self.driver.find_element(By.ID, no_content_id)
-        except:
-            pass
-
-        if no_content is None:
+        if not self.is_element_by_id_present(no_content_id):
            # wait for product_wrapper div as datatables javascript modifies the DOM on page load.
            WebDriverWait(self.driver, 30).until(
                EC.presence_of_element_located((By.ID, wrapper_id)),
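
The replacement leans on an is_element_by_id_present helper rather than catching NoSuchElementException inline. The helper's body is not shown in this diff; a plausible Selenium implementation (an assumption, not necessarily the project's actual code) would be:

def is_element_by_id_present(self, element_id):
    # find_elements returns an empty list instead of raising when nothing matches.
    return len(self.driver.find_elements(By.ID, element_id)) > 0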
