
Commit

Ruff: Add and fix S110 (+ merge all S1 rules)
kiblik committed Nov 19, 2024
1 parent e59c395 commit 2296e40
Showing 13 changed files with 46 additions and 75 deletions.
6 changes: 1 addition & 5 deletions dojo/benchmark/views.py
@@ -36,11 +36,7 @@ def add_benchmark(queryset, product):
         benchmark_product.product = product
         benchmark_product.control = requirement
         requirements.append(benchmark_product)
-
-    try:
-        Benchmark_Product.objects.bulk_create(requirements)
-    except Exception:
-        pass
+    Benchmark_Product.objects.bulk_create(requirements)


 @user_is_authorized(Product, Permissions.Benchmark_Edit, "pid")
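
Note: this is the basic S110 fix pattern used throughout the commit: the try/except wrapper is dropped so a failing bulk_create raises instead of disappearing. If swallowing the error were still wanted, the rule only asks that the failure be recorded; a minimal sketch under that assumption (the helper name and logger are illustrative, not part of the commit):

    import logging

    logger = logging.getLogger(__name__)

    def bulk_create_quietly(manager, objects):
        # Hypothetical alternative: keep the safety net, but log instead of `pass`,
        # which satisfies S110 (try-except-pass) because the except body is not empty.
        try:
            manager.bulk_create(objects)
        except Exception:
            logger.exception("bulk_create failed for %d objects", len(objects))
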
6 changes: 1 addition & 5 deletions dojo/cred/views.py
@@ -584,11 +584,7 @@ def new_cred_finding(request, fid):
 
 @user_is_authorized(Cred_User, Permissions.Credential_Delete, "ttid")
 def delete_cred_controller(request, destination_url, id, ttid):
-    cred = None
-    try:
-        cred = Cred_Mapping.objects.get(pk=ttid)
-    except:
-        pass
+    cred = Cred_Mapping.objects.filter(pk=ttid).first()
     if request.method == "POST":
         tform = CredMappingForm(request.POST, instance=cred)
         message = ""
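
Note: `.filter(pk=...).first()` returns `None` when no row matches, which reproduces what the old try/bare-except block produced without hiding unrelated errors. A sketch of the narrow equivalent, assuming any Django model class (the helper name is illustrative):

    from django.core.exceptions import ObjectDoesNotExist

    def get_or_none(model, pk):
        """Return the instance or None, mirroring model.objects.filter(pk=pk).first()."""
        try:
            return model.objects.get(pk=pk)
        except ObjectDoesNotExist:  # only the expected "not found" case is swallowed
            return None
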
26 changes: 14 additions & 12 deletions dojo/finding/helper.py
@@ -4,6 +4,7 @@
 from django.conf import settings
 from django.db.models.query_utils import Q
 from django.db.models.signals import post_delete, pre_delete
+from django.db.utils import IntegrityError
 from django.dispatch.dispatcher import receiver
 from django.utils import timezone
 from fieldsignals import pre_save_changed
@@ -164,21 +165,22 @@ def create_finding_group(finds, finding_group_name):
 
     finding_group = Finding_Group(test=finds[0].test)
     finding_group.creator = get_current_user()
-    finding_group.name = finding_group_name + finding_group_name_dummy
-    finding_group.save()
-    available_findings = [find for find in finds if not find.finding_group_set.all()]
-    finding_group.findings.set(available_findings)
 
     # if user provided a name, we use that, else:
     # if we have components, we may set a nice name but catch 'name already exist' exceptions
+    if finding_group_name:
+        finding_group.name = finding_group_name
+    elif finding_group.components:
+        finding_group.name = finding_group.components
     try:
-        if finding_group_name:
-            finding_group.name = finding_group_name
-        elif finding_group.components:
-            finding_group.name = finding_group.components
         finding_group.save()
-    except:
-        pass
+    except IntegrityError as ie:
+        if "already exists" in str(ie):
+            finding_group.name = finding_group_name + finding_group_name_dummy
+            finding_group.save()
+        else:
+            raise
+
+    available_findings = [find for find in finds if not find.finding_group_set.all()]
+    finding_group.findings.set(available_findings)
 
     added = len(available_findings)
     skipped = len(finds) - added
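
Note: the rework keeps the duplicate-name fallback but makes it explicit: the preferred name is tried first, a duplicate triggers a retry with the uuid-suffixed dummy name, and any other IntegrityError is re-raised. A condensed sketch of that save-with-fallback pattern (function and argument names are illustrative):

    from django.db.utils import IntegrityError

    def save_with_fallback_name(obj, preferred_name, fallback_name):
        obj.name = preferred_name
        try:
            obj.save()
        except IntegrityError as ie:
            if "already exists" in str(ie):  # duplicate name: retry with the fallback
                obj.name = fallback_name
                obj.save()
            else:                            # anything else is a real error
                raise
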
6 changes: 2 additions & 4 deletions dojo/middleware.py
@@ -1,5 +1,6 @@
 import logging
 import re
+from contextlib import suppress
 from threading import local
 from urllib.parse import quote

@@ -56,13 +57,10 @@ def __call__(self, request):
 
         if request.user.is_authenticated:
             logger.debug("Authenticated user: %s", str(request.user))
-            try:
+            with suppress(ModuleNotFoundError):  # to avoid unittests to fail
                 uwsgi = __import__("uwsgi", globals(), locals(), ["set_logvar"], 0)
                 # this populates dd_user log var, so can appear in the uwsgi logs
                 uwsgi.set_logvar("dd_user", str(request.user))
-            except:
-                # to avoid unittests to fail
-                pass
             path = request.path_info.lstrip("/")
             from dojo.models import Dojo_User
             if Dojo_User.force_password_reset(request.user) and path != "change_password":
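
Note: `contextlib.suppress` is the stdlib idiom for an intentional, narrowly scoped "ignore this exception" block, and unlike a bare `except: pass` it names exactly what is being ignored. A standalone sketch of the same uwsgi logvar call outside the middleware (the value is illustrative):

    from contextlib import suppress

    with suppress(ModuleNotFoundError):  # the uwsgi module only exists under uWSGI
        import uwsgi
        uwsgi.set_logvar("dd_user", "admin")  # tag the current request's log line
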
5 changes: 1 addition & 4 deletions dojo/product/views.py
@@ -964,11 +964,8 @@ def edit_product(request, pid):
 
         if get_system_setting("enable_github") and github_inst:
             gform = GITHUB_Product_Form(request.POST, instance=github_inst)
             # need to handle delete
-            try:
-                if gform.is_valid():
-                    gform.save()
-            except:
-                pass
+            if gform.is_valid():
+                gform.save()
         elif get_system_setting("enable_github"):
             gform = GITHUB_Product_Form(request.POST)
             if gform.is_valid():
13 changes: 6 additions & 7 deletions dojo/templatetags/display_tags.py
@@ -429,13 +429,12 @@ def pic_token(context, image, size):
 
 @register.filter
 def inline_image(image_file):
-    try:
-        if img_type := mimetypes.guess_type(image_file.file.name)[0]:
-            if img_type.startswith("image/"):
-                img_data = base64.b64encode(image_file.file.read())
-                return f"data:{img_type};base64, {img_data.decode('utf-8')}"
-    except:
-        pass
+    # TODO: This code might need better exception handling or data processing
+    if img_types := mimetypes.guess_type(image_file.file.name):
+        img_type = img_types[0]
+        if img_type.startswith("image/"):
+            img_data = base64.b64encode(image_file.file.read())
+            return f"data:{img_type};base64, {img_data.decode('utf-8')}"
     return ""


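
Note: one caveat the new TODO hints at: `mimetypes.guess_type()` returns a `(type, encoding)` tuple, and a two-element tuple is always truthy, so `img_types[0]` can still be `None` for an unknown extension. A None-safe variant for comparison (a hedged sketch, not what the commit ships):

    import base64
    import mimetypes

    def inline_image_data_uri(filename, data):
        img_type, _encoding = mimetypes.guess_type(filename)
        if img_type and img_type.startswith("image/"):  # guards against img_type being None
            return f"data:{img_type};base64, {base64.b64encode(data).decode('utf-8')}"
        return ""
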
11 changes: 5 additions & 6 deletions dojo/tools/gitlab_api_fuzzing/parser.py
@@ -28,12 +28,11 @@ def get_findings(self, file, test):
             title = vulnerability["name"]
             severity = self.normalise_severity(vulnerability["severity"])
             description = vulnerability.get("category", "")
-            try:
-                location = vulnerability["location"]
-                description += "\n" + location["crash_type"]
-                description += "\n" + location["crash_state"]
-            except:
-                pass
+            if location := vulnerability.get("location"):
+                if crash_type := location.get("crash_type"):
+                    description += f"\n{crash_type}"
+                if crash_state := location.get("crash_state"):
+                    description += f"\n{crash_state}"
             findings.append(
                 Finding(
                     title=title,
14 changes: 4 additions & 10 deletions dojo/tools/h1/parser.py
@@ -121,24 +121,18 @@ def build_description(self, content):
             description += f"Triaged: {triaged_date}\n"
 
         # Try to grab CVSS
-        try:
-            cvss = content["relationships"]["severity"]["data"]["attributes"]["score"]
+        if cvss := content.get("relationships", {}).get("severity", {}).get("data", {}).get("attributes", {}).get("score"):
             description += f"CVSS: {cvss}\n"
-        except Exception:
-            pass
 
         # Build rest of description meat
         description += "##Report: \n{}\n".format(
             content["attributes"]["vulnerability_information"],
         )
 
         # Try to grab weakness if it's there
-        try:
-            weakness_title = content["relationships"]["weakness"]["data"]["attributes"]["name"]
-            weakness_desc = content["relationships"]["weakness"]["data"]["attributes"]["description"]
-            description += f"\n##Weakness: {weakness_title}\n{weakness_desc}"
-        except Exception:
-            pass
+        if weakness_title := content.get("relationships", {}).get("weakness", {}).get("data", {}).get("attributes", {}).get("name"):
+            if weakness_desc := content.get("relationships", {}).get("weakness", {}).get("data", {}).get("attributes", {}).get("description"):
+                description += f"\n##Weakness: {weakness_title}\n{weakness_desc}"
 
         return description

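
Note: chaining `.get(..., {})` with empty-dict defaults walks the nested JSON without ever raising `KeyError`, so the walrus-guarded assignments replace the old catch-all blocks. The same idea as a reusable helper on a plain dict (the helper name and sample data are illustrative):

    def nested_get(data, *keys, default=None):
        """Descend through nested dicts, returning default as soon as a key is missing."""
        for key in keys:
            if not isinstance(data, dict) or key not in data:
                return default
            data = data[key]
        return data

    content = {"relationships": {"severity": {"data": {"attributes": {"score": 7.5}}}}}
    if cvss := nested_get(content, "relationships", "severity", "data", "attributes", "score"):
        print(f"CVSS: {cvss}")  # -> CVSS: 7.5
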
7 changes: 3 additions & 4 deletions dojo/tools/kiuwan/parser.py
@@ -104,10 +104,9 @@ def get_findings(self, filename, test):
            finding.mitigation = "Not provided!"
            finding.severity = findingdict["severity"]
            finding.static_finding = True
-           try:
-               finding.cwe = int(row["CWE"])
-           except Exception:
-               pass
+           if cwe := row.get("CWE"):
+               if cwe.isdigit():
+                   finding.cwe = int(cwe)
 
            if finding is not None:
                if finding.title is None:
10 changes: 5 additions & 5 deletions dojo/tools/veracode/json_parser.py
@@ -85,13 +85,13 @@ def get_items(self, tree, test):
             if not finding:
                 continue
             # Set the date of the finding from the report if it is present
-            try:
+            if finding_status := vuln.get("finding_status"):
                 if settings.USE_FIRST_SEEN:
-                    finding.date = parser.parse(vuln.get("finding_status", {}).get("first_found_date", ""))
+                    if first_found_date := finding_status.get("first_found_date"):
+                        finding.date = parser.parse(first_found_date)
                 else:
-                    finding.date = parser.parse(vuln.get("finding_status", {}).get("last_found_date", ""))
-            except Exception:
-                pass
+                    if last_found_date := finding_status.get("last_found_date"):
+                        finding.date = parser.parse(last_found_date)
             # Generate the description
             finding = self.parse_description(finding, vuln.get("description"), scan_type)
             finding.nb_occurences = vuln.get("count", 1)
2 changes: 1 addition & 1 deletion ruff.toml
@@ -41,7 +41,7 @@ select = [
     "UP",
     "YTT",
     "ASYNC",
-    "S2", "S5", "S7", "S101", "S104", "S105", "S106", "S108", "S311", "S112", "S113",
+    "S1", "S2", "S5", "S7", "S311",
     "FBT001", "FBT003",
     "A003", "A004", "A006",
     "COM",
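
Note: selecting the `S1` prefix enables every `S1xx` rule at once -- including S110 (try-except-pass), which the rest of this commit fixes -- instead of listing the individual codes that were enabled before (S101, S104-S106, S108, S112, S113). Roughly what S110 objects to, and two compliant shapes (an illustrative sketch, not code from the repository):

    import contextlib
    import logging

    logger = logging.getLogger(__name__)

    def risky():
        raise ValueError("example failure")

    def flagged():
        try:
            risky()
        except Exception:
            pass  # S110: the failure is silently discarded

    def ok_logged():
        try:
            risky()
        except Exception:
            logger.exception("risky() failed")  # non-empty handler: not flagged

    def ok_suppressed():
        with contextlib.suppress(ValueError):  # explicit about what is ignored
            risky()
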
7 changes: 2 additions & 5 deletions tests/Import_scanner_test.py
@@ -172,11 +172,8 @@ def test_engagement_import_scan_result(self):
                 index = list(found_matches.keys())[0]
                 scan_map[test] = options_text[index]
             elif len(found_matches) > 1:
-                try:
-                    index = list(found_matches.values()).index(temp_test)
-                    scan_map[test] = options_text[list(found_matches.keys())[index]]
-                except:
-                    pass
+                index = list(found_matches.values()).index(temp_test)
+                scan_map[test] = options_text[list(found_matches.keys())[index]]
 
         failed_tests = []
         for test in self.tests:
8 changes: 1 addition & 7 deletions tests/base_test_class.py
@@ -237,13 +237,7 @@ def goto_all_findings_list(self, driver):
         return driver
 
     def wait_for_datatable_if_content(self, no_content_id, wrapper_id):
-        no_content = None
-        try:
-            no_content = self.driver.find_element(By.ID, no_content_id)
-        except:
-            pass
-
-        if no_content is None:
+        if not self.is_element_by_id_present(no_content_id):
             # wait for product_wrapper div as datatables javascript modifies the DOM on page load.
             WebDriverWait(self.driver, 30).until(
                 EC.presence_of_element_located((By.ID, wrapper_id)),

