From 9cc8523bd3ae9ae1403661e5ba2b8749535dd5ae Mon Sep 17 00:00:00 2001
From: kiblik <5609770+kiblik@users.noreply.github.com>
Date: Wed, 13 Nov 2024 17:53:54 +0100
Subject: [PATCH] Ruff: Add and fix S110 (+ merge all S1 rules)

---
 dojo/benchmark/views.py                 |  5 +----
 dojo/cred/views.py                      |  5 +----
 dojo/finding/helper.py                  | 26 ++++++++++++++------------
 dojo/middleware.py                      |  6 ++----
 dojo/product/views.py                   |  4 +---
 dojo/templatetags/display_tags.py       | 13 ++++++-------
 dojo/tools/gitlab_api_fuzzing/parser.py | 11 +++++------
 dojo/tools/h1/parser.py                 | 14 ++++----------
 dojo/tools/kiuwan/parser.py             |  6 +++---
 dojo/tools/veracode/json_parser.py      | 10 +++++-----
 ruff.toml                               |  2 +-
 tests/Import_scanner_test.py            |  7 ++-----
 tests/base_test_class.py                |  7 +------
 13 files changed, 46 insertions(+), 70 deletions(-)

diff --git a/dojo/benchmark/views.py b/dojo/benchmark/views.py
index 0d0c7174b9..611c690945 100644
--- a/dojo/benchmark/views.py
+++ b/dojo/benchmark/views.py
@@ -1,4 +1,3 @@
-import contextlib
 import logging
 
 from crum import get_current_user
@@ -37,9 +36,7 @@ def add_benchmark(queryset, product):
         benchmark_product.product = product
         benchmark_product.control = requirement
         requirements.append(benchmark_product)
-
-    with contextlib.suppress(Exception):
-        Benchmark_Product.objects.bulk_create(requirements)
+    Benchmark_Product.objects.bulk_create(requirements)
 
 
 @user_is_authorized(Product, Permissions.Benchmark_Edit, "pid")
diff --git a/dojo/cred/views.py b/dojo/cred/views.py
index f8f7756e34..e79dc6c6b1 100644
--- a/dojo/cred/views.py
+++ b/dojo/cred/views.py
@@ -1,4 +1,3 @@
-import contextlib
 import logging
 
 from django.contrib import messages
@@ -585,9 +584,7 @@ def new_cred_finding(request, fid):
 
 @user_is_authorized(Cred_User, Permissions.Credential_Delete, "ttid")
 def delete_cred_controller(request, destination_url, id, ttid):
-    cred = None
-    with contextlib.suppress(Exception):
-        cred = Cred_Mapping.objects.get(pk=ttid)
+    cred = Cred_Mapping.objects.filter(pk=ttid).first()
     if request.method == "POST":
         tform = CredMappingForm(request.POST, instance=cred)
         message = ""
diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py
index 4c1281d665..66badd594d 100644
--- a/dojo/finding/helper.py
+++ b/dojo/finding/helper.py
@@ -4,6 +4,7 @@
 from django.conf import settings
 from django.db.models.query_utils import Q
 from django.db.models.signals import post_delete, pre_delete
+from django.db.utils import IntegrityError
 from django.dispatch.dispatcher import receiver
 from django.utils import timezone
 from fieldsignals import pre_save_changed
@@ -164,21 +165,22 @@ def create_finding_group(finds, finding_group_name):
 
     finding_group = Finding_Group(test=finds[0].test)
     finding_group.creator = get_current_user()
-    finding_group.name = finding_group_name + finding_group_name_dummy
-    finding_group.save()
-    available_findings = [find for find in finds if not find.finding_group_set.all()]
-    finding_group.findings.set(available_findings)
 
-    # if user provided a name, we use that, else:
-    # if we have components, we may set a nice name but catch 'name already exist' exceptions
+    if finding_group_name:
+        finding_group.name = finding_group_name
+    elif finding_group.components:
+        finding_group.name = finding_group.components
     try:
-        if finding_group_name:
-            finding_group.name = finding_group_name
-        elif finding_group.components:
-            finding_group.name = finding_group.components
         finding_group.save()
-    except:
-        pass
+    except IntegrityError as ie:
+        if "already exists" in str(ie):
+            finding_group.name = finding_group_name + finding_group_name_dummy
+            finding_group.save()
+        else:
+            raise
+
+    available_findings = [find for find in finds if not find.finding_group_set.all()]
+    finding_group.findings.set(available_findings)
 
     added = len(available_findings)
     skipped = len(finds) - added
diff --git a/dojo/middleware.py b/dojo/middleware.py
index 9fcb8a51db..239a2d92f4 100644
--- a/dojo/middleware.py
+++ b/dojo/middleware.py
@@ -1,5 +1,6 @@
 import logging
 import re
+from contextlib import suppress
 from threading import local
 from urllib.parse import quote
 
@@ -56,13 +57,10 @@ def __call__(self, request):
 
         if request.user.is_authenticated:
             logger.debug("Authenticated user: %s", str(request.user))
-            try:
+            with suppress(ModuleNotFoundError):  # to avoid unittests to fail
                 uwsgi = __import__("uwsgi", globals(), locals(), ["set_logvar"], 0)
                 # this populates dd_user log var, so can appear in the uwsgi logs
                 uwsgi.set_logvar("dd_user", str(request.user))
-            except:
-                # to avoid unittests to fail
-                pass
         path = request.path_info.lstrip("/")
         from dojo.models import Dojo_User
         if Dojo_User.force_password_reset(request.user) and path != "change_password":
diff --git a/dojo/product/views.py b/dojo/product/views.py
index 8c20b50627..654169363d 100644
--- a/dojo/product/views.py
+++ b/dojo/product/views.py
@@ -1,7 +1,6 @@
 # # product
 import base64
 import calendar as tcalendar
-import contextlib
 import logging
 from collections import OrderedDict
 from datetime import date, datetime, timedelta
@@ -958,8 +957,7 @@ def edit_product(request, pid):
 
             if get_system_setting("enable_github") and github_inst:
                 gform = GITHUB_Product_Form(request.POST, instance=github_inst)
-                # need to handle delete
-                with contextlib.suppress(Exception):
+                if gform.is_valid():
                     gform.save()
             elif get_system_setting("enable_github"):
                 gform = GITHUB_Product_Form(request.POST)
diff --git a/dojo/templatetags/display_tags.py b/dojo/templatetags/display_tags.py
index df0ed46f53..d58d61bb44 100644
--- a/dojo/templatetags/display_tags.py
+++ b/dojo/templatetags/display_tags.py
@@ -431,13 +431,12 @@ def pic_token(context, image, size):
 
 @register.filter
 def inline_image(image_file):
-    try:
-        if img_type := mimetypes.guess_type(image_file.file.name)[0]:
-            if img_type.startswith("image/"):
-                img_data = base64.b64encode(image_file.file.read())
-                return f"data:{img_type};base64, {img_data.decode('utf-8')}"
-    except:
-        pass
+    # TODO: This code might need better exception handling or data processing
+    if img_types := mimetypes.guess_type(image_file.file.name):
+        img_type = img_types[0]
+        if img_type.startswith("image/"):
+            img_data = base64.b64encode(image_file.file.read())
+            return f"data:{img_type};base64, {img_data.decode('utf-8')}"
     return ""
 
 
diff --git a/dojo/tools/gitlab_api_fuzzing/parser.py b/dojo/tools/gitlab_api_fuzzing/parser.py
index c536dc0020..1095d21a65 100644
--- a/dojo/tools/gitlab_api_fuzzing/parser.py
+++ b/dojo/tools/gitlab_api_fuzzing/parser.py
@@ -28,12 +28,11 @@ def get_findings(self, file, test):
             title = vulnerability["name"]
             severity = self.normalise_severity(vulnerability["severity"])
             description = vulnerability.get("category", "")
-            try:
-                location = vulnerability["location"]
-                description += "\n" + location["crash_type"]
-                description += "\n" + location["crash_state"]
-            except:
-                pass
+            if location := vulnerability.get("location"):
+                if crash_type := location.get("crash_type"):
+                    description += f"\n{crash_type}"
+                if crash_state := location.get("crash_state"):
+                    description += f"\n{crash_state}"
             findings.append(
                 Finding(
                     title=title,
diff --git a/dojo/tools/h1/parser.py b/dojo/tools/h1/parser.py
index 62072f5eb2..772700f317 100644
--- a/dojo/tools/h1/parser.py
+++ b/dojo/tools/h1/parser.py
@@ -118,11 +118,8 @@ def build_description(self, content):
             description += f"Triaged: {triaged_date}\n"
 
         # Try to grab CVSS
-        try:
-            cvss = content["relationships"]["severity"]["data"]["attributes"]["score"]
+        if cvss := content.get("relationships", {}).get("severity", {}).get("data", {}).get("attributes", {}).get("score"):
             description += f"CVSS: {cvss}\n"
-        except Exception:
-            pass
 
         # Build rest of description meat
         description += "##Report: \n{}\n".format(
@@ -130,12 +127,9 @@
         )
 
         # Try to grab weakness if it's there
-        try:
-            weakness_title = content["relationships"]["weakness"]["data"]["attributes"]["name"]
-            weakness_desc = content["relationships"]["weakness"]["data"]["attributes"]["description"]
-            description += f"\n##Weakness: {weakness_title}\n{weakness_desc}"
-        except Exception:
-            pass
+        if weakness_title := content.get("relationships", {}).get("weakness", {}).get("data", {}).get("attributes", {}).get("name"):
+            if weakness_desc := content.get("relationships", {}).get("weakness", {}).get("data", {}).get("attributes", {}).get("description"):
+                description += f"\n##Weakness: {weakness_title}\n{weakness_desc}"
 
         return description
 
diff --git a/dojo/tools/kiuwan/parser.py b/dojo/tools/kiuwan/parser.py
index 34601b05aa..1caeb78c80 100644
--- a/dojo/tools/kiuwan/parser.py
+++ b/dojo/tools/kiuwan/parser.py
@@ -1,4 +1,3 @@
-import contextlib
 import csv
 import hashlib
 import io
@@ -105,8 +104,9 @@ def get_findings(self, filename, test):
             finding.mitigation = "Not provided!"
             finding.severity = findingdict["severity"]
             finding.static_finding = True
-            with contextlib.suppress(Exception):
-                finding.cwe = int(row["CWE"])
+            if cwe := row.get("CWE"):
+                if cwe.isdigit():
+                    finding.cwe = int(cwe)
 
             if finding is not None:
                 if finding.title is None:
diff --git a/dojo/tools/veracode/json_parser.py b/dojo/tools/veracode/json_parser.py
index b873ada353..5a25fabb66 100644
--- a/dojo/tools/veracode/json_parser.py
+++ b/dojo/tools/veracode/json_parser.py
@@ -85,13 +85,13 @@ def get_items(self, tree, test):
             if not finding:
                 continue
             # Set the date of the finding from the report if it is present
-            try:
+            if finding_status := vuln.get("finding_status"):
                 if settings.USE_FIRST_SEEN:
-                    finding.date = parser.parse(vuln.get("finding_status", {}).get("first_found_date", ""))
+                    if first_found_date := finding_status.get("first_found_date"):
+                        finding.date = parser.parse(first_found_date)
                 else:
-                    finding.date = parser.parse(vuln.get("finding_status", {}).get("last_found_date", ""))
-            except Exception:
-                pass
+                    if last_found_date := finding_status.get("last_found_date"):
+                        finding.date = parser.parse(last_found_date)
             # Generate the description
             finding = self.parse_description(finding, vuln.get("description"), scan_type)
             finding.nb_occurences = vuln.get("count", 1)
diff --git a/ruff.toml b/ruff.toml
index e9008490a5..3848783d83 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -41,7 +41,7 @@ select = [
     "UP",
     "YTT",
     "ASYNC",
-    "S2", "S5", "S7", "S101", "S104", "S105", "S106", "S108", "S311", "S112", "S113",
+    "S1", "S2", "S5", "S7", "S311",
    "FBT001", "FBT003",
    "A003", "A004", "A005", "A006",
    "COM",
diff --git a/tests/Import_scanner_test.py b/tests/Import_scanner_test.py
index 737b48bdcf..1764e36069 100644
--- a/tests/Import_scanner_test.py
+++ b/tests/Import_scanner_test.py
@@ -172,11 +172,8 @@ def test_engagement_import_scan_result(self):
                 index = list(found_matches.keys())[0]
                 scan_map[test] = options_text[index]
             elif len(found_matches) > 1:
-                try:
-                    index = list(found_matches.values()).index(temp_test)
-                    scan_map[test] = options_text[list(found_matches.keys())[index]]
-                except:
-                    pass
+                index = list(found_matches.values()).index(temp_test)
+                scan_map[test] = options_text[list(found_matches.keys())[index]]
 
         failed_tests = []
         for test in self.tests:
diff --git a/tests/base_test_class.py b/tests/base_test_class.py
index c4b056503a..5ace2934b8 100644
--- a/tests/base_test_class.py
+++ b/tests/base_test_class.py
@@ -1,4 +1,3 @@
-import contextlib
 import logging
 import os
 import re
@@ -238,11 +237,7 @@ def goto_all_findings_list(self, driver):
         return driver
 
     def wait_for_datatable_if_content(self, no_content_id, wrapper_id):
-        no_content = None
-        with contextlib.suppress(Exception):
-            no_content = self.driver.find_element(By.ID, no_content_id)
-
-        if no_content is None:
+        if not self.is_element_by_id_present(no_content_id):
             # wait for product_wrapper div as datatables javascript modifies the DOM on page load.
             WebDriverWait(self.driver, 30).until(
                 EC.presence_of_element_located((By.ID, wrapper_id)),