diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md index 4dc3873471..f575ea0762 100644 --- a/.github/ISSUE_TEMPLATE/support_request.md +++ b/.github/ISSUE_TEMPLATE/support_request.md @@ -7,7 +7,7 @@ assignees: '' --- **Slack us first!** -The easiest and fastest way to help you is via Slack. There's a free and easy signup to join our #defectdojo channel in the OWASP Slack workspace: [Get Access.](https://owasp-slack.herokuapp.com/) +The easiest and fastest way to help you is via Slack. There's a free and easy signup to join our #defectdojo channel in the OWASP Slack workspace: [Get Access.](https://owasp.org/slack/invite) If you're confident you've found a bug, or are allergic to Slack, you can submit an issue anyway. **Be informative** diff --git a/components/package.json b/components/package.json index f0e2d5af2c..4b74883bf0 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.39.2", + "version": "2.39.3", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index f21e8362fa..ee1a414db8 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa: F401 -__version__ = "2.39.2" +__version__ = "2.39.3" __url__ = "https://github.com/DefectDojo/django-DefectDojo" __docs__ = "https://documentation.defectdojo.com" diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index ea73bd80c6..b813a9c275 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -17,7 +17,7 @@ from django.db import DEFAULT_DB_ALIAS from django.db.models import Count, Q from django.db.models.query import Prefetch, QuerySet -from django.http import FileResponse, HttpRequest, HttpResponse, HttpResponseRedirect, QueryDict, StreamingHttpResponse +from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, QueryDict, StreamingHttpResponse from django.shortcuts import get_object_or_404, render from django.urls import Resolver404, reverse from django.utils import timezone @@ -100,6 +100,7 @@ add_success_message_to_response, async_delete, calculate_grade, + generate_file_response_from_file_path, get_cal_event, get_page_items, get_return_url, @@ -1516,7 +1517,7 @@ def upload_threatmodel(request, eid): @user_is_authorized(Engagement, Permissions.Engagement_View, "eid") def view_threatmodel(request, eid): eng = get_object_or_404(Engagement, pk=eid) - return FileResponse(open(eng.tmodel_path, "rb")) + return generate_file_response_from_file_path(eng.tmodel_path) @user_is_authorized(Engagement, Permissions.Engagement_View, "eid") diff --git a/dojo/forms.py b/dojo/forms.py index 6fe83668d1..cb1e670054 100644 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -752,6 +752,23 @@ class UploadThreatForm(forms.Form): attrs={"accept": ".jpg,.png,.pdf"}), label="Select Threat Model") + def clean(self): + if (file := self.cleaned_data.get("file", None)) is not None: + ext = os.path.splitext(file.name)[1] # [0] returns path+filename + valid_extensions = [".jpg", ".png", ".pdf"] + if ext.lower() not in valid_extensions: + if accepted_extensions := f"{', '.join(valid_extensions)}": + msg = ( + "Unsupported extension. 
Supported extensions are as " f"follows: {accepted_extensions}" ) else: msg = ( "File uploads are prohibited due to the list of acceptable " "file extensions being empty" ) raise ValidationError(msg) class MergeFindings(forms.ModelForm): FINDING_ACTION = (("", "Select an Action"), ("inactive", "Inactive"), ("delete", "Delete")) diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index fb0eab686e..860f4f01d1 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -159,7 +159,13 @@ def can_be_pushed_to_jira(obj, form=None): elif isinstance(obj, Finding_Group): if not obj.findings.all(): return False, f"{to_str_typed(obj)} cannot be pushed to jira as it is empty.", "error_empty" - if "Active" not in obj.status(): + # Accommodating a strange behavior where a finding group sometimes prefers `obj.status` rather than `obj.status()` + try: + not_active = "Active" not in obj.status() + except TypeError: # TypeError: 'str' object is not callable + not_active = "Active" not in obj.status + # Determine if the finding group is not active + if not_active: return False, f"{to_str_typed(obj)} cannot be pushed to jira as it is not active.", "error_inactive" else: diff --git a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum index 476d3116d4..def3909d2c 100644 --- a/dojo/settings/.settings.dist.py.sha256sum +++ b/dojo/settings/.settings.dist.py.sha256sum @@ -1 +1 @@ -42026ac47884ee26fe742e59fb7dc621b5f927ee6ee3c92daf09b97f2a740163 +002b28325f11793c5aa9f09326c2d5cc66de518cce51b2cb4cb681a920b89909 diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 1493afadd9..846639f07a 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1736,6 +1736,7 @@ def saml2_attrib_map_format(dict): "USN": "https://ubuntu.com/security/notices/", # e.g. https://ubuntu.com/security/notices/USN-6642-1 "DLA": "https://security-tracker.debian.org/tracker/", # e.g. https://security-tracker.debian.org/tracker/DLA-3917-1 "ELSA": "https://linux.oracle.com/errata/&&.html", # e.g. https://linux.oracle.com/errata/ELSA-2024-12714.html + "RXSA": "https://errata.rockylinux.org/", # e.g. 
https://errata.rockylinux.org/RXSA-2024:4928 } # List of acceptable file types that can be uploaded to a given object via arbitrary file upload FILE_UPLOAD_TYPES = env("DD_FILE_UPLOAD_TYPES") diff --git a/dojo/tools/osv_scanner/parser.py b/dojo/tools/osv_scanner/parser.py index 42e9408825..f91ec10f7d 100644 --- a/dojo/tools/osv_scanner/parser.py +++ b/dojo/tools/osv_scanner/parser.py @@ -30,26 +30,34 @@ def get_findings(self, file, test): except json.decoder.JSONDecodeError: return [] findings = [] - for result in data["results"]: - source_path = result["source"]["path"] - source_type = result["source"]["type"] - for package in result["packages"]: - package_name = package["package"]["name"] - package_version = package["package"]["version"] - package_ecosystem = package["package"]["ecosystem"] - for vulnerability in package["vulnerabilities"]: + for result in data.get("results", []): + # Extract source locations if present + source_path = result.get("source", {}).get("path", "") + source_type = result.get("source", {}).get("type", "") + for package in result.get("packages", []): + package_name = package.get("package", {}).get("name") + package_version = package.get("package", {}).get("version") + package_ecosystem = package.get("package", {}).get("ecosystem", "") + for vulnerability in package.get("vulnerabilities", []): vulnerabilityid = vulnerability.get("id", "") vulnerabilitysummary = vulnerability.get("summary", "") - vulnerabilitydetails = vulnerability["details"] - vulnerabilitypackagepurl = vulnerability["affected"][0].get("package", "") - if vulnerabilitypackagepurl != "": - vulnerabilitypackagepurl = vulnerabilitypackagepurl["purl"] - cwe = vulnerability["affected"][0]["database_specific"].get("cwes", None) - if cwe is not None: - cwe = cwe[0]["cweId"] + vulnerabilitydetails = vulnerability.get("details", "") + vulnerabilitypackagepurl = "" + cwe = None + # Make sure we have an affected section to work with + if (affected := vulnerability.get("affected")) is not None: + if len(affected) > 0: + # Pull the package purl if present + if (vulnerabilitypackage := affected[0].get("package", "")) != "": + vulnerabilitypackagepurl = vulnerabilitypackage.get("purl", "") + # Extract the CWE + if (cwe := affected[0].get("database_specific", {}).get("cwes", None)) is not None: + cwe = cwe[0]["cweId"] + # Create some references reference = "" for ref in vulnerability.get("references"): reference += ref.get("url") + "\n" + # Define the description description = vulnerabilitysummary + "\n" description += "**source_type**: " + source_type + "\n" description += "**package_ecosystem**: " + package_ecosystem + "\n" diff --git a/dojo/tools/redhatsatellite/parser.py b/dojo/tools/redhatsatellite/parser.py index 102f47876f..897273d8a1 100644 --- a/dojo/tools/redhatsatellite/parser.py +++ b/dojo/tools/redhatsatellite/parser.py @@ -62,7 +62,10 @@ def get_findings(self, filename, test): description += "**hosts_applicable_count:** " + str(hosts_applicable_count) + "\n" description += "**installable:** " + str(installable) + "\n" if bugs != []: - description += "**bugs:** " + str(bugs) + "\n" + description += "**bugs:** " + for bug in bugs[:-1]: + description += "[" + bug.get("bug_id") + "](" + bug.get("href") + ")" + ", " + description += "[" + bugs[-1].get("bug_id") + "](" + bugs[-1].get("href") + ")" + "\n" if module_streams != []: description += "**module_streams:** " + str(module_streams) + "\n" description += "**packages:** " + ", ".join(packages) diff --git a/dojo/tools/tenable/csv_format.py 
b/dojo/tools/tenable/csv_format.py index c1ea9fc2c8..2c2e013446 100644 --- a/dojo/tools/tenable/csv_format.py +++ b/dojo/tools/tenable/csv_format.py @@ -100,7 +100,7 @@ def get_findings(self, filename: str, test: Test): severity = self._convert_severity(raw_severity) # Other text fields description = row.get("Synopsis", row.get("definition.synopsis", "N/A")) - mitigation = str(row.get("Solution", row.get("definition.solution", "N/A"))) + mitigation = str(row.get("Solution", row.get("definition.solution", row.get("Steps to Remediate", "N/A")))) impact = row.get("Description", row.get("definition.description", "N/A")) references = row.get("See Also", row.get("definition.see_also", "N/A")) # Determine if the current row has already been processed diff --git a/dojo/utils.py b/dojo/utils.py index 470d860772..8bbd531210 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -5,6 +5,7 @@ import logging import mimetypes import os +import pathlib import re from calendar import monthrange from datetime import date, datetime, timedelta @@ -2616,14 +2617,32 @@ def generate_file_response(file_object: FileUpload) -> FileResponse: raise TypeError(msg) # Determine the path of the file on disk within the MEDIA_ROOT file_path = f"{settings.MEDIA_ROOT}/{file_object.file.url.lstrip(settings.MEDIA_URL)}" - _, file_extension = os.path.splitext(file_path) + + return generate_file_response_from_file_path( + file_path, file_name=file_object.title, file_size=file_object.file.size, + ) + + +def generate_file_response_from_file_path( + file_path: str, file_name: str | None = None, file_size: int | None = None, +) -> FileResponse: + """Serve a local file in a uniform way.""" + # Determine the file path + file_path_without_extension, file_extension = os.path.splitext(file_path) + # Determine the file name if not supplied + if file_name is None: + file_name = file_path_without_extension.rsplit("/")[-1] + # Determine the file size if not supplied + if file_size is None: + file_size = pathlib.Path(file_path).stat().st_size # Generate the FileResponse + full_file_name = f"{file_name}{file_extension}" response = FileResponse( open(file_path, "rb"), - filename=f"{file_object.title}{file_extension}", + filename=full_file_name, content_type=f"{mimetypes.guess_type(file_path)}", ) # Add some important headers - response["Content-Disposition"] = f'attachment; filename="{file_object.title}{file_extension}"' - response["Content-Length"] = file_object.file.size + response["Content-Disposition"] = f'attachment; filename="{full_file_name}"' + response["Content-Length"] = file_size return response diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 95d4a96d0b..cf5789792f 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.39.2" +appVersion: "2.39.3" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.155 +version: 1.6.156 icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap diff --git a/unittests/scans/tenable/issue_11102.csv b/unittests/scans/tenable/issue_11102.csv new file mode 100644 index 0000000000..4c901ff864 --- /dev/null +++ b/unittests/scans/tenable/issue_11102.csv @@ -0,0 +1,61 @@ +"Plugin","Plugin Name","Family","Severity","IP Address","Protocol","Port","Exploit?","Repository","MAC Address","DNS Name","NetBIOS Name","Plugin Output","Synopsis","Description","Steps to Remediate","See Also","Risk Factor","STIG Severity","Vulnerability Priority Rating","CVSS V2 Base 
Score","CVSS V3 Base Score","CVSS V2 Temporal Score","CVSS V3 Temporal Score","CVSS V2 Vector","CVSS V3 Vector","CPE","CVE","BID","Cross References","First Discovered","Last Observed","Vuln Publication Date","Patch Publication Date","Plugin Publication Date","Plugin Modification Date","Exploit Ease","Exploit Frameworks","Check Type","Version","Recast Risk Comment","Accept Risk Comment","Agent ID","Host ID" +"42873","SSL Medium Strength Cipher Suites Supported (SWEET32)","General","High","1.2.3.4","TCP","443","No","Individual Scan","fa:16:3e:e6:0b:98","","","Plugin Output: + Medium Strength Ciphers (> 64-bit and < 112-bit key, or 3DES) + + Name Code KEX Auth Encryption MAC + ---------------------- ---------- --- ---- --------------------- --- + ECDHE-RSA-DES-CBC3-SHA 0xC0, 0x12 ECDH RSA 3DES-CBC(168) SHA1 + DES-CBC3-SHA 0x00, 0x0A RSA RSA 3DES-CBC(168) SHA1 + +The fields above are : + + {Tenable ciphername} + {Cipher ID code} + Kex={key exchange} + Auth={authentication} + Encrypt={symmetric encryption method} + MAC={message authentication code} + {export flag}","The remote service supports the use of medium strength SSL ciphers.","The remote host supports the use of SSL ciphers that offer medium strength encryption. Nessus regards medium strength as any encryption that uses key lengths at least 64 bits and less than 112 bits, or else that uses the 3DES encryption suite. + +Note that it is considerably easier to circumvent medium strength encryption if the attacker is on the same physical network.","Reconfigure the affected application if possible to avoid use of medium strength ciphers.","https://www.openssl.org/blog/blog/2016/08/24/sweet32/ +https://sweet32.info","Medium","","5.1","5.0","7.5","","","AV:N/AC:L/Au:N/C:P/I:N/A:N","AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N","","CVE-2016-2183","","","Feb 9, 2024 10:48:42 UTC","Oct 17, 2024 17:24:54 UTC","Aug 24, 2016 12:00:00 UTC","N/A","Nov 23, 2009 12:00:00 UTC","Feb 3, 2021 12:00:00 UTC","","","remote","1.21","","","","" +"42873","SSL Medium Strength Cipher Suites Supported (SWEET32)","General","High","2.3.4.5","TCP","443","No","Individual Scan","fa:16:3e:e6:0b:98","","","Plugin Output: + Medium Strength Ciphers (> 64-bit and < 112-bit key, or 3DES) + + Name Code KEX Auth Encryption MAC + ---------------------- ---------- --- ---- --------------------- --- + ECDHE-RSA-DES-CBC3-SHA 0xC0, 0x12 ECDH RSA 3DES-CBC(168) SHA1 + DES-CBC3-SHA 0x00, 0x0A RSA RSA 3DES-CBC(168) SHA1 + +The fields above are : + + {Tenable ciphername} + {Cipher ID code} + Kex={key exchange} + Auth={authentication} + Encrypt={symmetric encryption method} + MAC={message authentication code} + {export flag}","The remote service supports the use of medium strength SSL ciphers.","The remote host supports the use of SSL ciphers that offer medium strength encryption. Nessus regards medium strength as any encryption that uses key lengths at least 64 bits and less than 112 bits, or else that uses the 3DES encryption suite. 
+ +Note that it is considerably easier to circumvent medium strength encryption if the attacker is on the same physical network.","Reconfigure the affected application if possible to avoid use of medium strength ciphers.","https://www.openssl.org/blog/blog/2016/08/24/sweet32/ +https://sweet32.info","Medium","","5.1","5.0","7.5","","","AV:N/AC:L/Au:N/C:P/I:N/A:N","AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N","","CVE-2016-2183","","","Feb 9, 2024 10:48:42 UTC","Oct 17, 2024 17:24:54 UTC","Aug 24, 2016 12:00:00 UTC","N/A","Nov 23, 2009 12:00:00 UTC","Feb 3, 2021 12:00:00 UTC","","","remote","1.21","","","","" +"42873","SSL Medium Strength Cipher Suites Supported (SWEET32)","General","High","1.2.3.4","TCP","8443","No","Individual Scan","fa:16:3e:e6:0b:98","","","Plugin Output: + Medium Strength Ciphers (> 64-bit and < 112-bit key, or 3DES) + + Name Code KEX Auth Encryption MAC + ---------------------- ---------- --- ---- --------------------- --- + ECDHE-RSA-DES-CBC3-SHA 0xC0, 0x12 ECDH RSA 3DES-CBC(168) SHA1 + DES-CBC3-SHA 0x00, 0x0A RSA RSA 3DES-CBC(168) SHA1 + +The fields above are : + + {Tenable ciphername} + {Cipher ID code} + Kex={key exchange} + Auth={authentication} + Encrypt={symmetric encryption method} + MAC={message authentication code} + {export flag}","The remote service supports the use of medium strength SSL ciphers.","The remote host supports the use of SSL ciphers that offer medium strength encryption. Nessus regards medium strength as any encryption that uses key lengths at least 64 bits and less than 112 bits, or else that uses the 3DES encryption suite. + +Note that it is considerably easier to circumvent medium strength encryption if the attacker is on the same physical network.","Reconfigure the affected application if possible to avoid use of medium strength ciphers.","https://www.openssl.org/blog/blog/2016/08/24/sweet32/ +https://sweet32.info","Medium","","5.1","5.0","7.5","","","AV:N/AC:L/Au:N/C:P/I:N/A:N","AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N","","CVE-2016-2183","","","Feb 9, 2024 10:48:42 UTC","Oct 17, 2024 17:24:54 UTC","Aug 24, 2016 12:00:00 UTC","N/A","Nov 23, 2009 12:00:00 UTC","Feb 3, 2021 12:00:00 UTC","","","remote","1.21","","","","" \ No newline at end of file diff --git a/unittests/tools/test_tenable_parser.py b/unittests/tools/test_tenable_parser.py index e80c3e4462..7be782c49e 100644 --- a/unittests/tools/test_tenable_parser.py +++ b/unittests/tools/test_tenable_parser.py @@ -299,3 +299,13 @@ def test_parse_issue_9612(self): endpoint.clean() self.assertEqual(2, len(findings)) self.assertEqual("Critical", findings[0].severity) + + def test_parse_issue_11102(self): + with open("unittests/scans/tenable/issue_11102.csv", encoding="utf-8") as testfile: + parser = TenableParser() + findings = parser.get_findings(testfile, self.create_test()) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(2, len(findings)) + self.assertEqual("Reconfigure the affected application if possible to avoid use of medium strength ciphers.", findings[0].mitigation)
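A minimal sketch (not part of the patch) of the mitigation lookup order that the dojo/tools/tenable/csv_format.py change above introduces: "Solution" first, then "definition.solution", then the newly supported "Steps to Remediate" column, with "N/A" as the final fallback. The resolve_mitigation helper and the sample row are illustrative assumptions, not code from this diff.

# Illustrative only: mirrors the fallback chain added to the Tenable CSV parser in this diff.
# `row` stands in for a csv.DictReader row from a Tenable export such as issue_11102.csv.
def resolve_mitigation(row: dict) -> str:
    return str(row.get("Solution", row.get("definition.solution", row.get("Steps to Remediate", "N/A"))))

# Example: a Tenable Security Center export that only carries "Steps to Remediate"
row = {"Steps to Remediate": "Reconfigure the affected application if possible to avoid use of medium strength ciphers."}
print(resolve_mitigation(row))  # prints the remediation text; a row with none of the three columns yields "N/A"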