From 733dd8e0115b90a604b0af93a54fadf22de85271 Mon Sep 17 00:00:00 2001
From: devloop
Date: Sun, 10 Nov 2024 22:53:43 +0100
Subject: [PATCH] Remove the need for the asyncio stop Event on attacks,
 refactor the attack phase into a single function that can be cancelled on
 Ctrl+C, and use a finally block to persist attacked_ids in case of
 interruption

---
 tests/attack/test_mod_backup.py           |   5 +-
 tests/attack/test_mod_buster.py           |   4 +-
 tests/attack/test_mod_cms.py              |  45 +++---
 tests/attack/test_mod_cookieflags.py      |   3 +-
 tests/attack/test_mod_crlf.py             |   3 +-
 tests/attack/test_mod_csrf.py             |   3 +-
 tests/attack/test_mod_exec.py             |   8 +-
 tests/attack/test_mod_file.py             |  12 +-
 tests/attack/test_mod_htaccess.py         |   3 +-
 tests/attack/test_mod_htp.py              |  21 ++-
 tests/attack/test_mod_https_redirect.py   |  12 +-
 tests/attack/test_mod_ldap.py             |   9 +-
 tests/attack/test_mod_log4shell.py        |  27 ++--
 tests/attack/test_mod_methods.py          |   7 +-
 tests/attack/test_mod_network_device.py   |  45 +++---
 tests/attack/test_mod_nikto.py            |   5 +-
 tests/attack/test_mod_permanentxss.py     |   5 +-
 tests/attack/test_mod_redirect.py         |   7 +-
 tests/attack/test_mod_shellshock.py       |   3 +-
 tests/attack/test_mod_spring4shell.py     |   7 +-
 tests/attack/test_mod_sql.py              |  13 +-
 tests/attack/test_mod_ssl.py              |   5 +-
 tests/attack/test_mod_ssrf.py             |   5 +-
 tests/attack/test_mod_takeover.py         |   3 +-
 tests/attack/test_mod_timesql.py          |   9 +-
 tests/attack/test_mod_upload.py           |   3 +-
 tests/attack/test_mod_wapp.py             |  53 ++++---
 tests/attack/test_mod_wp_enum.py          |  11 +-
 tests/attack/test_mod_xss_advanced.py     |  45 +++---
 tests/attack/test_mod_xss_basics.py       |   3 +-
 tests/attack/test_mod_xxe.py              |  15 +-
 tests/cli/test_options.py                 |  31 ++--
 wapitiCore/attack/attack.py               |   3 -
 wapitiCore/attack/cms/cms_common.py       |  21 +--
 wapitiCore/attack/cms/mod_wp_enum.py      |   6 -
 wapitiCore/attack/mod_backup.py           |   6 +-
 wapitiCore/attack/mod_brute_login_form.py |   4 +-
 wapitiCore/attack/mod_buster.py           |  15 +-
 wapitiCore/attack/mod_cms.py              |  12 +-
 wapitiCore/attack/mod_crlf.py             |   4 +-
 wapitiCore/attack/mod_csrf.py             |   4 +-
 wapitiCore/attack/mod_exec.py             |   4 +-
 wapitiCore/attack/mod_file.py             |   4 +-
 wapitiCore/attack/mod_htp.py              |  25 +---
 wapitiCore/attack/mod_ldap.py             |   4 +-
 wapitiCore/attack/mod_log4shell.py        |   4 +-
 wapitiCore/attack/mod_network_device.py   |   3 +-
 wapitiCore/attack/mod_nikto.py            |  11 +-
 wapitiCore/attack/mod_redirect.py         |   4 +-
 wapitiCore/attack/mod_shellshock.py       |   4 +-
 wapitiCore/attack/mod_sql.py              |   4 +-
 wapitiCore/attack/mod_ssl.py              |   4 +-
 wapitiCore/attack/mod_ssrf.py             |   4 +-
 wapitiCore/attack/mod_takeover.py         |   7 +-
 wapitiCore/attack/mod_timesql.py          |   4 +-
 wapitiCore/attack/mod_wapp.py             |   4 +-
 wapitiCore/attack/mod_wp_enum.py          |   6 -
 wapitiCore/attack/mod_xss.py              |   4 +-
 wapitiCore/attack/mod_xxe.py              |   4 +-
 wapitiCore/controller/wapiti.py           | 173 ++++++++++++----
 wapitiCore/main/wapiti.py                 |  20 +--
 61 files changed, 353 insertions(+), 449 deletions(-)

diff --git a/tests/attack/test_mod_backup.py b/tests/attack/test_mod_backup.py
index c86b3abc9..be4701441 100644
--- a/tests/attack/test_mod_backup.py
+++ b/tests/attack/test_mod_backup.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -35,7 +34,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleBackup(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleBackup(crawler, persister, options, crawler_configuration)
         module.do_get = True
         await module.attack(request, response)
@@ -66,6 +65,6 @@ async def test_false_positive():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleBackup(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleBackup(crawler, persister, options, crawler_configuration)
         module.do_get = True
         assert not await module.must_attack(request, response)
diff --git a/tests/attack/test_mod_buster.py b/tests/attack/test_mod_buster.py
index 3dc8c6f0d..3cf98bb9d 100644
--- a/tests/attack/test_mod_buster.py
+++ b/tests/attack/test_mod_buster.py
@@ -1,6 +1,6 @@
 from unittest import mock
 from unittest.mock import AsyncMock
-from asyncio import Event, sleep
+from asyncio import sleep
 
 import httpx
 import respx
@@ -41,7 +41,7 @@ async def test_whole_stuff():
             "wordlist.txt": "nawak\nadmin\nconfig.inc\nauthconfig.php",
         }
         with mock.patch("builtins.open", get_mock_open(files)):
-            module = ModuleBuster(crawler, persister, options, Event(), crawler_configuration)
+            module = ModuleBuster(crawler, persister, options, crawler_configuration)
             module.DATA_DIR = ""
             module.PATHS_FILE = "wordlist.txt"
             module.do_get = True
diff --git a/tests/attack/test_mod_cms.py b/tests/attack/test_mod_cms.py
index 06c833678..370a9f88c 100644
--- a/tests/attack/test_mod_cms.py
+++ b/tests/attack/test_mod_cms.py
@@ -1,7 +1,6 @@
 import os
 import sys
 from os.path import join as path_join
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -41,7 +40,7 @@ async def test_no_drupal():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -79,7 +78,7 @@ async def test_drupal_version_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -129,7 +128,7 @@ async def test_drupal_multi_versions_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -175,7 +174,7 @@ async def test_drupal_version_not_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -209,7 +208,7 @@ async def test_no_joomla():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -248,7 +247,7 @@ async def test_joomla_version_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -298,7 +297,7 @@ async def test_joomla_multi_versions_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -345,7 +344,7 @@ async def test_joomla_version_not_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -379,7 +378,7 @@ async def test_no_prestashop():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -420,7 +419,7 @@ async def test_prestashop_version_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -470,7 +469,7 @@ async def test_prestashop_multi_versions_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -515,7 +514,7 @@ async def test_prestashop_version_not_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -558,7 +557,7 @@ async def test_spip_version_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -609,7 +608,7 @@ async def test_spip_multi_versions_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -655,7 +654,7 @@ async def test_spip_version_not_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -698,7 +697,7 @@ async def test_wp_version_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -750,7 +749,7 @@ async def test_wp_multi_versions_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -797,7 +796,7 @@ async def test_wp_no_version_detected():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -892,7 +891,7 @@ async def test_wp_false_positive_403():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -1004,7 +1003,7 @@ async def test_wp_false_positive_success():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -1094,7 +1093,7 @@ async def test_wp_plugin():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -1181,7 +1180,7 @@ async def test_wp_theme():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleCms(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCms(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
diff --git a/tests/attack/test_mod_cookieflags.py b/tests/attack/test_mod_cookieflags.py
index 1808185ab..b5dee2e4d 100644
--- a/tests/attack/test_mod_cookieflags.py
+++ b/tests/attack/test_mod_cookieflags.py
@@ -1,4 +1,3 @@
-import asyncio
 import re
 from unittest.mock import AsyncMock
 
@@ -35,7 +34,7 @@ async def test_cookieflags():
         await crawler.async_send(request)  # Put cookies in our crawler object
 
         options = {"timeout": 10, "level": 2}
-        module = ModuleCookieflags(crawler, persister, options, asyncio.Event(), crawler_configuration)
+        module = ModuleCookieflags(crawler, persister, options, crawler_configuration)
         await module.attack(request)
 
         cookie_flags = []
diff --git a/tests/attack/test_mod_crlf.py b/tests/attack/test_mod_crlf.py
index 05a7a2687..b78f52bb4 100644
--- a/tests/attack/test_mod_crlf.py
+++ b/tests/attack/test_mod_crlf.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import respx
@@ -29,7 +28,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleCrlf(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCrlf(crawler, persister, options, crawler_configuration)
         module.do_get = True
         await module.attack(request)
diff --git a/tests/attack/test_mod_csrf.py b/tests/attack/test_mod_csrf.py
index 603c0b364..c14f36c09 100644
--- a/tests/attack/test_mod_csrf.py
+++ b/tests/attack/test_mod_csrf.py
@@ -2,7 +2,6 @@
 import os
 import sys
 from time import sleep
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -90,7 +89,7 @@ async def test_csrf_cases():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 1}
 
-        module = ModuleCsrf(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleCsrf(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request, response in all_requests:
             if await module.must_attack(request, response):
diff --git a/tests/attack/test_mod_exec.py b/tests/attack/test_mod_exec.py
index 99d0a3e03..c8c3a28a0 100644
--- a/tests/attack/test_mod_exec.py
+++ b/tests/attack/test_mod_exec.py
@@ -2,7 +2,7 @@
 import os
 import sys
 from time import sleep
-from asyncio import Event, sleep as Sleep
+from asyncio import sleep as Sleep
 from unittest.mock import AsyncMock
 
 import pytest
@@ -57,7 +57,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleExec(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleExec(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             await module.attack(request)
@@ -103,7 +103,7 @@ async def test_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 1}
 
-        module = ModuleExec(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleExec(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             await module.attack(request)
@@ -137,7 +137,7 @@ def timeout_callback(http_request):
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 1, "level": 1}
 
-        module = ModuleExec(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleExec(crawler, persister, options, crawler_configuration)
         module.do_post = False
 
         payloads_until_sleep = 0
diff --git a/tests/attack/test_mod_file.py b/tests/attack/test_mod_file.py
index a2efbf7f3..e47b96d0c 100644
--- a/tests/attack/test_mod_file.py
+++ b/tests/attack/test_mod_file.py
@@ -2,7 +2,7 @@
 import os
 import sys
 from time import sleep
-from asyncio import Event, sleep as Sleep
+from asyncio import sleep as Sleep
 from unittest.mock import AsyncMock
 
 import httpx
@@ -36,7 +36,7 @@ async def test_inclusion_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleFile(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleFile(crawler, persister, options, crawler_configuration)
         module.do_post = False
         await module.attack(request)
@@ -56,7 +56,7 @@ async def test_open_redirect():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleFile(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleFile(crawler, persister, options, crawler_configuration)
         module.do_post = False
         await module.attack(request)
@@ -74,7 +74,7 @@ async def test_loknop_lfi_to_rce():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleFile(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleFile(crawler, persister, options, crawler_configuration)
         module.do_post = False
         await module.attack(request)
@@ -101,7 +101,7 @@ async def test_warning_false_positive():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleFile(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleFile(crawler, persister, options, crawler_configuration)
         module.do_post = False
         await module.attack(request)
@@ -130,7 +130,7 @@ async def test_no_crash():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleFile(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleFile(crawler, persister, options, crawler_configuration)
         module.do_post = False
         for request in all_requests:
             await module.attack(request)
diff --git a/tests/attack/test_mod_htaccess.py b/tests/attack/test_mod_htaccess.py
index 80ecb25cc..7452e0e3f 100644
--- a/tests/attack/test_mod_htaccess.py
+++ b/tests/attack/test_mod_htaccess.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -46,7 +45,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleHtaccess(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleHtaccess(crawler, persister, options, crawler_configuration)
         module.do_get = True
         for request, response in all_requests:
             if await module.must_attack(request, response):
diff --git a/tests/attack/test_mod_htp.py b/tests/attack/test_mod_htp.py
index 16dcee1be..417df409a 100644
--- a/tests/attack/test_mod_htp.py
+++ b/tests/attack/test_mod_htp.py
@@ -1,7 +1,6 @@
 import asyncio
 import json
 import os
-from asyncio import Event
 from unittest.mock import patch, PropertyMock, AsyncMock
 
 import httpx
@@ -28,7 +27,7 @@ async def test_must_attack():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
 
         assert await module_htp.must_attack(Request("http://perdu.com", method="POST")) is False
         assert await module_htp.must_attack(Request("http://perdu.com", method="GET")) is True
@@ -59,7 +58,7 @@ async def test_analyze_file_detection():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
 
         found_technology = await module_htp._analyze_file(Request("http://perdu.com/"))
@@ -89,7 +88,7 @@ async def test_analyze_file_no_detection():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
 
         assert await module_htp._analyze_file(Request("http://perdu.com")) is None
@@ -112,7 +111,7 @@ async def test_analyze_file_none_content():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
 
         assert await module_htp._analyze_file(Request("http://perdu.com")) is None
@@ -135,7 +134,7 @@ async def test_analyze_file_request_error():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
 
         found_technology = await module_htp._analyze_file(Request("http://perdu.com"))
@@ -166,7 +165,7 @@ async def test_finish_no_technologies():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
 
         await module_htp.finish()
@@ -202,7 +201,7 @@ async def test_finish_one_range():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
         module_htp._root_url = "http://perdu.com/"
 
         module_htp.tech_versions[techno] = [["1.2", "1.2.1", "1.3"]]
@@ -248,7 +247,7 @@ async def test_finish_two_ranges():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
         module_htp._root_url = "http://perdu.com/"
 
         module_htp.tech_versions[techno] = [["1.2", "1.2.1", "1.3"], ["1.3", "1.4"], ["1.5", "1.5"], ["1.0", "1.2"]]
@@ -305,7 +304,7 @@ async def test_root_attack_root_url():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
         module_htp._root_url = target_url
         target_request = Request(target_url)
         mock_analyze_file.return_value = None
@@ -356,7 +355,7 @@ async def test_attack():
     crawler_configuration = CrawlerConfiguration(Request(target_url))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
-        module_htp = ModuleHtp(crawler, persister, options, Event(), crawler_configuration)
+        module_htp = ModuleHtp(crawler, persister, options, crawler_configuration)
         module_htp._root_url = target_url
         target_request = Request(target_url + "index.html")
         options = {"timeout": 10, "level": 2, "tasks": 20}
diff --git a/tests/attack/test_mod_https_redirect.py b/tests/attack/test_mod_https_redirect.py
index cfcd7bcf4..f6408cba9 100644
--- a/tests/attack/test_mod_https_redirect.py
+++ b/tests/attack/test_mod_https_redirect.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import pytest
@@ -10,6 +9,7 @@
 from wapitiCore.net.crawler import AsyncCrawler
 from wapitiCore.attack.mod_https_redirect import ModuleHttpsRedirect
 
+
 @pytest.mark.asyncio
 @respx.mock
 async def test_no_redirect():
@@ -34,7 +34,7 @@ async def test_no_redirect():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleHttpsRedirect(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleHttpsRedirect(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             if not module.finished:
@@ -76,7 +76,7 @@ async def test_redirect_http():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleHttpsRedirect(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleHttpsRedirect(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             if not module.finished:
@@ -112,7 +112,7 @@ async def test_error_response():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleHttpsRedirect(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleHttpsRedirect(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             if not module.finished:
@@ -146,7 +146,7 @@ async def test_http_url_provided():
    async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleHttpsRedirect(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleHttpsRedirect(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             if not module.finished:
@@ -180,7 +180,7 @@ async def test_specific_port_provided():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleHttpsRedirect(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleHttpsRedirect(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             if not module.finished:
diff --git a/tests/attack/test_mod_ldap.py b/tests/attack/test_mod_ldap.py
index dcc9e2d17..33a3247e2 100644
--- a/tests/attack/test_mod_ldap.py
+++ b/tests/attack/test_mod_ldap.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from fnmatch import fnmatch
 from hashlib import md5
 from unittest.mock import AsyncMock
@@ -77,7 +76,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleLdap(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLdap(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             await module.attack(request)
@@ -113,7 +112,7 @@ async def test_random_responses():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleLdap(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLdap(crawler, persister, options, crawler_configuration)
 
         for request in all_requests:
             await module.attack(request)
@@ -162,7 +161,7 @@ async def test_vulnerabilities():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleLdap(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLdap(crawler, persister, options, crawler_configuration)
 
         for request in all_requests:
             await module.attack(request)
@@ -203,7 +202,7 @@ async def test_is_page_dynamic():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleLdap(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLdap(crawler, persister, options, crawler_configuration)
 
         assert not await module.is_page_dynamic(
             Request("http://perdu.com/"),
             PayloadInfo("", "", True),
diff --git a/tests/attack/test_mod_log4shell.py b/tests/attack/test_mod_log4shell.py
index 9556d43a4..f6830e60e 100644
--- a/tests/attack/test_mod_log4shell.py
+++ b/tests/attack/test_mod_log4shell.py
@@ -1,7 +1,6 @@
 import asyncio
 import os
 import random
-from asyncio import Event
 from unittest import mock
 from unittest.mock import patch, AsyncMock
 from httpx import Response as HttpxResponse
@@ -40,7 +39,7 @@ async def test_read_headers():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "dns_endpoint": "8.8.8.8"}
 
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         module.DATA_DIR = ""
         with mock.patch("builtins.open", get_mock_open(files)) as mock_open_headers:
@@ -75,7 +74,7 @@ async def test_get_batch_malicious_headers():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         headers = random.sample(range(0, 100), 100)
 
         malicious_headers, headers_uuid_record = module._get_batch_malicious_headers(headers)
@@ -109,7 +108,7 @@ def __init__(self, response: bool) -> None:
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "dns_endpoint": "dns.google"}
 
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         with mock.patch.object(Resolver, "resolve", return_value=(MockAnswer(True),)):
             assert await module._verify_dns("foobar") is True
@@ -139,7 +138,7 @@ async def mock_verify_dns(_header_uuid: str):
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         module._verify_dns = mock_verify_dns
@@ -187,7 +186,7 @@ async def mock_verify_dns(_header_uuid: str):
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         module._verify_dns = mock_verify_dns
@@ -217,7 +216,7 @@ async def test_must_attack():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         module.finished = False
@@ -255,7 +254,7 @@ async def test_attack():
     with mock.patch("builtins.open", get_mock_open(files)) as mock_open_headers, \
             patch.object(ModuleLog4Shell, "_verify_dns", return_value=future_verify_dns) as mock_verify_dns:
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
         module.DATA_DIR = ""
         module.HEADERS_FILE = "headers.txt"
@@ -283,20 +282,20 @@ def test_init():
     # When the dns_endpoint is valid
     options = {"timeout": 10, "level": 2, "dns_endpoint": "whatever.use.mock"}
     with patch("socket.gethostbyname", autospec=True) as mock_gethostbyname:
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
         assert mock_gethostbyname.assert_called_once
         assert not module.finished
 
     # When the dns_endpoint is not valid
     options = {"timeout": 10, "level": 2, "dns_endpoint": "256.512.1024.2048"}
-    module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+    module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
     assert module.finished
 
     # When the dns_endpoint is None
     options = {"timeout": 10, "level": 2, "dns_endpoint": None}
-    module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+    module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
     assert module.finished
@@ -324,7 +323,7 @@ async def test_attack_apache_struts():
     with patch.object(
             ModuleLog4Shell, "_verify_url_vulnerability", return_value=future_url_vulnerability
     ) as mock_verify_url:
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         await module._attack_apache_struts("http://perdu.com/")
@@ -355,7 +354,7 @@ async def test_attack_apache_druid():
     with patch.object(
             ModuleLog4Shell, "_verify_url_vulnerability", return_value=future_url_vulnerability
     ) as mock_verify_url:
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         await module._attack_apache_druid_url("http://perdu.com/")
@@ -386,7 +385,7 @@ async def test_attack_unifi():
     with patch.object(
             ModuleLog4Shell, "_verify_url_vulnerability", return_value=future_url_vulnerability
     ) as mock_verify_url:
-        module = ModuleLog4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleLog4Shell(crawler, persister, options, crawler_configuration)
 
         await module._attack_unifi_url("http://perdu.com/")
diff --git a/tests/attack/test_mod_methods.py b/tests/attack/test_mod_methods.py
index 1309381bb..a3d82d883 100644
--- a/tests/attack/test_mod_methods.py
+++ b/tests/attack/test_mod_methods.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -81,7 +80,7 @@ async def test_trivial():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleMethods(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleMethods(crawler, persister, options, crawler_configuration)
         module.do_get = True
         for request, response in all_requests:
             await module.attack(request, response)
@@ -156,7 +155,7 @@ async def test_advanced():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleMethods(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleMethods(crawler, persister, options, crawler_configuration)
         module.do_get = True
         await module.attack(request, response)
@@ -208,7 +207,7 @@ async def test_blind_with_trace():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleMethods(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleMethods(crawler, persister, options, crawler_configuration)
         module.do_get = True
         await module.attack(request, response)
diff --git a/tests/attack/test_mod_network_device.py b/tests/attack/test_mod_network_device.py
index fa8b887c6..633697aec 100644
--- a/tests/attack/test_mod_network_device.py
+++ b/tests/attack/test_mod_network_device.py
@@ -1,5 +1,4 @@
 import json
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -39,7 +38,7 @@ async def test_no_net_device():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -76,7 +75,7 @@ async def test_ubika_without_version():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -128,7 +127,7 @@ async def test_ubika_with_version():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -171,7 +170,7 @@ async def test_detect_fortimanager():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -213,7 +212,7 @@ async def test_detect_ssl_vpn():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -253,7 +252,7 @@ async def test_fortinet_false_positive():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -289,7 +288,7 @@ async def test_detect_fortinet():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -329,7 +328,7 @@ async def test_detect_fortiportal_from_title():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -370,7 +369,7 @@ async def test_detect_fortimail():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -409,7 +408,7 @@ async def test_detect_fortimanager():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -449,7 +448,7 @@ async def test_detect_fortianalyzer():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -485,7 +484,7 @@ async def test_raise_on_request_error():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleForti(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleForti(crawler, persister, options, crawler_configuration)
 
         with pytest.raises(RequestError) as exc_info:
             await module.check_forti("http://perdu.com/")
@@ -531,7 +530,7 @@ async def test_detect_harbor_with_version():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -579,7 +578,7 @@ async def test_detect_harbor_without_version():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -620,7 +619,7 @@ async def test_detect_harbor_with_json_error():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -654,7 +653,7 @@ async def test_detect_harbor_raise_on_request_error():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleHarbor(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleHarbor(crawler, persister, options, crawler_configuration)
 
         with pytest.raises(RequestError) as exc_info:
             await module.check_harbor("http://perdu.com/")
@@ -692,7 +691,7 @@ async def test_detect_citrix_from_title():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -732,7 +731,7 @@ async def test_detect_citrix_from_class():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -765,7 +764,7 @@ async def test_detect_citrix_in_root_url():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -805,7 +804,7 @@ async def test_checkpoint_without_version():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -845,7 +844,7 @@ async def test_checkpoint_based_on_realmsArrJSON():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -888,7 +887,7 @@ async def test_checkpoint_with_version():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNetworkDevice(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNetworkDevice(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
diff --git a/tests/attack/test_mod_nikto.py b/tests/attack/test_mod_nikto.py
index ea83ad788..5276545bd 100644
--- a/tests/attack/test_mod_nikto.py
+++ b/tests/attack/test_mod_nikto.py
@@ -1,5 +1,4 @@
 import os
-from asyncio import Event
 from itertools import chain
 from unittest.mock import AsyncMock
 
@@ -40,7 +39,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNikto(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNikto(crawler, persister, options, crawler_configuration)
         module.do_get = True
         await module.attack(request)
@@ -92,7 +91,7 @@ async def test_false_positives():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "tasks": 20}
 
-        module = ModuleNikto(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleNikto(crawler, persister, options, crawler_configuration)
         module.do_get = True
         module.NIKTO_DB = "temp_nikto_db"
         await module.attack(request)
diff --git a/tests/attack/test_mod_permanentxss.py b/tests/attack/test_mod_permanentxss.py
index 34a02b228..62f764c3a 100644
--- a/tests/attack/test_mod_permanentxss.py
+++ b/tests/attack/test_mod_permanentxss.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -57,7 +56,7 @@ async def test_second_order_injection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModulePermanentxss(crawler, persister, options, Event(), crawler_configuration)
+        module = ModulePermanentxss(crawler, persister, options, crawler_configuration)
         module.do_post = False
         module.tried_xss["iamgroot"] = (
             comment_request,
@@ -95,7 +94,7 @@ async def test_first_order_injection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModulePermanentxss(crawler, persister, options, Event(), crawler_configuration)
+        module = ModulePermanentxss(crawler, persister, options, crawler_configuration)
         module.do_post = False
         module.tried_xss["iamgroot"] = (
             comment_request,
diff --git a/tests/attack/test_mod_redirect.py b/tests/attack/test_mod_redirect.py
index 3a55f984b..36f04dd32 100644
--- a/tests/attack/test_mod_redirect.py
+++ b/tests/attack/test_mod_redirect.py
@@ -2,7 +2,6 @@
 import os
 import sys
 from time import sleep
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import pytest
@@ -34,7 +33,7 @@ async def test_redirect_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleRedirect(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleRedirect(crawler, persister, options, crawler_configuration)
         await module.attack(request)
 
         assert persister.add_payload.call_args_list[0][1]["module"] == "redirect"
@@ -53,7 +52,7 @@ async def test_redirect_detection_no_url():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleRedirect(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleRedirect(crawler, persister, options, crawler_configuration)
         await module.attack(request)
 
         assert persister.add_payload.call_args_list[0][1]["module"] == "redirect"
@@ -92,7 +91,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleRedirect(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleRedirect(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             await module.attack(request)
diff --git a/tests/attack/test_mod_shellshock.py b/tests/attack/test_mod_shellshock.py
index 1d2d2ea32..5d2c74758 100644
--- a/tests/attack/test_mod_shellshock.py
+++ b/tests/attack/test_mod_shellshock.py
@@ -1,6 +1,5 @@
 import re
 from binascii import unhexlify
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -44,7 +43,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleShellshock(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleShellshock(crawler, persister, options, crawler_configuration)
         module.do_get = True
         for request in all_requests:
             await module.attack(request)
diff --git a/tests/attack/test_mod_spring4shell.py b/tests/attack/test_mod_spring4shell.py
index 8531f0dd7..f25c86fb0 100644
--- a/tests/attack/test_mod_spring4shell.py
+++ b/tests/attack/test_mod_spring4shell.py
@@ -1,6 +1,5 @@
 import asyncio
 import os
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 from httpx import Response as HttpxResponse
@@ -33,7 +32,7 @@ async def test_detect_spring4shell():
     crawler_configuration = CrawlerConfiguration(request)
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
-        module = ModuleSpring4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSpring4Shell(crawler, persister, options, crawler_configuration)
 
         assert await module._check_spring4shell("GET", request, payload) == False
@@ -64,7 +63,7 @@ async def test_detect_spring4shell_get():
     crawler_configuration = CrawlerConfiguration(request)
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
-        module = ModuleSpring4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSpring4Shell(crawler, persister, options, crawler_configuration)
 
         assert await module._check_spring4shell("GET", request, payload) == True
         assert await module._check_spring4shell("POST", request, payload) == False
@@ -88,7 +87,7 @@ async def test_no_spring4shell():
     crawler_configuration = CrawlerConfiguration(request)
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
-        module = ModuleSpring4Shell(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSpring4Shell(crawler, persister, options, crawler_configuration)
 
         assert await module._check_spring4shell("GET", request, payload) == False
         assert await module._check_spring4shell("POST", request, payload) == False
\ No newline at end of file
diff --git a/tests/attack/test_mod_sql.py b/tests/attack/test_mod_sql.py
index beb029072..0fc56adb3 100644
--- a/tests/attack/test_mod_sql.py
+++ b/tests/attack/test_mod_sql.py
@@ -1,7 +1,6 @@
 from urllib.parse import urlparse, parse_qs
 from tempfile import NamedTemporaryFile
 import sqlite3
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -44,7 +43,7 @@ async def test_whole_stuff():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleSql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSql(crawler, persister, options, crawler_configuration)
         module.do_post = True
         for request in all_requests:
             await module.attack(request)
@@ -66,7 +65,7 @@ async def test_false_positive():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 1}
 
-        module = ModuleSql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSql(crawler, persister, options, crawler_configuration)
         module.do_post = True
         await module.attack(request)
@@ -97,7 +96,7 @@ async def test_true_positive():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 1}
 
-        module = ModuleSql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSql(crawler, persister, options, crawler_configuration)
         module.do_post = True
         await module.attack(request)
@@ -154,7 +153,7 @@ def process(http_request):
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 1}
 
-        module = ModuleSql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSql(crawler, persister, options, crawler_configuration)
         module.do_post = True
         await module.attack(request)
@@ -177,7 +176,7 @@ async def test_negative_blind():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 1}
 
-        module = ModuleSql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSql(crawler, persister, options, crawler_configuration)
         await module.attack(request)
 
         assert not persister.add_payload.call_count
@@ -236,7 +235,7 @@ def process(http_request):
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 1}
 
-        module = ModuleSql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSql(crawler, persister, options, crawler_configuration)
         module.do_post = True
         await module.attack(request)
diff --git a/tests/attack/test_mod_ssl.py b/tests/attack/test_mod_ssl.py
index 205868784..409d5e925 100644
--- a/tests/attack/test_mod_ssl.py
+++ b/tests/attack/test_mod_ssl.py
@@ -2,7 +2,6 @@
 import os
 import sys
 from time import sleep
-from asyncio import Event
 import http.server
 import ssl
 from unittest.mock import AsyncMock
@@ -61,7 +60,7 @@ async def test_ssl_scanner():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleSsl(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSsl(crawler, persister, options, crawler_configuration)
         await module.attack(request)
 
         persister.add_payload.assert_any_call(
@@ -192,7 +191,7 @@ async def test_certificate_transparency():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleSsl(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSsl(crawler, persister, options, crawler_configuration)
 
         assert 1 == await module.check_certificate_transparency(cert)
diff --git a/tests/attack/test_mod_ssrf.py b/tests/attack/test_mod_ssrf.py
index 4dc6bf3ad..ca8d484fc 100644
--- a/tests/attack/test_mod_ssrf.py
+++ b/tests/attack/test_mod_ssrf.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -49,7 +48,7 @@ def get_path_by_id(request_id):
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleSsrf(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSsrf(crawler, persister, options, crawler_configuration)
         module.do_post = True
 
         respx.get("https://wapiti3.ovh/get_ssrf.php?session_id=" + module._session_id).mock(
@@ -103,7 +102,7 @@ async def test_query_string_injection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleSsrf(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleSsrf(crawler, persister, options, crawler_configuration)
         module._session_id = "yolo"
 
         mutated_request, parameter, payload_info = next(module.mutator.mutate(request, module.get_payloads))
         # Make sure get_payloads will correctly inject the session ID and hex-encoded parameter name in such case
diff --git a/tests/attack/test_mod_takeover.py b/tests/attack/test_mod_takeover.py
index 2cf5eb2b4..b4bee29a0 100644
--- a/tests/attack/test_mod_takeover.py
+++ b/tests/attack/test_mod_takeover.py
@@ -1,4 +1,3 @@
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import httpx
@@ -63,7 +62,7 @@ async def resolve(qname, rdtype, raise_on_no_answer: bool = False):
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleTakeover(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleTakeover(crawler, persister, options, crawler_configuration)
 
         for request in all_requests:
             await module.attack(request)
diff --git a/tests/attack/test_mod_timesql.py b/tests/attack/test_mod_timesql.py
index ca0d95aef..2d94a649e 100644
--- a/tests/attack/test_mod_timesql.py
+++ b/tests/attack/test_mod_timesql.py
@@ -2,7 +2,6 @@
 import os
 import sys
 from time import sleep
-from asyncio import Event
 from unittest.mock import AsyncMock
 
 import pytest
@@ -39,7 +38,7 @@ async def test_timesql_detection():
         # and in the module we have ceil(attack_options.get("timeout", self.time_to_sleep)) + 1
         options = {"timeout": 0, "level": 1}
 
-        module = ModuleTimesql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleTimesql(crawler, persister, options, crawler_configuration)
         module.do_post = False
         await module.attack(request)
@@ -63,7 +62,7 @@ async def test_timesql_false_positive():
         # and in the module we have ceil(attack_options.get("timeout", self.time_to_sleep)) + 1
         options = {"timeout": 0, "level": 1}
 
-        module = ModuleTimesql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleTimesql(crawler, persister, options, crawler_configuration)
         module.do_post = False
         await module.attack(request)
@@ -83,7 +82,7 @@ async def test_false_positive_request_count():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 1, "level": 1}
 
-        module = ModuleTimesql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleTimesql(crawler, persister, options, crawler_configuration)
         module.do_post = False
         await module.attack(request)
@@ -108,7 +107,7 @@ async def test_true_positive_request_count():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 1, "level": 1}
 
-        module = ModuleTimesql(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleTimesql(crawler, persister, options, crawler_configuration)
         module.do_post = False
         await module.attack(request)
diff --git a/tests/attack/test_mod_upload.py b/tests/attack/test_mod_upload.py
index 13d711d90..9b4581b42 100644
--- a/tests/attack/test_mod_upload.py
+++ b/tests/attack/test_mod_upload.py
@@ -1,5 +1,4 @@
 import logging
-from asyncio import Event
 from unittest.mock import AsyncMock, patch
 import re
 
@@ -87,7 +86,7 @@ async def test_extension_blacklist():
         # Also make sure we respect the exclusion list
         options = {"timeout": 10, "level": 2, "excluded_urls": ["http://*logout*"]}
 
-        module = ModuleUpload(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleUpload(crawler, persister, options, crawler_configuration)
         await module.attack(request)
 
         assert persister.add_payload.call_count
diff --git a/tests/attack/test_mod_wapp.py b/tests/attack/test_mod_wapp.py
index 6cd55e1c2..d93442df3 100644
--- a/tests/attack/test_mod_wapp.py
+++ b/tests/attack/test_mod_wapp.py
@@ -2,7 +2,6 @@
 import lzma
 import os
 import tempfile
-from asyncio import Event
 from pathlib import Path
 from unittest.mock import AsyncMock, patch, mock_open, ANY
 
@@ -46,7 +45,7 @@ async def test_false_positive():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -78,7 +77,7 @@ async def test_url_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -140,7 +139,7 @@ async def test_html_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -185,7 +184,7 @@ async def test_dom_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
         await module.attack(request)
 
         assert persister.add_payload.call_count
@@ -231,7 +230,7 @@ async def test_script_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -269,7 +268,7 @@ async def test_cookies_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -307,7 +306,7 @@ async def test_cookies_whatever_value_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -345,7 +344,7 @@ async def test_headers_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -384,7 +383,7 @@ async def test_meta_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -490,7 +489,7 @@ async def test_multi_detection():
             # To prevent any issue the config directory is a newly created temporary directory
             persister.CONFIG_DIR = temp_dir
 
-            module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+            module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
             await module.attack(request)
@@ -582,7 +581,7 @@ async def test_implies_detection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -624,7 +623,7 @@ async def test_vulnerabilities():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -680,7 +679,7 @@ async def test_merge_with_and_without_redirection():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         await module.attack(request)
@@ -732,7 +731,7 @@ async def test_raise_on_invalid_json():
     crawler_configuration = CrawlerConfiguration(Request("http://perdu.com/"))
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "wapp_url": "http://perdu.com"}
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         with pytest.raises(ValueError) as exc_info:
             await module._dump_url_content_to_file("http://perdu.com/src/categories.json", "test.json")
@@ -760,7 +759,7 @@ async def test_raise_on_not_valid_db_url():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "wapp_url": "http://perdu.com/"}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         with pytest.raises(ValueError) as exc_info:
             await module._load_wapp_database(cat_url, tech_url, group_url)
@@ -819,7 +818,7 @@ async def test_raise_on_value_error():
     async with AsyncCrawler.with_configuration(crawler_configuration) as crawler:
         options = {"timeout": 10, "level": 2, "wapp_url": "http://perdu.com/"}
 
-        module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration)
+        module = ModuleWapp(crawler, persister, options, crawler_configuration)
 
         with pytest.raises(ValueError) as exc_info:
await module._load_wapp_database(cat_url, tech_url, group_url) @@ -845,7 +844,7 @@ async def test_raise_on_request_error(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2, "wapp_url": "http://perdu.com/"} - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) with pytest.raises(RequestError) as exc_info: await module._load_wapp_database(cat_url, tech_url, group_url) @@ -868,7 +867,7 @@ async def test_raise_on_request_error_for_dump_url(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2, "wapp_url": "http://perdu.com/"} - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) with pytest.raises(RequestError) as exc_info: await module._dump_url_content_to_file(url, "cat.json") @@ -889,7 +888,7 @@ async def test_wappalyzer_raise_on_request_error_for_update(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2, "wapp_url": "http://perdu.com/"} - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) with pytest.raises(RequestError) as exc_info: await module.update_wappalyzer() @@ -921,7 +920,7 @@ async def test_wappalyzer_raise_on_value_error_for_update(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2, "wapp_url": "http://perdu.com/"} - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) with pytest.raises(ValueError) as exc_info: await module.update_wappalyzer() @@ -1045,7 +1044,7 @@ async def test_private_gitlab(): options = {"timeout": 10, "level": 2, "wapp_url": "http://perdu.com/"} with patch.dict(os.environ, {'GITLAB_PRIVATE_TOKEN': 'test_gitlab_private_token'}): - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) await module.update_wappalyzer() await module.attack(request) @@ -1095,7 +1094,7 @@ async def test_wappalyzer_raise_on_not_valid_directory_for_update(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2, "wapp_dir": "/"} - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) with pytest.raises(ValueError) as exc_info: await module.update_wappalyzer() @@ -1143,7 +1142,7 @@ async def test_wappalyzer_raise_on_not_valid_json_for_update(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2, "wapp_dir": wapp_dir} - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) with pytest.raises(ValueError) as exc_info: await module.update_wappalyzer() @@ -1174,7 +1173,7 @@ async def test_wappalyzer_raise_on_not_valid_json_file_for_update(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2, "wapp_dir": wapp_dir} - module = 
ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) with pytest.raises(ValueError) as exc_info: await module.update_wappalyzer() @@ -1205,7 +1204,7 @@ async def test_wappalyzer_raise_on_file_does_not_exist_for_update(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2, "wapp_dir": wapp_dir} - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) with pytest.raises(ValueError) as exc_info: await module.update_wappalyzer() @@ -1253,7 +1252,7 @@ async def test_get_vulnerabilities(): # To prevent any issue the config directory is a newly created temporary directory persister.CONFIG_DIR = temp_dir - module = ModuleWapp(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWapp(crawler, persister, options, crawler_configuration) os.mkdir(os.path.join(temp_dir, "cves")) with lzma.open(os.path.join(temp_dir, "cves", "wordpress.json.xz"), "wb") as fd: fd.write(json.dumps(cve_data).encode()) diff --git a/tests/attack/test_mod_wp_enum.py b/tests/attack/test_mod_wp_enum.py index 0513a76b7..d5b75afe3 100644 --- a/tests/attack/test_mod_wp_enum.py +++ b/tests/attack/test_mod_wp_enum.py @@ -1,4 +1,3 @@ -from asyncio import Event from unittest.mock import AsyncMock, patch import httpx @@ -33,7 +32,7 @@ async def test_no_wordpress(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleWpEnum(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWpEnum(crawler, persister, options, crawler_configuration) await module.attack(request) @@ -103,7 +102,7 @@ async def test_plugin(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleWpEnum(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWpEnum(crawler, persister, options, crawler_configuration) await module.attack(request) @@ -190,7 +189,7 @@ async def test_theme(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleWpEnum(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWpEnum(crawler, persister, options, crawler_configuration) await module.attack(request) @@ -273,7 +272,7 @@ async def test_wp_version(): with patch.object(ModuleWpEnum, "detect_plugin", AsyncMock()) as mock_detect_plugin, \ patch.object(ModuleWpEnum, "detect_theme", AsyncMock()) as mock_detect_theme: - module = ModuleWpEnum(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWpEnum(crawler, persister, options, crawler_configuration) await module.attack(request) @@ -314,7 +313,7 @@ async def test_wp_version_no_file(): with patch.object(ModuleWpEnum, "detect_plugin", AsyncMock()) as mock_detect_plugin, \ patch.object(ModuleWpEnum, "detect_theme", AsyncMock()) as mock_detect_theme: - module = ModuleWpEnum(crawler, persister, options, Event(), crawler_configuration) + module = ModuleWpEnum(crawler, persister, options, crawler_configuration) await module.attack(request) diff --git a/tests/attack/test_mod_xss_advanced.py b/tests/attack/test_mod_xss_advanced.py index c616db8b2..72e7268f6 100644 --- a/tests/attack/test_mod_xss_advanced.py +++ 
b/tests/attack/test_mod_xss_advanced.py @@ -3,7 +3,6 @@ import os import sys from time import sleep -from asyncio import Event from unittest.mock import AsyncMock import pytest @@ -35,7 +34,7 @@ async def test_title_false_positive(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -52,7 +51,7 @@ async def test_title_positive(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -74,7 +73,7 @@ async def test_script_filter_bypass(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -93,7 +92,7 @@ async def test_script_src_protocol_relative(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -114,7 +113,7 @@ async def test_attr_quote_escape(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -133,7 +132,7 @@ async def test_attr_double_quote_escape(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -152,7 +151,7 @@ async def test_attr_escape(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -171,7 +170,7 @@ async def test_tag_name_escape(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -190,7 +189,7 @@ async def test_partial_tag_name_escape(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) 
module.do_post = False await module.attack(request) @@ -208,7 +207,7 @@ async def test_xss_inside_tag_input(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -227,7 +226,7 @@ async def test_xss_inside_tag_link(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -246,7 +245,7 @@ async def test_xss_inside_href_link(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -265,7 +264,7 @@ async def test_xss_inside_src_iframe(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -284,7 +283,7 @@ async def test_xss_uppercase_no_script(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -303,7 +302,7 @@ async def test_frame_src_escape(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -322,7 +321,7 @@ async def test_frame_src_no_escape(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -341,7 +340,7 @@ async def test_bad_separator_used(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -359,7 +358,7 @@ async def test_escape_with_style(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -377,7 +376,7 @@ async def test_rare_tag_and_event(): async with 
AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -395,7 +394,7 @@ async def test_xss_with_strong_csp(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -412,7 +411,7 @@ async def test_xss_with_weak_csp(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -429,7 +428,7 @@ async def test_fallback_to_html_injection(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) diff --git a/tests/attack/test_mod_xss_basics.py b/tests/attack/test_mod_xss_basics.py index 7c9941f1c..b4d0ef512 100644 --- a/tests/attack/test_mod_xss_basics.py +++ b/tests/attack/test_mod_xss_basics.py @@ -1,4 +1,3 @@ -from asyncio import Event from unittest.mock import AsyncMock import respx @@ -41,7 +40,7 @@ async def test_whole_stuff(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXss(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXss(crawler, persister, options, crawler_configuration) module.do_post = True for request in all_requests: await module.attack(request) diff --git a/tests/attack/test_mod_xxe.py b/tests/attack/test_mod_xxe.py index 410af1320..8a307012b 100644 --- a/tests/attack/test_mod_xxe.py +++ b/tests/attack/test_mod_xxe.py @@ -3,7 +3,6 @@ import sys from time import sleep import logging -from asyncio import Event from unittest.mock import AsyncMock import pytest @@ -43,7 +42,7 @@ async def test_direct_body(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 1} - module = ModuleXxe(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXxe(crawler, persister, options, crawler_configuration) await module.attack(request) @@ -64,7 +63,7 @@ async def test_direct_param(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 1} - module = ModuleXxe(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXxe(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -81,7 +80,7 @@ async def test_direct_query_string(): async with AsyncCrawler.with_configuration(crawler_configuration) as crawler: options = {"timeout": 10, "level": 2} - module = ModuleXxe(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXxe(crawler, persister, options, crawler_configuration) module.do_post = False await 
module.attack(request) @@ -113,7 +112,7 @@ async def test_out_of_band_body(): "internal_endpoint": "http://wapiti3.ovh/" } - module = ModuleXxe(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXxe(crawler, persister, options, crawler_configuration) respx.get("http://wapiti3.ovh/get_xxe.php?session_id=" + module._session_id).mock( return_value=httpx.Response( @@ -162,7 +161,7 @@ async def test_out_of_band_param(): "internal_endpoint": "http://wapiti3.ovh/" } - module = ModuleXxe(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXxe(crawler, persister, options, crawler_configuration) respx.get("http://wapiti3.ovh/get_xxe.php?session_id=" + module._session_id).mock( return_value=httpx.Response( @@ -211,7 +210,7 @@ async def test_out_of_band_query_string(): "internal_endpoint": "http://wapiti3.ovh/" } - module = ModuleXxe(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXxe(crawler, persister, options, crawler_configuration) module.do_post = False await module.attack(request) @@ -264,7 +263,7 @@ async def test_direct_upload(): "internal_endpoint": "http://wapiti3.ovh/" } - module = ModuleXxe(crawler, persister, options, Event(), crawler_configuration) + module = ModuleXxe(crawler, persister, options, crawler_configuration) await module.attack(request) diff --git a/tests/cli/test_options.py b/tests/cli/test_options.py index 1a749acff..921b19519 100644 --- a/tests/cli/test_options.py +++ b/tests/cli/test_options.py @@ -1,5 +1,4 @@ import sys -from asyncio import Event from time import monotonic from unittest import mock @@ -25,74 +24,73 @@ async def count_paths(self): return 0 with mock.patch("os.makedirs", return_value=True): - stop_event = Event() cli = Wapiti(Request("http://perdu.com/"), session_dir="/dev/shm") cli.persister = CustomMock() crawler = mock.MagicMock() cli.set_attack_options({"timeout": 10}) cli.set_modules("-all,xxe") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) assert {module.name for module in attak_modules if module.do_get or module.do_post} == {"xxe"} cli.set_modules("xxe") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) assert {module.name for module in attak_modules if module.do_get or module.do_post} == {"xxe"} cli.set_modules("common,xxe") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == len(common_modules) + 1 cli.set_modules("common,-exec") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == len(common_modules) - 1 cli.set_modules("all,-xxe") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == len(all_modules) - 1 cli.set_modules("all,-common") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module 
in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == len(all_modules) - len(common_modules) cli.set_modules("common,-all,xss") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == 1 cli.set_modules("passive") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == len(passive_modules) cli.set_modules("passive,xxe") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == len(passive_modules) + 1 cli.set_modules("passive,-wapp") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == len(passive_modules) - 1 cli.set_modules("cms") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert len(activated_modules) == 1 # Empty module list: no modules will be used cli.set_modules("") - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert not activated_modules # Use default settings: only use "commons" modules cli.set_modules(None) - attak_modules = await cli._load_attack_modules(stop_event, crawler) + attak_modules = await cli._load_attack_modules(crawler) activated_modules = {module.name for module in attak_modules if module.do_get or module.do_post} assert activated_modules == set(common_modules) @@ -152,7 +150,6 @@ async def get_root_url(self): return "http://perdu.com/" with mock.patch("os.makedirs", return_value=True): - stop_event = Event() cli = Wapiti(Request("http://perdu.com/"), session_dir="/dev/shm") cli.persister = CustomMock() cli.set_max_attack_time(max_attack_time) @@ -160,7 +157,7 @@ async def get_root_url(self): cli.set_modules("all") time = monotonic() - await cli.attack(stop_event) + await cli.attack() max_run_duration = max_attack_time * (len(all_modules) + delta) # execution time for all modules + delta of uncertainty assert monotonic() - time < max_run_duration diff --git a/wapitiCore/attack/attack.py b/wapitiCore/attack/attack.py index 8f934ceb9..31c56b906 100644 --- a/wapitiCore/attack/attack.py +++ b/wapitiCore/attack/attack.py @@ -26,7 +26,6 @@ import random from binascii import hexlify from typing import Optional, Iterator, Tuple, List, Callable, Union, Iterable, Type -from asyncio import Event import json from pkg_resources import resource_filename @@ -225,13 +224,11 @@ def __init__( crawler: AsyncCrawler, persister: SqlPersister, attack_options: dict, - stop_event: Event, crawler_configuration: CrawlerConfiguration): super().__init__() self._session_id = 
"".join([random.choice("0123456789abcdefghjijklmnopqrstuvwxyz") for __ in range(0, 6)]) self.crawler = crawler self.persister = persister - self._stop_event = stop_event self.options = attack_options self.crawler_configuration = crawler_configuration self.start = 0 diff --git a/wapitiCore/attack/cms/cms_common.py b/wapitiCore/attack/cms/cms_common.py index c13653507..17c84bb4d 100644 --- a/wapitiCore/attack/cms/cms_common.py +++ b/wapitiCore/attack/cms/cms_common.py @@ -88,18 +88,8 @@ async def detect_version(self, payloads_hash, root_url): tasks.remove(task) - if self._stop_event.is_set(): - for task in pending_tasks: - task.cancel() - tasks.remove(task) - - if len(pending_tasks) > self.options["tasks"]: - continue - - break - - if self._stop_event.is_set(): - break + if len(pending_tasks) <= self.options["tasks"]: + break # We reached the end of your list, but we may still have some running tasks while tasks: @@ -120,12 +110,5 @@ async def detect_version(self, payloads_hash, root_url): tasks.remove(task) - if self._stop_event.is_set(): - for task in pending_tasks: - task.cancel() - tasks.remove(task) - - break - if versions: self.versions = set.intersection(*[set(versions) for versions in versions.values()]) diff --git a/wapitiCore/attack/cms/mod_wp_enum.py b/wapitiCore/attack/cms/mod_wp_enum.py index ce346b436..ce1a7231e 100644 --- a/wapitiCore/attack/cms/mod_wp_enum.py +++ b/wapitiCore/attack/cms/mod_wp_enum.py @@ -102,9 +102,6 @@ def get_theme(self): async def detect_plugin(self, url): for plugin in self.get_plugin(): - if self._stop_event.is_set(): - break - request = Request(f'{url}/wp-content/plugins/{plugin}/readme.txt', 'GET') try: response: Response = await self.crawler.async_send(request) @@ -160,9 +157,6 @@ async def detect_plugin(self, url): async def detect_theme(self, url): for theme in self.get_theme(): - if self._stop_event.is_set(): - break - request = Request(f'{url}/wp-content/themes/{theme}/readme.txt', 'GET') try: response: Response = await self.crawler.async_send(request) diff --git a/wapitiCore/attack/mod_backup.py b/wapitiCore/attack/mod_backup.py index 8e680fc85..70c7ee465 100644 --- a/wapitiCore/attack/mod_backup.py +++ b/wapitiCore/attack/mod_backup.py @@ -44,8 +44,8 @@ class ModuleBackup(Attack): do_get = True do_post = False - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - super().__init__(crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + super().__init__(crawler, persister, attack_options, crawler_configuration) self.false_positive_directories = {} def get_payloads(self, _: Optional[Request] = None, __: Optional[Parameter] = None) -> Iterator[PayloadInfo]: @@ -94,8 +94,6 @@ async def attack(self, request: Request, response: Optional[Response] = None): for payload_info in self.get_payloads(): raw_payload = payload_info.payload - if self._stop_event.is_set(): - break if request.file_name: if "[FILE_" not in raw_payload: diff --git a/wapitiCore/attack/mod_brute_login_form.py b/wapitiCore/attack/mod_brute_login_form.py index c3e7861c0..33f26ecfd 100644 --- a/wapitiCore/attack/mod_brute_login_form.py +++ b/wapitiCore/attack/mod_brute_login_form.py @@ -160,7 +160,7 @@ async def attack(self, request: Request, response: Optional[Response] = None): creds_iterator = product(self.get_usernames(), self.get_passwords()) while True: - if pending_count < self.options["tasks"] and not self._stop_event.is_set() and not found: + 
if pending_count < self.options["tasks"] and not found: try: username, password = next(creds_iterator) except StopIteration: @@ -234,7 +234,7 @@ async def attack(self, request: Request, response: Optional[Response] = None): tasks.remove(task) - if self._stop_event.is_set() or found: + if found: # If we found valid credentials we need to stop pending tasks as they may generate false positives # because the session is opened on the website and next attempts may appear as logged in for task in pending_tasks: diff --git a/wapitiCore/attack/mod_buster.py b/wapitiCore/attack/mod_buster.py index c20a7f1ed..d3462a3a0 100644 --- a/wapitiCore/attack/mod_buster.py +++ b/wapitiCore/attack/mod_buster.py @@ -42,8 +42,8 @@ class ModuleBuster(Attack): do_get = True do_post = False - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + Attack.__init__(self, crawler, persister, attack_options, crawler_configuration) self.known_dirs = [] self.known_pages = [] self.new_resources = [] @@ -99,7 +99,7 @@ async def test_directory(self, path: str): with open(path_join(self.DATA_DIR, self.PATHS_FILE), encoding="utf-8", errors="ignore") as wordlist: while True: - if pending_count < self.options["tasks"] and not self._stop_event.is_set(): + if pending_count < self.options["tasks"]: try: candidate = next(wordlist).strip() except StopIteration: @@ -126,11 +126,6 @@ async def test_directory(self, path: str): self.network_errors += 1 tasks.remove(task) - if self._stop_event.is_set(): - for task in pending_tasks: - task.cancel() - tasks.remove(task) - async def attack(self, request: Request, response: Optional[Response] = None): self.finished = True if not self.do_get: @@ -148,12 +143,10 @@ async def attack(self, request: Request, response: Optional[Response] = None): # Then for each known webdirs we look for unknown webpages inside for current_dir in self.known_dirs: - if self._stop_event.is_set(): - break await self.test_directory(current_dir) # Finally, for each discovered webdirs we look for more webpages - while self.new_resources and not self._stop_event.is_set(): + while self.new_resources: current_res = self.new_resources.pop(0) if current_res.endswith("/"): # Mark as known then explore diff --git a/wapitiCore/attack/mod_cms.py b/wapitiCore/attack/mod_cms.py index dcfe24d2b..930958305 100644 --- a/wapitiCore/attack/mod_cms.py +++ b/wapitiCore/attack/mod_cms.py @@ -17,8 +17,6 @@ # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -from asyncio import Event - from typing import Optional from wapitiCore.attack.cms.mod_drupal_enum import ModuleDrupalEnum @@ -57,26 +55,26 @@ async def attack(self, request: Request, response: Optional[Response] = None): if "drupal" in cms_list: module = ModuleDrupalEnum( - self.crawler, self.persister, self.options, Event(), self.crawler_configuration + self.crawler, self.persister, self.options, self.crawler_configuration ) await module.attack(request_to_root) if "joomla" in cms_list: module = ModuleJoomlaEnum( - self.crawler, self.persister, self.options, Event(), self.crawler_configuration + self.crawler, self.persister, self.options, self.crawler_configuration ) await module.attack(request_to_root) if "prestashop" in 
cms_list: module = ModulePrestashopEnum( - self.crawler, self.persister, self.options, Event(), self.crawler_configuration + self.crawler, self.persister, self.options, self.crawler_configuration ) await module.attack(request_to_root) if "spip" in cms_list: module = ModuleSpipEnum( - self.crawler, self.persister, self.options, Event(), self.crawler_configuration + self.crawler, self.persister, self.options, self.crawler_configuration ) await module.attack(request_to_root) if "wp" in cms_list: module = ModuleWpEnum( - self.crawler, self.persister, self.options, Event(), self.crawler_configuration + self.crawler, self.persister, self.options, self.crawler_configuration ) await module.attack(request_to_root) diff --git a/wapitiCore/attack/mod_crlf.py b/wapitiCore/attack/mod_crlf.py index 69a1abd4a..4b3104379 100644 --- a/wapitiCore/attack/mod_crlf.py +++ b/wapitiCore/attack/mod_crlf.py @@ -40,8 +40,8 @@ class ModuleCrlf(Attack): do_post = True payloads = [PayloadInfo(payload="http://www.google.fr\r\nwapiti: 3.2.0 version")] - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - super().__init__(crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + super().__init__(crawler, persister, attack_options, crawler_configuration) self.mutator = self.get_mutator() async def attack(self, request: Request, response: Optional[Response] = None): diff --git a/wapitiCore/attack/mod_csrf.py b/wapitiCore/attack/mod_csrf.py index 9b5618631..16c4b3ca6 100644 --- a/wapitiCore/attack/mod_csrf.py +++ b/wapitiCore/attack/mod_csrf.py @@ -56,8 +56,8 @@ class ModuleCsrf(Attack): "anti-csrf-token", "x-csrf-header", "x-xsrf-header", "x-csrf-protection" ] - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + Attack.__init__(self, crawler, persister, attack_options, crawler_configuration) # list to ensure only one occurrence per (vulnerable url/post_keys) tuple self.already_vulnerable = [] diff --git a/wapitiCore/attack/mod_exec.py b/wapitiCore/attack/mod_exec.py index 624826804..e02456b32 100644 --- a/wapitiCore/attack/mod_exec.py +++ b/wapitiCore/attack/mod_exec.py @@ -40,8 +40,8 @@ class ModuleExec(Attack): """ name = "exec" - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - super().__init__(crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + super().__init__(crawler, persister, attack_options, crawler_configuration) self.false_positive_timeouts = set() self.mutator = self.get_mutator() diff --git a/wapitiCore/attack/mod_file.py b/wapitiCore/attack/mod_file.py index 7354683cf..2a222b205 100644 --- a/wapitiCore/attack/mod_file.py +++ b/wapitiCore/attack/mod_file.py @@ -104,8 +104,8 @@ class ModuleFile(Attack): """Detect file-related vulnerabilities such as directory traversal and include() vulnerabilities.""" name = "file" - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + Attack.__init__(self, crawler, 
persister, attack_options, crawler_configuration) self.known_false_positives = defaultdict(set) self.mutator = self.get_mutator() diff --git a/wapitiCore/attack/mod_htp.py b/wapitiCore/attack/mod_htp.py index 327298f78..349aca1f7 100644 --- a/wapitiCore/attack/mod_htp.py +++ b/wapitiCore/attack/mod_htp.py @@ -68,8 +68,8 @@ class ModuleHtp(Attack): HTP_DATABASE = "hashtheplanet.db" HTP_DATABASE_URL = "https://github.com/Cyberwatch/HashThePlanet/releases/download/latest/hashtheplanet.db" - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + Attack.__init__(self, crawler, persister, attack_options, crawler_configuration) self.tech_versions: Dict[Technology, List[Versions]] = {} self.user_config_dir = self.persister.CONFIG_DIR @@ -136,18 +136,8 @@ async def search_static_files(self, root_url: str): tasks.remove(task) - if self._stop_event.is_set(): - for task in pending_tasks: - task.cancel() - tasks.remove(task) - - if len(pending_tasks) > self.options["tasks"]: - continue - - break - - if self._stop_event.is_set(): - break + if len(pending_tasks) <= self.options["tasks"]: + break # We reached the end of your list, but we may still have some running tasks while tasks: @@ -170,13 +160,6 @@ async def search_static_files(self, root_url: str): tasks.remove(task) - if self._stop_event.is_set(): - for task in pending_tasks: - task.cancel() - tasks.remove(task) - - break - async def _init_db(self): if self._db is None: await self._verify_htp_database(os.path.join(self.user_config_dir, self.HTP_DATABASE)) diff --git a/wapitiCore/attack/mod_ldap.py b/wapitiCore/attack/mod_ldap.py index 344916c27..5a7e1c718 100644 --- a/wapitiCore/attack/mod_ldap.py +++ b/wapitiCore/attack/mod_ldap.py @@ -121,8 +121,8 @@ class ModuleLdap(Attack): """ name = "ldap" - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - super().__init__(crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + super().__init__(crawler, persister, attack_options, crawler_configuration) self.mutator = self.get_mutator() def get_payloads(self, _: Optional[Request] = None, __: Optional[Parameter] = None) -> Iterator[PayloadInfo]: diff --git a/wapitiCore/attack/mod_log4shell.py b/wapitiCore/attack/mod_log4shell.py index 0f471d990..b699ef878 100644 --- a/wapitiCore/attack/mod_log4shell.py +++ b/wapitiCore/attack/mod_log4shell.py @@ -48,8 +48,8 @@ class ModuleLog4Shell(Attack): SOLR_URL = "solr/admin/cores" UNIFI_URL = "api/login" - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + Attack.__init__(self, crawler, persister, attack_options, crawler_configuration) dns_endpoint = attack_options.get("dns_endpoint") try: diff --git a/wapitiCore/attack/mod_network_device.py b/wapitiCore/attack/mod_network_device.py index 5154f0a3d..6b481e925 100644 --- a/wapitiCore/attack/mod_network_device.py +++ b/wapitiCore/attack/mod_network_device.py @@ -17,7 +17,6 @@ # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # 
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -from asyncio import Event from typing import Optional from wapitiCore.attack.network_devices.mod_checkpoint import ModuleCheckPoint @@ -52,6 +51,6 @@ async def attack(self, request: Request, response: Optional[Response] = None): modules_list = [ModuleCheckPoint, ModuleCitrix, ModuleForti, ModuleHarbor, ModuleUbika] for module in modules_list: mod = module( - self.crawler, self.persister, self.options, Event(), self.crawler_configuration + self.crawler, self.persister, self.options, self.crawler_configuration ) await mod.attack(request_to_root) diff --git a/wapitiCore/attack/mod_nikto.py b/wapitiCore/attack/mod_nikto.py index 7c8ca2a27..f9267865b 100644 --- a/wapitiCore/attack/mod_nikto.py +++ b/wapitiCore/attack/mod_nikto.py @@ -67,8 +67,8 @@ class ModuleNikto(Attack): user_config_dir = None finished = False - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + Attack.__init__(self, crawler, persister, attack_options, crawler_configuration) self.user_config_dir = self.persister.CONFIG_DIR self.junk_string = "w" + "".join( [random.choice("0123456789abcdefghjijklmnopqrstuvwxyz") for __ in range(0, 5000)] @@ -155,7 +155,7 @@ async def attack(self, request: Request, response: Optional[Response] = None): reader = csv.reader(nikto_db_file) while True: - if pending_count < self.options["tasks"] and not self._stop_event.is_set(): + if pending_count < self.options["tasks"]: try: line = next(reader) except StopIteration: @@ -180,11 +180,6 @@ async def attack(self, request: Request, response: Optional[Response] = None): await task tasks.remove(task) - if self._stop_event.is_set(): - for task in pending_tasks: - task.cancel() - tasks.remove(task) - async def process_line(self, line): match = match_or = match_and = False fail = fail_or = False diff --git a/wapitiCore/attack/mod_redirect.py b/wapitiCore/attack/mod_redirect.py index a2e8881a6..883d716bc 100644 --- a/wapitiCore/attack/mod_redirect.py +++ b/wapitiCore/attack/mod_redirect.py @@ -40,8 +40,8 @@ class ModuleRedirect(Attack): do_get = True do_post = False - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - super().__init__(crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + super().__init__(crawler, persister, attack_options, crawler_configuration) self.mutator = self.get_mutator() async def attack(self, request: Request, response: Optional[Response] = None): diff --git a/wapitiCore/attack/mod_shellshock.py b/wapitiCore/attack/mod_shellshock.py index da13918d9..d8d828dd0 100644 --- a/wapitiCore/attack/mod_shellshock.py +++ b/wapitiCore/attack/mod_shellshock.py @@ -40,8 +40,8 @@ class ModuleShellshock(Attack): do_get = True do_post = True - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + Attack.__init__(self, crawler, persister, attack_options, crawler_configuration) empty_func = "() { :;}; " self.rand_string = "".join([random.choice(string.hexdigits) for _ in range(32)]) diff --git 
a/wapitiCore/attack/mod_sql.py b/wapitiCore/attack/mod_sql.py index 90151cab5..38775274f 100644 --- a/wapitiCore/attack/mod_sql.py +++ b/wapitiCore/attack/mod_sql.py @@ -306,8 +306,8 @@ class ModuleSql(Attack): payloads = ["[VALUE]\xBF'\"("] filename_payload = "'\"(" # TODO: wait for https://github.com/shazow/urllib3/pull/856 then use that for files upld - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - super().__init__(crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + super().__init__(crawler, persister, attack_options, crawler_configuration) self.mutator = self.get_mutator() self.time_to_sleep = ceil(attack_options.get("timeout", self.time_to_sleep)) + 1 diff --git a/wapitiCore/attack/mod_ssl.py b/wapitiCore/attack/mod_ssl.py index 299d04ccd..95a239b8e 100644 --- a/wapitiCore/attack/mod_ssl.py +++ b/wapitiCore/attack/mod_ssl.py @@ -307,8 +307,8 @@ class ModuleSsl(Attack): """Evaluate the security of SSL/TLS certificate configuration.""" name = "ssl" - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + Attack.__init__(self, crawler, persister, attack_options, crawler_configuration) # list to ensure only one occurrence per (vulnerable url/post_keys) tuple self.tested_targets = set() self.has_sslcan = None diff --git a/wapitiCore/attack/mod_ssrf.py b/wapitiCore/attack/mod_ssrf.py index 880833650..52cc2cb5b 100644 --- a/wapitiCore/attack/mod_ssrf.py +++ b/wapitiCore/attack/mod_ssrf.py @@ -41,8 +41,8 @@ class ModuleSsrf(Attack): name = "ssrf" MSG_VULN = "SSRF vulnerability" - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - super().__init__(crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + super().__init__(crawler, persister, attack_options, crawler_configuration) self.mutator = self.get_mutator() def get_payloads( diff --git a/wapitiCore/attack/mod_takeover.py b/wapitiCore/attack/mod_takeover.py index f9c5764d6..d62d4fa7f 100644 --- a/wapitiCore/attack/mod_takeover.py +++ b/wapitiCore/attack/mod_takeover.py @@ -197,8 +197,8 @@ class ModuleTakeover(Attack): """Detect subdomains vulnerable to takeover (CNAME records pointing to non-existent and/or available domains)""" name = "takeover" - def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration): - super().__init__(crawler, persister, attack_options, stop_event, crawler_configuration) + def __init__(self, crawler, persister, attack_options, crawler_configuration): + super().__init__(crawler, persister, attack_options, crawler_configuration) self.processed_domains = set() self.takeover = TakeoverChecker() @@ -233,9 +233,6 @@ async def feed_queue(self, queue: asyncio.Queue, domain: str): else: break - if self._stop_event.is_set(): - break - # send stop command to every worker for __ in range(CONCURRENT_TASKS): while True: diff --git a/wapitiCore/attack/mod_timesql.py b/wapitiCore/attack/mod_timesql.py index 975430227..c9d688a75 100644 --- a/wapitiCore/attack/mod_timesql.py +++ b/wapitiCore/attack/mod_timesql.py @@ -43,8 +43,8 @@ class ModuleTimesql(Attack): MSG_VULN = "Blind SQL vulnerability" - def 
__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration):
-        Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration)
+    def __init__(self, crawler, persister, attack_options, crawler_configuration):
+        Attack.__init__(self, crawler, persister, attack_options, crawler_configuration)
         self.mutator = self.get_mutator()
         self.time_to_sleep = ceil(attack_options.get("timeout", self.time_to_sleep)) + 1
diff --git a/wapitiCore/attack/mod_wapp.py b/wapitiCore/attack/mod_wapp.py
index 172537aff..1acf49060 100644
--- a/wapitiCore/attack/mod_wapp.py
+++ b/wapitiCore/attack/mod_wapp.py
@@ -152,8 +152,8 @@ class ModuleWapp(Attack):
     # Store the gitlab private token from env variable
     gitlab_private_token = None
 
-    def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration):
-        Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration)
+    def __init__(self, crawler, persister, attack_options, crawler_configuration):
+        Attack.__init__(self, crawler, persister, attack_options, crawler_configuration)
         self.user_config_dir = self.persister.CONFIG_DIR
         if not os.path.isdir(self.user_config_dir):
             os.makedirs(self.user_config_dir)
diff --git a/wapitiCore/attack/mod_wp_enum.py b/wapitiCore/attack/mod_wp_enum.py
index 1601cefe0..46d79d816 100644
--- a/wapitiCore/attack/mod_wp_enum.py
+++ b/wapitiCore/attack/mod_wp_enum.py
@@ -135,9 +135,6 @@ async def detect_version(self, url: str):
 
     async def detect_plugin(self, url):
         for plugin in self.get_plugin():
-            if self._stop_event.is_set():
-                break
-
             request = Request(f'{url}/wp-content/plugins/{plugin}/readme.txt', 'GET')
             try:
                 response: Response = await self.crawler.async_send(request)
@@ -193,9 +190,6 @@ async def detect_plugin(self, url):
 
     async def detect_theme(self, url):
         for theme in self.get_theme():
-            if self._stop_event.is_set():
-                break
-
             request = Request(f'{url}/wp-content/themes/{theme}/readme.txt', 'GET')
             try:
                 response: Response = await self.crawler.async_send(request)
diff --git a/wapitiCore/attack/mod_xss.py b/wapitiCore/attack/mod_xss.py
index de9f2f640..6a0b3c77e 100644
--- a/wapitiCore/attack/mod_xss.py
+++ b/wapitiCore/attack/mod_xss.py
@@ -62,8 +62,8 @@ class ModuleXss(Attack):
 
     RANDOM_WEBSITE = f"https://{random_string(length=6)}.com/"
 
-    def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration):
-        Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration)
+    def __init__(self, crawler, persister, attack_options, crawler_configuration):
+        Attack.__init__(self, crawler, persister, attack_options, crawler_configuration)
         methods = ""
         if self.do_get:
             methods += "G"
diff --git a/wapitiCore/attack/mod_xxe.py b/wapitiCore/attack/mod_xxe.py
index 1a536d9bb..aab05ab3e 100644
--- a/wapitiCore/attack/mod_xxe.py
+++ b/wapitiCore/attack/mod_xxe.py
@@ -52,8 +52,8 @@ class ModuleXxe(Attack):
 
     MSG_VULN = "XXE vulnerability"
 
-    def __init__(self, crawler, persister, attack_options, stop_event, crawler_configuration):
-        Attack.__init__(self, crawler, persister, attack_options, stop_event, crawler_configuration)
+    def __init__(self, crawler, persister, attack_options, crawler_configuration):
+        Attack.__init__(self, crawler, persister, attack_options, crawler_configuration)
         self.vulnerables = set()
         self.attacked_urls = set()
         self.payload_to_rules = {}
diff --git a/wapitiCore/controller/wapiti.py b/wapitiCore/controller/wapiti.py
index 61f1d3d27..2dc9a3e58 100644
--- a/wapitiCore/controller/wapiti.py
+++ b/wapitiCore/controller/wapiti.py
@@ -20,7 +20,9 @@
 import asyncio
 import os
 import shutil
+import signal
 import sys
+from enum import Enum
 from operator import attrgetter
 from collections import deque
 from dataclasses import replace
@@ -28,7 +30,7 @@
 from importlib import import_module
 from time import gmtime, strftime
 from traceback import print_tb
-from typing import Dict, List, Deque, AsyncGenerator, Optional
+from typing import Dict, List, Deque, AsyncGenerator, Optional, Set
 from urllib.parse import urlparse
 from uuid import uuid1
 
@@ -59,6 +61,13 @@
 }
 
 
+class UserChoice(Enum):
+    REPORT = "r"
+    NEXT = "n"
+    QUIT = "q"
+    CONTINUE = "c"
+
+
 class InvalidOptionValue(Exception):
     def __init__(self, opt_name, opt_value):
         super().__init__()
@@ -188,6 +197,7 @@ def __init__(self, scope_request: Request, scope="folder", session_dir=None, con
         self._headless_mode = "no"
         self._wait_time = 2.
         self._buffer = []
+        self._user_choice = UserChoice.CONTINUE
 
         if session_dir:
             SqlPersister.CRAWLER_DATA_DIR = session_dir
@@ -284,9 +294,8 @@ async def _init_report(self):
                 additional.wstg_code()
             )
 
-    async def _load_attack_modules(self, stop_event: asyncio.Event, crawler: AsyncCrawler) -> List[Attack]:
+    async def _load_attack_modules(self, crawler: AsyncCrawler) -> List[Attack]:
         await self._init_report()
-        stop_event.clear()
 
         logging.info("[*] Existing modules:")
         logging.info(f"\t {', '.join(sorted(all_modules))}")
@@ -305,7 +314,6 @@ async def _load_attack_modules(self, stop_event: asyncio.Event, crawler: AsyncCr
                     crawler,
                     self.persister,
                     self.attack_options,
-                    stop_event,
                     self.crawler_configuration,
                 )
             except Exception as exception:  # pylint: disable=broad-except
@@ -320,7 +328,6 @@ async def _load_attack_modules(self, stop_event: asyncio.Event, crawler: AsyncCr
 
     async def update(self, requested_modules: str = "all"):
         """Update modules that implement an update method"""
-        stop_event = asyncio.Event()
         modules = all_modules if (not requested_modules or requested_modules == "all") else requested_modules.split(",")
 
         async with AsyncCrawler.with_configuration(self.crawler_configuration) as crawler:
@@ -332,7 +339,6 @@ async def update(self, requested_modules: str = "all"):
                         crawler,
                         self.persister,
                         self.attack_options,
-                        stop_event,
                         self.crawler_configuration,
                     )
                     if hasattr(class_instance, "update"):
@@ -445,38 +451,10 @@ async def load_resources_for_module(self, module: Attack) -> AsyncGenerator[Requ
             async for request, response in self.persister.get_forms(attack_module=module.name):
                 yield request, response
 
-    async def load_and_attack(self, stop_event: asyncio.Event, attack_module: Attack):
-        answer = "0"
-        attacked_ids = set()
+    async def load_and_attack(self, attack_module: Attack, attacked_ids: Set[int]) -> None:
+        original_request: Request
+        original_response: Response
         async for original_request, original_response in self.load_resources_for_module(attack_module):
-            if stop_event.is_set():
-                print('')
-                print("Attack process was interrupted. Do you want to:")
-                print("\tr) stop everything here and generate the (R)eport")
-                print("\tn) move to the (N)ext attack module (if any)")
-                print("\tq) (Q)uit without generating the report")
-                print("\tc) (C)ontinue the current attack")
-
-                while True:
-                    try:
-                        answer = input("? ").strip().lower()
-                    except UnicodeDecodeError:
-                        pass
-
-                    if answer not in ("r", "n", "q", "c"):
-                        print("Invalid choice. Valid choices are r, n, q and c.")
-                    else:
-                        break
-
-                if answer in ("n", "c"):
-                    stop_event.clear()
-
-                if answer in ("r", "n", "q"):
-                    break
-
-                if answer == "c":
-                    continue
-
             try:
                 if await attack_module.must_attack(original_request, original_response):
                     logging.info(f"[+] {original_request}")
@@ -502,18 +480,69 @@ async def load_and_attack(self, stop_event: asyncio.Event, attack_module: Attack
             else:
                 if original_request.path_id is not None:
                     attacked_ids.add(original_request.path_id)
-        return attacked_ids, answer
 
-    async def attack(self, stop_event: asyncio.Event):
+    def handle_user_interruption(self, task) -> None:
+        """
+        Attack handler for Ctrl+C interruption.
+        """
+        print("Attack process was interrupted. Do you want to:")
+        print("\tr) stop everything here and generate the (R)eport")
+        print("\tn) move to the (N)ext attack module (if any)")
+        print("\tq) (Q)uit without generating the report")
+        print("\tc) (C)ontinue the current attack")
+
+        while True:
+            try:
+                self._user_choice = UserChoice(input("? ").strip().lower())
+                if self._user_choice != UserChoice.CONTINUE:
+                    task.cancel()
+                return
+            except (UnicodeDecodeError, ValueError):
+                print("Invalid choice. Valid choices are r, n, q, and c.")
+
+    async def run_attack_module(self, attack_module):
+        """Run a single attack module, handling persistence and timeouts."""
+        logging.log("GREEN", "[*] Launching module {0}", attack_module.name)
+
+        already_attacked = await self.persister.count_attacked(attack_module.name)
+        if already_attacked:
+            logging.success(
+                "[*] {0} pages were previously attacked and will be skipped",
+                already_attacked
+            )
+
+        attacked_ids = set()
+
+        try:
+            await asyncio.wait_for(
+                self.load_and_attack(attack_module, attacked_ids),
+                self._max_attack_time
+            )
+        except asyncio.TimeoutError:
+            logging.info(
+                f"Max attack time was reached for module {attack_module.name}, stopping."
+            )
+        finally:
+            # In ALL cases we want to persist the IDs of requests that have been attacked so far,
+            # especially if the user hit Ctrl+C
+            await self.persister.set_attacked(attacked_ids, attack_module.name)
+
+            # We also want to check the external endpoints to see if some attacks succeeded despite the module being
+            # potentially stopped
+            if hasattr(attack_module, "finish"):
+                await attack_module.finish()
+
+            if attack_module.network_errors:
+                logging.warning(f"{attack_module.network_errors} requests were skipped due to network issues")
+
+    async def attack(self):
         """Launch the attacks based on the preferences set by the command line"""
         async with AsyncCrawler.with_configuration(self.crawler_configuration) as crawler:
-            attack_modules = await self._load_attack_modules(stop_event, crawler)
-            answer = "0"
+            attack_modules = await self._load_attack_modules(crawler)
 
-            for attack_module in attack_modules:
-                if stop_event.is_set():
-                    break
+            loop = asyncio.get_running_loop()
 
+            for attack_module in attack_modules:
                 if attack_module.do_get is False and attack_module.do_post is False:
                     continue
 
@@ -535,50 +564,32 @@ async def attack(self, stop_event: asyncio.Event):
                         [attack for attack in attack_modules if attack.name in attack_module.require]
                     )
 
-                logging.log("GREEN", "[*] Launching module {0}", attack_module.name)
-
-                already_attacked = await self.persister.count_attacked(attack_module.name)
-                if already_attacked:
-                    logging.success(
-                        "[*] {0} pages were previously attacked and will be skipped",
-                        already_attacked
-                    )
+                # Create and run each attack module as an asyncio task
+                current_attack_task = asyncio.create_task(
+                    self.run_attack_module(attack_module)
+                )
 
-                answer = "0"
-                attacked_ids = set()
+                # Set up a signal handler to prompt the user for task cancellation
+                loop.add_signal_handler(signal.SIGINT, self.handle_user_interruption, current_attack_task)
 
                 try:
-                    attacked_ids, answer = await asyncio.wait_for(
-                        self.load_and_attack(stop_event, attack_module),
-                        self._max_attack_time
-                    )
-                except asyncio.TimeoutError:
-                    logging.info(
-                        f"Max attack time was reached for module {attack_module.name}, stopping."
-                    )
-
-                await self.persister.set_attacked(attacked_ids, attack_module.name)
-
-                if hasattr(attack_module, "finish"):
-                    await attack_module.finish()
-
-                if attack_module.network_errors:
-                    logging.warning(f"{attack_module.network_errors} requests were skipped due to network issues")
-
-                if answer == "r":
-                    # Do not process remaining modules
+                    await current_attack_task  # Await the attack module task
+                except asyncio.CancelledError:
+                    # The user chose to stop the current module
+                    pass
+                finally:
+                    # Clean up the signal handler for the next loop iteration
+                    loop.remove_signal_handler(signal.SIGINT)
+
+                # As the handler directly continues or cancels the current_attack_task, we don't have
+                # cases where we would have to call `continue`. Just check for the two other options.
+                if self._user_choice in (UserChoice.REPORT, UserChoice.QUIT):
                     break
 
-                if answer == "q":
+                if self._user_choice == UserChoice.QUIT:
                     await self.persister.close()
                     return
 
-            # if self.crawler.get_uploads():
-            #     print('')
-            #     print(_("Upload scripts found:"))
-            #     print("----------------------")
-            #     for upload_form in self.crawler.get_uploads():
-            #         print(upload_form)
             await self.write_report()
 
     async def write_report(self):
@@ -636,7 +647,7 @@ async def write_report(self):
     async def send_bug_report(self, exception: Exception, traceback_, module_name: str, original_request: Request):
         async with AsyncCrawler.with_configuration(self.crawler_configuration) as crawler:
             traceback_file = str(uuid1())
-            with open(traceback_file, "w", encoding='utf-8') as traceback_fd:
+            with open(traceback_file, "w", encoding="utf-8") as traceback_fd:
                 print_tb(traceback_, file=traceback_fd)
                 print(f"{exception.__class__.__name__}: {exception}", file=traceback_fd)
                 print(f"Occurred in {module_name} on {original_request}", file=traceback_fd)
diff --git a/wapitiCore/main/wapiti.py b/wapitiCore/main/wapiti.py
index e2fba8e72..9c1039e69 100755
--- a/wapitiCore/main/wapiti.py
+++ b/wapitiCore/main/wapiti.py
@@ -49,11 +49,6 @@ def inner_ctrl_c_signal_handler():
     global_stop_event.set()
 
 
-def stop_attack_process():
-    logging.info("Waiting for all payload tasks to finish for current resource, please wait.")
-    global_stop_event.set()
-
-
 def fix_url_path(url: str):
     """Fix the url path if it's not defined"""
     return url if urlparse(url).path else url + '/'
@@ -265,7 +260,7 @@ async def wapiti_main():
             wap.set_http_credentials(HttpCredential(args.http_user, args.http_password, args.auth_method))
 
         if ("http_user" in args and "http_password" not in args) or \
-            ("http_user" not in args and "http_password" in args):
+                ("http_user" not in args and "http_password" in args):
             raise InvalidOptionValue("--auth-user and --auth-password",
                                      "Both options are required when one is used")
 
@@ -418,14 +413,13 @@ async def wapiti_main():
                     "This option is required when --form-user and --form-password or form-cred is used")
             # This option is deprecated, but we still support it
             # Should be removed in the future
-            username, password = None, None
             if "%" in args.form_credentials:
                 username, password = args.form_credentials.split("%", 1)
                 form_credential = FormCredential(
-                        username,
-                        password,
-                        args.form_url,
-                    )
+                    username,
+                    password,
+                    args.form_url,
+                )
             else:
                 raise InvalidOptionValue("--form-cred", args.form_credentials)
         elif "form_user" in args and "form_password" in args:
@@ -492,9 +486,7 @@ async def wapiti_main():
             )
             logging.info(f"[*] Wapiti found {await wap.count_resources()} URLs and forms during the scan")
 
-            loop.add_signal_handler(signal.SIGINT, stop_attack_process)
-            await wap.attack(global_stop_event)
-            loop.remove_signal_handler(signal.SIGINT)
+            await wap.attack()
 
         except OperationalError:
             logging.error(