diff --git a/CHANGELOG.md b/CHANGELOG.md
index dc67f51..d792466 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+#### 2.2.7
+- Added `--casing` option for casing style enforcement
+- Added `--ratelimit` option for explicitly defining requests/second
+- Fixed spurious "decrease chunk size/use --stable" errors in some cases
+- Fixed a bug in anomaly detection
+
#### 2.2.6
- Fixed Arjun getting infinitely stuck on some webpages
diff --git a/arjun/__init__.py b/arjun/__init__.py
index 5d724e8..04e6690 100644
--- a/arjun/__init__.py
+++ b/arjun/__init__.py
@@ -1 +1 @@
-__version__ = '2.2.6'
+__version__ = '2.2.7'
diff --git a/arjun/__main__.py b/arjun/__main__.py
index c9f3d99..0bb0769 100644
--- a/arjun/__main__.py
+++ b/arjun/__main__.py
@@ -13,6 +13,7 @@
from arjun.core.utils import fetch_params, stable_request, random_str, slicer, confirm, populate, reader, nullify, prepare_requests, compatible_path
from arjun.plugins.heuristic import heuristic
+from arjun.plugins.wl import detect_casing, convert_to_case
arjun_dir = compatible_path(mem.__file__.replace(compatible_path('/core/config.py'), ''))
@@ -25,7 +26,7 @@
parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0)
parser.add_argument('-t', help='Number of concurrent threads. (default: 5)', dest='threads', type=int, default=5)
parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt')
-parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON/HEADERS. (default: GET)', dest='method', default='GET')
+parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET')
parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=250)
@@ -36,6 +37,7 @@
parser.add_argument('--stable', help='Prefer stability over speed.', dest='stable', action='store_true')
parser.add_argument('--include', help='Include this data in every request.', dest='include', default={})
parser.add_argument('--disable-redirects', help='disable redirects', dest='disable_redirects', action='store_true')
+parser.add_argument('--casing', help='Casing style for params, e.g. like_this, likeThis, likethis', dest='casing')
args = parser.parse_args() # arguments to be parsed
if args.quiet:
@@ -77,7 +79,11 @@
passive_params = fetch_params(host)
wordlist.update(passive_params)
print('%s Collected %s parameters, added to the wordlist' % (info, len(passive_params)))
- wordlist = list(wordlist)
+ if args.casing:
+ delimiter, casing = detect_casing(args.casing)
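+            # e.g. --casing likeThis yields delimiter '' and casing 'c',
+            # so a wordlist entry like 'content_type' becomes 'contentType'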
+            wordlist = [convert_to_case(word, delimiter, casing) for word in wordlist]
+ else:
+ wordlist = list(wordlist)
except FileNotFoundError:
exit('%s The specified file for parameters doesn\'t exist' % bad)
@@ -118,11 +124,15 @@ def initialize(request, wordlist, single_url=False):
return 'skipped'
print('%s Probing the target for stability' % run)
request['url'] = stable_request(url, request['headers'])
+ mem.var['healthy_url'] = True
if not request['url']:
return 'skipped'
else:
fuzz = "z" + random_str(6)
response_1 = requester(request, {fuzz[:-1]: fuzz[::-1][:-1]})
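+        # an error status on this harmless probe means the target itself is unhealthy,
+        # not that the fuzzed parameter triggered the error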
+ mem.var['healthy_url'] = response_1.status_code not in (400, 413, 418, 429, 503)
+ if not mem.var['healthy_url']:
+            print('%s Target returned HTTP %i, which may cause problems.' % (bad, response_1.status_code))
if single_url:
print('%s Analysing HTTP response for anomalies' % run)
response_2 = requester(request, {fuzz[:-1]: fuzz[::-1][:-1]})
@@ -139,16 +149,14 @@ def initialize(request, wordlist, single_url=False):
reason = compare(response_3, factors, {zzuf[:-1]: zzuf[::-1][:-1]})[2]
if not reason:
break
- factors[reason] = False
- if single_url:
- print('%s Analysing HTTP response for potential parameter names' % run)
+ factors[reason] = None
if found:
num = len(found)
if words_exist:
- print('%s Heuristic scanner found %i parameters' % (good, num))
+ print('%s Extracted %i parameters from response for testing' % (good, num))
else:
s = 's' if num > 1 else ''
- print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
+ print('%s Extracted %i parameter%s from response for testing: %s' % (good, num, s, ', '.join(found)))
if single_url:
print('%s Logicforcing the URL endpoint' % run)
populated = populate(wordlist)
@@ -192,11 +200,11 @@ def main():
count = 0
for request in requests:
url = request['url']
+        print('%s Scanning %d/%d: %s' % (run, count + 1, len(requests), url))
these_params = initialize(request, wordlist, single_url=is_single)
count += 1
mem.var['kill'] = False
mem.var['bad_req_count'] = 0
- print('%s Scanning %d/%d: %s' % (run, count, len(requests), url))
if these_params == 'skipped':
print('%s Skipped %s due to errors' % (bad, url))
elif these_params:
@@ -205,7 +213,7 @@ def main():
final_result[url]['method'] = request['method']
final_result[url]['headers'] = request['headers']
exporter(final_result)
- print('%s Parameters found: %s\n' % (good, ', '.join(final_result[url]['params'])))
+ print('%s Parameters found: %-4s\n' % (good, ', '.join(final_result[url]['params'])))
if not mem.var['json_file']:
final_result = {}
continue
diff --git a/arjun/core/anomaly.py b/arjun/core/anomaly.py
index 0f19960..13c77f6 100644
--- a/arjun/core/anomaly.py
+++ b/arjun/core/anomaly.py
@@ -44,7 +44,7 @@ def define(response_1, response_2, param, value, wordlist):
elif remove_tags(body_1) == remove_tags(body_2):
factors['same_plaintext'] = remove_tags(body_1)
    elif body_1 and body_2 and body_1.count('\n') == body_2.count('\n'):
- factors['lines_diff'] = diff_map(body_1, body_2)
+ factors['lines_diff'] = diff_map(body_1, body_2)
if param not in response_2.text:
factors['param_missing'] = [word for word in wordlist if word in response_2.text]
if value not in response_2.text:
diff --git a/arjun/core/error_handler.py b/arjun/core/error_handler.py
index ebb7859..8eca359 100644
--- a/arjun/core/error_handler.py
+++ b/arjun/core/error_handler.py
@@ -4,6 +4,7 @@
from arjun.core.colors import bad
+
def connection_refused():
"""
checks if a request should be retried if the server refused connection
@@ -17,6 +18,7 @@ def connection_refused():
print('%s Target has rate limiting in place, please use --stable switch' % bad)
return 'kill'
+
def error_handler(response, factors):
"""
decides what to do after performing a HTTP request
@@ -26,6 +28,8 @@ def error_handler(response, factors):
returns str
"""
if type(response) != str and response.status_code in (400, 413, 418, 429, 503):
+ if not mem.var['healthy_url']:
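+            # the baseline probe already returned one of these codes,
+            # so they are not a signal about the tested parameters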
+ return 'ok'
if response.status_code == 503:
mem.var['kill'] = True
print('%s Target is unable to process requests, try --stable switch' % bad)
diff --git a/arjun/core/exporter.py b/arjun/core/exporter.py
index 529d736..22566cc 100644
--- a/arjun/core/exporter.py
+++ b/arjun/core/exporter.py
@@ -6,6 +6,7 @@
from arjun.core.utils import create_query_string
+
def json_export(result):
"""
exports result to a file in JSON format
@@ -13,6 +14,7 @@ def json_export(result):
with open(mem.var['json_file'], 'w+', encoding='utf8') as json_output:
json.dump(result, json_output, sort_keys=True, indent=4)
+
def burp_export(result):
"""
exports results to Burp Suite by sending request to Burp proxy
@@ -30,6 +32,7 @@ def burp_export(result):
elif data['method'] == 'JSON':
requests.post(url, json=populate(data['params']), headers=data['headers'], proxies=proxies, verify=False)
+
def text_export(result):
"""
exports results to a text file, one url per line
@@ -48,6 +51,7 @@ def text_export(result):
elif data['method'] == 'POST':
text_file.write(clean_url + '\t' + query_string + '\n')
+
def exporter(result):
"""
main exporter function that calls other export functions
diff --git a/arjun/core/importer.py b/arjun/core/importer.py
index b4ef48e..4643732 100644
--- a/arjun/core/importer.py
+++ b/arjun/core/importer.py
@@ -1,5 +1,18 @@
import re
+burp_regex = re.compile(r'''(?m)^
+ [^<]+
+ [^<]*
+ [^<]*
+
+ .*
+ (.*)
+
+ ([^<]*)
+ ([^<]*)
+ ([^<]*)''')
+
+
def reader(path, mode='string'):
"""
reads a file
@@ -11,6 +24,7 @@ def reader(path, mode='string'):
else:
return ''.join([line for line in file])
+
def parse_request(string):
"""
parses http request
@@ -25,6 +39,7 @@ def parse_request(string):
result['data'] = match.group(4)
return result
+
def parse_headers(string):
"""
parses headers
@@ -37,18 +52,6 @@ def parse_headers(string):
result[splitted[0]] = ':'.join(splitted[1:]).strip()
return result
-burp_regex = re.compile(r'''(?m)^
- [^<]+
- [^<]*
- [^<]*
-
- .*
- (.*)
-
- ([^<]*)
- ([^<]*)
- ([^<]*)''')
-
def burp_import(path):
"""
diff --git a/arjun/core/prompt.py b/arjun/core/prompt.py
index 7ef7a52..8e472e3 100644
--- a/arjun/core/prompt.py
+++ b/arjun/core/prompt.py
@@ -1,6 +1,7 @@
import os
import tempfile
+
def prompt(default=None):
"""
lets user paste input by opening a temp file in a text editor
diff --git a/arjun/core/requester.py b/arjun/core/requester.py
index fc74a37..18bc6b6 100644
--- a/arjun/core/requester.py
+++ b/arjun/core/requester.py
@@ -11,6 +11,7 @@
warnings.filterwarnings('ignore') # Disable SSL related warnings
+
@sleep_and_retry
@limits(calls=mem.var['rate_limit'], period=1)
def requester(request, payload={}):
diff --git a/arjun/core/utils.py b/arjun/core/utils.py
index 609bbff..84d084a 100644
--- a/arjun/core/utils.py
+++ b/arjun/core/utils.py
@@ -153,7 +153,7 @@ def create_query_string(params):
pair = param + '=' + random_str(4) + '&'
query_string += pair
if query_string.endswith('&'):
- query_string = query_string[:-1]
+ query_string = query_string[:-1]
return '?' + query_string
@@ -180,6 +180,7 @@ def extract_js(response):
scripts.append(actual_parts[0])
return scripts
+
def parse_headers(string):
"""
parses headers
@@ -274,7 +275,7 @@ def prepare_requests(args):
'headers': headers,
'include': params
}
- )
+ )
elif args.import_file:
result = importer(args.import_file, mem.var['method'], headers, args.include)
return result
diff --git a/arjun/plugins/commoncrawl.py b/arjun/plugins/commoncrawl.py
index 3861da1..c89c337 100644
--- a/arjun/plugins/commoncrawl.py
+++ b/arjun/plugins/commoncrawl.py
@@ -2,9 +2,10 @@
from urllib.parse import urlparse
+
def commoncrawl(host, page=0):
these_params = set()
- response = requests.get('http://index.commoncrawl.org/CC-MAIN-2020-29-index?url=*.%s&fl=url&page=%s&limit=10000' % (host, page), verify=False).text
+ response = requests.get('http://index.commoncrawl.org/CC-MAIN-2024-42-index?url=*.%s&fl=url&page=%s&limit=10000' % (host, page), verify=False).text
    if response.startswith('<!DOCTYPE html>'):
return ([], False, 'commoncrawl')
urls = response.split('\n')
diff --git a/arjun/plugins/heuristic.py b/arjun/plugins/heuristic.py
index b6d3910..ee94b27 100644
--- a/arjun/plugins/heuristic.py
+++ b/arjun/plugins/heuristic.py
@@ -11,9 +11,11 @@
re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')
+
def is_not_junk(param):
return (re_not_junk.match(param) is not None)
+
def heuristic(raw_response, wordlist):
words_exist = False
potential_params = []
diff --git a/arjun/plugins/otx.py b/arjun/plugins/otx.py
index b3ed2b7..a606232 100644
--- a/arjun/plugins/otx.py
+++ b/arjun/plugins/otx.py
@@ -2,6 +2,7 @@
from urllib.parse import urlparse
+
def otx(host, page):
these_params = set()
data = requests.get('https://otx.alienvault.com/api/v1/indicators/hostname/%s/url_list?limit=50&page=%d' % (host, page+1), verify=False).json()
diff --git a/arjun/plugins/wayback.py b/arjun/plugins/wayback.py
index 2a0c0df..44e0ec4 100644
--- a/arjun/plugins/wayback.py
+++ b/arjun/plugins/wayback.py
@@ -2,6 +2,7 @@
from urllib.parse import urlparse
+
def wayback(host, page):
payload = {
'url': host,
diff --git a/arjun/plugins/wl.py b/arjun/plugins/wl.py
new file mode 100644
index 0000000..04d44bd
--- /dev/null
+++ b/arjun/plugins/wl.py
@@ -0,0 +1,80 @@
+def detect_casing(string):
+    """Detect the casing style and delimiter of a given string."""
+ delimiter = ""
+ casing = ""
+
+ if string.islower():
+ casing = "l"
+ elif string.isupper():
+ casing = "u"
+ else:
+        casing = "c" if string[0].islower() else "p"
+
+ if "-" in string:
+ delimiter = "-"
+ elif "_" in string:
+ delimiter = "_"
+ elif "." in string:
+ delimiter = "."
+
+ return delimiter, casing
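+
+# Examples:
+#   detect_casing("like_this") -> ("_", "l")
+#   detect_casing("LIKE-THIS") -> ("-", "u")
+#   detect_casing("likeThis")  -> ("", "c")
+#   detect_casing("LikeThis")  -> ("", "p")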
+
+
+def transform(parts, delimiter, casing):
+    """Combine a list of strings into a single string with the given casing style and delimiter."""
+ if len(parts) == 1:
+ if casing == "l":
+ return parts[0].lower()
+ elif casing == "u":
+ return parts[0].upper()
+ return parts[0]
+
+ result = []
+ for i, part in enumerate(parts):
+ if casing == "l":
+ transformed = part.lower()
+ elif casing == "u":
+ transformed = part.upper()
+ elif casing == "c":
+ if i == 0:
+ transformed = part.lower()
+ else:
+ transformed = part.lower().title()
+ else: # casing == "p"
+ transformed = part.lower().title()
+
+ result.append(transformed)
+
+ return delimiter.join(result)
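+
+# e.g. transform(["content", "type"], "", "c")  -> "contentType"
+#      transform(["content", "type"], "_", "u") -> "CONTENT_TYPE"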
+
+
+def handle(text):
+    """Break a string into a list of 'words'."""
+ if "-" in text:
+ return text.split("-")
+ elif "_" in text:
+ return text.split("_")
+ elif "." in text:
+ return text.split(".")
+
+ if not text.islower() and not text.isupper():
+ parts = []
+ temp = ""
+ for char in text:
+ if not char.isupper():
+ temp += char
+ else:
+ if temp:
+ parts.append(temp)
+ temp = char
+ if temp:
+ parts.append(temp)
+ return parts
+
+ return [text]
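+
+# e.g. handle("content-type") -> ["content", "type"]
+#      handle("contentType")  -> ["content", "Type"]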
+
+
+def convert_to_case(string, delimiter, casing):
+    """Convert a string to the given casing style."""
+ parts = handle(string)
+ return transform(parts, delimiter, casing)
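+
+
+# End-to-end example:
+#   delimiter, casing = detect_casing("likeThis")        # -> ("", "c")
+#   convert_to_case("content_type", delimiter, casing)   # -> "contentType"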