Skip to content

Commit

Permalink
Merge pull request #172 from s0md3v/2.2.0
Browse files Browse the repository at this point in the history
2.2.0
  • Loading branch information
s0md3v authored Sep 11, 2022
2 parents 4219512 + 289f546 commit 4a0931d
Show file tree
Hide file tree
Showing 9 changed files with 198 additions and 21 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
#### 2.2.0
- Ability to detect parameters that respond to a certain value e.g. "?debug=yes"
- Added "required parameter" detection
- Heuristic can now extract words out of json/text responses
- Fixed -oB option description

#### 2.1.6
- Fixed multiple breaking bugs
- Export results as they come in multi-target mode
Expand Down
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,4 +74,5 @@ Direct links to some basic options are given below:
Optionally, you can use the `--help` argument to explore Arjun on your own.

##### Credits
The parameter names wordlist is created by extracting top parameter names from [CommonCrawl](http://commoncrawl.org) dataset and merging best words from [SecLists](https://github.com/danielmiessler/SecLists) and [param-miner](https://github.com/PortSwigger/param-miner) wordlists into that.
The parameter names wordlist is created by extracting top parameter names from [CommonCrawl](http://commoncrawl.org) dataset and merging best words from [SecLists](https://github.com/danielmiessler/SecLists) and [param-miner](https://github.com/PortSwigger/param-miner) wordlists into that.\
`db/special.json` wordlist is taken from [data-payloads](https://github.com/yehgdotnet/data-payloads).
2 changes: 1 addition & 1 deletion arjun/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '2.1.6'
__version__ = '2.2.0'
20 changes: 14 additions & 6 deletions arjun/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from arjun.core.colors import green, end, info, bad, good, run, res

import argparse
import json

from urllib.parse import urlparse
import arjun.core.config as mem
Expand All @@ -26,7 +27,7 @@
parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0)
parser.add_argument('-t', help='Number of concurrent threads. (default: 5)', dest='threads', type=int, default=5)
parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt')
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET')
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON/HEADERS. (default: GET)', dest='method', default='GET')
parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=500)
Expand Down Expand Up @@ -127,14 +128,19 @@ def initialize(request, wordlist, single_url=False):
factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
if single_url:
print('%s Analysing HTTP response for potential parameter names' % run)
found = heuristic(response_1.text, wordlist)
found, words_exist = heuristic(response_1, wordlist)
if found:
num = len(found)
s = 's' if num > 1 else ''
print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
if words_exist:
print('%s Heuristic scanner found %i parameters' % (good, num))
else:
s = 's' if num > 1 else ''
print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
if single_url:
print('%s Logicforcing the URL endpoint' % run)
populated = populate(wordlist)
with open(f'{arjun_dir}/db/special.json', 'r') as f:
populated.update(json.load(f))
param_groups = slicer(populated, int(len(wordlist)/mem.var['chunks']))
prev_chunk_count = len(param_groups)
last_params = []
Expand All @@ -157,7 +163,8 @@ def initialize(request, wordlist, single_url=False):
if reason:
name = list(param.keys())[0]
confirmed_params.append(name)
print('%s parameter detected: %s, based on: %s' % (res, name, reason))
if single_url:
print('%s parameter detected: %s, based on: %s' % (res, name, reason))
return confirmed_params


Expand All @@ -171,14 +178,15 @@ def main():
# in case of a single target
mem.var['kill'] = False
url = request['url']
these_params = initialize(request, wordlist)
these_params = initialize(request, wordlist, single_url=True)
if these_params == 'skipped':
print('%s Skipped %s due to errors' % (bad, request['url']))
elif these_params:
final_result[url] = {}
final_result[url]['params'] = these_params
final_result[url]['method'] = request['method']
final_result[url]['headers'] = request['headers']
print('%s Parameters found: %s' % (good, ', '.join(final_result[url]['params'])))
exporter(final_result)
else:
print('%s No parameters were discovered.' % info)
Expand Down
2 changes: 1 addition & 1 deletion arjun/core/anomaly.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def compare(response, factors, params):
return ('param name reflection', params)
if factors['value_missing']:
for value in params.values():
if type(value) != str:
if type(value) != str or len(value) != 6:
continue
if value in response.text and re.search(r'[\'"\s]%s[\'"\s]' % value, response.text):
return ('param value reflection', params)
Expand Down
4 changes: 2 additions & 2 deletions arjun/core/exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ def burp_export(result):
exports results to Burp Suite by sending request to Burp proxy
"""
proxies = {
'http': 'http://' + mem.var['burp_port'],
'https': 'https://' + mem.var['burp_port']
'http': 'http://127.0.0.1:' + mem.var['burp_port'],
'https': 'https://127.0.0.1:' + mem.var['burp_port']
}
for url, data in result.items():
if data['method'] == 'GET':
Expand Down
2 changes: 1 addition & 1 deletion arjun/core/requester.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def requester(request, payload={}):
if len(request.get('include', '')) != 0:
payload.update(request['include'])
if mem.var['stable']:
mem.var['delay'] = random.choice(range(6, 12))
mem.var['delay'] = random.choice(range(3, 10))
time.sleep(mem.var['delay'])
url = request['url']
if mem.var['kill']:
Expand Down
153 changes: 153 additions & 0 deletions arjun/db/special.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
{
"debug": "yes",
"debug": "true",
"debug": "1",
"debug": "on",
"test": "yes",
"test": "true",
"test": "1",
"test": "on",
"source": "yes",
"source": "true",
"source": "1",
"source": "on",
"admin": "yes",
"admin": "true",
"admin": "1",
"admin": "on",
"show": "yes",
"show": "true",
"show": "1",
"show": "on",
"bot": "yes",
"bot": "1",
"bot": "on",
"antibot": "off",
"antibot": "0",
"antibot": "no",
"antibot": "none",
"antibot": "nil",
"antirobot": "off",
"antirobot": "0",
"antirobot": "no",
"antirobot": "none",
"antirobot": "nil",
"env": "staging",
"env": "test",
"env": "testing",
"env": "pre",
"env": "pre-staging",
"env": "daily",
"env": "uat",
"anticrawl": "off",
"anticrawl": "0",
"anticrawl": "none",
"anticrawl": "no",
"anticrawl": "nil",
"captcha": "off",
"captcha": "0",
"captcha": "none",
"captcha": "no",
"captcha": "nil",
"signing": "off",
"signing": "0",
"signing": "none",
"signing": "no",
"signing": "nil",
"signature": "off",
"signature": "0",
"signature": "none",
"signature": "no",
"signature": "nil",
"enc": "off",
"enc": "0",
"enc": "none",
"enc": "no",
"enc": "nil",
"encryption": "off",
"encryption": "0",
"encryption": "none",
"encryption": "no",
"encryption": "nil",
"automation": "on",
"automation": "1",
"automation": "yes",
"waf": "disabled",
"waf": "disable",
"waf": "off",
"waf": "0",
"waf": "no",
"security": "disabled",
"security": "disable",
"security": "0",
"security": "no",
"isdebug": "yes",
"isdebug": "true",
"isdebug": "1",
"isdebug": "on",
"istest": "yes",
"istest": "true",
"istest": "1",
"istest": "on",
"isadmin": "yes",
"isadmin": "true",
"isadmin": "1",
"isadmin": "on",
"isbot": "yes",
"isbot": "1",
"isbot": "on",
"isenv": "staging",
"isenv": "test",
"isenv": "testing",
"isenv": "pre",
"isenv": "pre-staging",
"isenv": "daily",
"isenv": "uat",
"hascaptcha": "off",
"hascaptcha": "0",
"hascaptcha": "none",
"hascaptcha": "no",
"hascaptcha": "nil",
"hassigning": "off",
"hassigning": "0",
"hassigning": "none",
"hassigning": "no",
"hassigning": "nil",
"hassignature": "off",
"hassignature": "0",
"hassignature": "none",
"hassignature": "no",
"hassignature": "nil",
"isenc": "off",
"isenc": "0",
"isenc": "none",
"isenc": "no",
"isenc": "nil",
"isencryption": "off",
"isencryption": "0",
"isencryption": "none",
"isencryption": "no",
"isencryption": "nil",
"hasautomation": "on",
"hasautomation": "1",
"hasautomation": "yes",
"haswaf": "disabled",
"haswaf": "disable",
"haswaf": "off",
"haswaf": "0",
"haswaf": "no",
"issecurity": "disabled",
"issecurity": "disable",
"hassecurity": "0",
"hassecurity": "no",
"disable": "waf",
"disable": "security",
"disabled": "waf",
"disabled": "security",
"dosinglesignon": "1",
"singlesignon": "1",
"hassinglesignon": "1",
"dosso": "1",
"sso": "1",
"hassso": "1"
}
27 changes: 18 additions & 9 deletions arjun/plugins/heuristic.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,32 @@
import re

from arjun.core.colors import info
import arjun.core.config as mem
from arjun.core.utils import extract_js

re_not_junk = re.compile(r'^[A-Za-z0-9_]+$')


def is_not_junk(param):
return (re_not_junk.match(param) is not None)

# TODO: for map keys, javascript tolerates { param: "value" }

# An identifier-looking word: a letter followed by letters/digits/underscores.
re_words = re.compile(r'[A-Za-z][A-Za-z0-9_]*')
# A candidate parameter name is "not junk" when made up solely of [A-Za-z0-9_].
re_not_junk = re.compile(r'^[A-Za-z0-9_]+$')
# name="..." / id="..." attributes of <input> tags (quotes optional, case-insensitive).
re_input_names = re.compile(r'''(?i)<input.+?name=["']?([^"'\s>]+)''')
re_input_ids = re.compile(r'''(?i)<input.+?id=["']?([^"'\s>]+)''')
# JS variables assigned an empty/constant value, e.g. `var token = ''` or `;flag=false`.
# Fix: the previous pattern `(?:[;\n]|\bvar|\blet)(\w+)` could never match
# `var x = ...` (it left no room for the mandatory whitespace after var/let)
# and mis-captured names like `variable` as `iable`; require `\s+` after the keyword.
re_empty_vars = re.compile(r'''(?:[;\n]|\bvar\s+|\blet\s+)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
# String keys of JS object literals whose value is a string, e.g. 'key': "...".
re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')


def is_not_junk(param):
    """Return True if *param* contains only ASCII letters, digits or underscores."""
    return re_not_junk.match(param) is not None

def heuristic(response, wordlist):
def heuristic(raw_response, wordlist):
words_exist = False
potential_params = []

headers, response = raw_response.headers, raw_response.text
if headers.get('content-type', '').startswith(('application/json', 'text/plain')):
if len(response) < 200:
            if any(word in response.lower() for word in ('required', 'missing', 'not found', 'requires')) and any(word in response.lower() for word in ('param', 'parameter', 'field')):
if not mem.var['quiet']:
                    print('%s The endpoint seems to require certain parameters to function. Check the response and use the --include option appropriately for better results.' % info)
words_exist = True
potential_params = re_words.findall(response)
# Parse Inputs
input_names = re_input_names.findall(response)
potential_params += input_names
Expand All @@ -34,7 +43,7 @@ def heuristic(response, wordlist):
potential_params += map_keys

if len(potential_params) == 0:
return []
return [], words_exist

found = set()
for word in potential_params:
Expand All @@ -45,4 +54,4 @@ def heuristic(response, wordlist):
wordlist.remove(word)
wordlist.insert(0, word)

return list(found)
return list(found), words_exist

0 comments on commit 4a0931d

Please sign in to comment.