Merge pull request #18 from halfluke/main
graphql-cop now runs against a list of common GraphQL endpoints and keeps iterating through the list until all of them are tested when the user has not specified a path (e.g. -t http://example.com). If the user has specified a path (e.g. -t http://example.com/graphql), only that endpoint is used.
dolevf authored Sep 4, 2022
2 parents 17560b0 + 1ecf45b commit 857a6e7
Showing 15 changed files with 51 additions and 49 deletions.
33 changes: 23 additions & 10 deletions graphql-cop.py
@@ -22,17 +22,17 @@
from lib.tests.info_unhandled_error import unhandled_error_detection
from lib.utils import is_graphql, draw_art


parser = OptionParser(usage='%prog -t http://example.com -o json')
parser.add_option('-t', '--target', dest='url', help='target url with the path')
parser.add_option('-H', '--header', dest='header', action='append', help='Append Header(s) to the request \'{"Authorization": "Bearer eyjt"}\' - Use multiple -H for multiple Headers')
parser.add_option('-t', '--target', dest='url', help='target url with the path - if a GraphQL path is not provided, GraphQL Cop will iterate through a series of common GraphQL paths')
parser.add_option('-H', '--header', dest='header', action='append', help='Append Header(s) to the request \'{"Authorization": "Bearer eyjt"}\' - Use multiple -H for additional Headers')
parser.add_option('-o', '--output', dest='format',
help='json', default=False)
parser.add_option('--proxy', '-x', dest='proxy', action='store_true', default=False,
help='Sends the request through http://127.0.0.1:8080 proxy')
parser.add_option('--version', '-v', dest='version', action='store_true', default=False,
help='Print out the current version and exit.')


options, args = parser.parse_args()

if options.version:
@@ -61,14 +61,20 @@
print("Cannot cast %s into header dictionary. Ensure the format \'{\"key\": \"value\"}\'."%(options.header))

if not urlparse(options.url).scheme:
print("URL missing scheme (http:// or https://). Ensure ULR contains some scheme.")
print("URL missing scheme (http:// or https://). Ensure URL contains some scheme.")
sys.exit(1)
else:
url = options.url

if not is_graphql(url, proxy, HEADERS):
print(url, 'does not seem to be running GraphQL.')
sys.exit(1)
endpoints = ['/graphiql', '/playground', '/console', '/graphql']
paths = []
parsed = urlparse(url)

if parsed.path and parsed.path != '/':
paths.append(url)
else:
for endpoint in endpoints:
paths.append(parsed.scheme + '://' + parsed.netloc + endpoint)

tests = [field_suggestions, introspection, detect_graphiql,
get_method_support, alias_overloading, batch_query,
@@ -78,11 +84,18 @@

json_output = []

for test in tests:
json_output.append(test(url, proxy, HEADERS))
for path in paths:
if not is_graphql(path, proxy, HEADERS):
print(path, 'does not seem to be running GraphQL.')
continue
for test in tests:
json_output.append(test(path, proxy, HEADERS))

json_output = sorted(json_output, key=lambda d: d['title'])

if options.format == 'json':
print(json_output)
for i in range(len(json_output)):
print(json_output[i], end='\n\n')
else:
for i in json_output:
if i['result']:
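
Taken together, the new target-selection logic amounts to the following (a minimal, runnable sketch mirroring the diff above; scan_targets is a hypothetical helper name, not part of the codebase):

from urllib.parse import urlparse

endpoints = ['/graphiql', '/playground', '/console', '/graphql']

def scan_targets(url):
    # User supplied a path: test only that endpoint.
    parsed = urlparse(url)
    if parsed.path and parsed.path != '/':
        return [url]
    # No path: fan out across the common GraphQL endpoints.
    return [parsed.scheme + '://' + parsed.netloc + e for e in endpoints]

print(scan_targets('http://example.com/graphql'))
# ['http://example.com/graphql']
print(scan_targets('http://example.com'))
# ['http://example.com/graphiql', 'http://example.com/playground',
#  'http://example.com/console', 'http://example.com/graphql']
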
2 changes: 1 addition & 1 deletion lib/tests/dos_alias_overloading.py
@@ -8,7 +8,7 @@ def alias_overloading(url, proxy, headers):
'result':False,
'title':'Alias Overloading',
'description':'Alias Overloading with 100+ aliases is allowed',
'impact':'Denial of Service',
'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
'severity':'HIGH',
'curl_verify':''
}
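
The impact-string change above repeats in each test module below: url.rsplit('/', 1)[-1] takes everything after the last slash, so the report now names the endpoint that was actually hit. For example:

print('http://example.com/graphql'.rsplit('/', 1)[-1])
# 'graphql'  -> reported as 'Denial of Service - /graphql'
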
2 changes: 1 addition & 1 deletion lib/tests/dos_batch.py
@@ -8,7 +8,7 @@ def batch_query(url, proxy, headers):
'result':False,
'title':'Array-based Query Batching',
'description':'Batch queries allowed with 10+ simultaneous queries',
'impact':'Denial of Service',
'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
'severity':'HIGH',
'curl_verify':''
}
2 changes: 1 addition & 1 deletion lib/tests/dos_circular_introspection.py
@@ -7,7 +7,7 @@ def circular_query_introspection(url, proxy, headers):
'result':False,
'title':'Introspection-based Circular Query',
'description':'Circular-query using Introspection',
'impact':'Denial of Service',
'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
'severity':'HIGH',
'curl_verify':''
}
2 changes: 1 addition & 1 deletion lib/tests/dos_directive_overloading.py
@@ -8,7 +8,7 @@ def directive_overloading(url, proxy, headers):
'result':False,
'title':'Directive Overloading',
'description':'Multiple duplicated directives allowed in a query',
'impact':'Denial of Service',
'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
'severity':'HIGH',
'curl_verify':''
}
2 changes: 1 addition & 1 deletion lib/tests/dos_field_duplication.py
@@ -8,7 +8,7 @@ def field_duplication(url, proxy, headers):
'result':False,
'title':'Field Duplication',
'description':'Queries are allowed with 500 of the same repeated field',
'impact':'Denial of Service',
'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
'severity':'HIGH',
'curl_verify':''
}
2 changes: 1 addition & 1 deletion lib/tests/info_field_suggestions.py
@@ -8,7 +8,7 @@ def field_suggestions(url, proxy, headers):
'result':False,
'title':'Field Suggestions',
'description':'Field Suggestions are Enabled',
'impact':'Information Leakage',
'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
'severity':'LOW',
'curl_verify':''
}
2 changes: 1 addition & 1 deletion lib/tests/info_get_based_mutation.py
@@ -7,7 +7,7 @@ def get_based_mutation(url, proxies, headers):
'result':False,
'title':'Mutation is allowed over GET (possible CSRF)',
'description':'GraphQL mutations allowed using the GET method',
'impact':'Possible Cross Site Request Forgery',
'impact':'Possible Cross Site Request Forgery - /' + url.rsplit('/', 1)[-1],
'severity':'MEDIUM',
'curl_verify':''
}
2 changes: 1 addition & 1 deletion lib/tests/info_get_method_support.py
@@ -8,7 +8,7 @@ def get_method_support(url, proxies, headers):
'result':False,
'title':'GET Method Query Support',
'description':'GraphQL queries allowed using the GET method',
'impact':'Possible Cross Site Request Forgery (CSRF)',
'impact':'Possible Cross Site Request Forgery (CSRF) - /' + url.rsplit('/', 1)[-1],
'severity':'MEDIUM',
'curl_verify':''
}
27 changes: 8 additions & 19 deletions lib/tests/info_graphiql.py
@@ -1,41 +1,30 @@
"""Collect GraphiQL details."""
from urllib.parse import urlparse
from lib.utils import request, curlify


def detect_graphiql(url, proxy, headers):
"""Get GraphiQL."""
res = {
'result':False,
'title':'GraphQL IDE',
'description':'GraphiQL Explorer/Playground Enabled',
'impact':'Information Leakage',
'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
'severity':'LOW',
'curl_verify':''
}

heuristics = ('graphiql.min.css', 'GraphQL Playground', 'GraphiQL', 'graphql-playground')
endpoints = ['graphiql', 'playground', 'console', 'graphql']

parsed = urlparse(url)
if "Accept" in headers.keys():
backup_accept_header=headers["Accept"]
headers["Accept"]= "text/html"

truepath = ""
pathlist = parsed.path.split('/')
for p in range(0, len(pathlist)):
truepath += pathlist[p] + '/'
url = '{}://{}{}'.format(parsed.scheme, parsed.netloc, truepath)
for endpoint in endpoints:
response = request(url + endpoint, proxies=proxy, headers=headers)
res['curl_verify'] = curlify(response)
try:
if response and any(word in response.text for word in heuristics):
res['result'] = True
break
except:
pass
response = request(url, proxies=proxy, headers=headers)
res['curl_verify'] = curlify(response)
try:
if response and any(word in response.text for word in heuristics):
res['result'] = True
except:
pass

del headers["Accept"]
if 'backup_accept_header' in locals():
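
With endpoint guessing moved up to graphql-cop.py, this test now probes only the URL it is handed. Detection itself is a simple substring scan over the response body; a minimal sketch (the HTML snippet is hypothetical):

heuristics = ('graphiql.min.css', 'GraphQL Playground', 'GraphiQL', 'graphql-playground')
html = '<link rel="stylesheet" href="/static/graphiql.min.css">'
print(any(word in html for word in heuristics))
# True
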
2 changes: 1 addition & 1 deletion lib/tests/info_introspect.py
@@ -8,7 +8,7 @@ def introspection(url, proxy, headers):
'result':False,
'title':'Introspection',
'description':'Introspection Query Enabled',
'impact':'Information Leakage',
'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
'severity':'HIGH',
'curl_verify':''
}
2 changes: 1 addition & 1 deletion lib/tests/info_post_based_csrf.py
@@ -7,7 +7,7 @@ def post_based_csrf(url, proxies, headers):
'result':False,
'title':'POST based url-encoded query (possible CSRF)',
'description':'GraphQL accepts non-JSON queries over POST',
'impact':'Possible Cross Site Request Forgery',
'impact':'Possible Cross Site Request Forgery - /' + url.rsplit('/', 1)[-1],
'severity':'MEDIUM',
'curl_verify':''
}
8 changes: 4 additions & 4 deletions lib/tests/info_trace_mode.py
@@ -8,19 +8,19 @@ def trace_mode(url, proxy, headers):
'result':False,
'title':'Trace Mode',
'description':'Tracing is Enabled',
'impact':'Information Leakage',
'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
'severity':'INFO',
'curl_verify':''
}

q = 'query cop { __typename }'
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)

try:
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)
if gql_response.json()['errors'][0]['extensions']['tracing']:
res['result'] = True
elif 'stacktrace' in str(gql_response.json()).lower():
elif '\'extensions\': {\'tracing\':' in str(gql_response.json()).lower():
res['result'] = True
except:
pass
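
The fallback check also tightened: instead of flagging any 'stacktrace' substring, it now looks specifically for a tracing object under extensions. A sketch of a response shape the new check matches (the payload is hypothetical, modeled on Apollo-style tracing output):

body = {'errors': [{'message': 'Syntax Error', 'extensions': {'tracing': {'version': 1}}}]}
print("'extensions': {'tracing':" in str(body).lower())
# True
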
10 changes: 5 additions & 5 deletions lib/tests/info_unhandled_error.py
@@ -3,24 +3,24 @@


def unhandled_error_detection(url, proxy, headers):
"""Get the trace mode."""
"""Get unhandled errors."""
res = {
'result':False,
'title':'Unhandled Errors Detection',
'description':'Exception errors are not handled',
'impact':'Information Leakage',
'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
'severity':'INFO',
'curl_verify':''
}

q = 'qwerty cop { abc }'
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)

try:
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)
if gql_response.json()['errors'][0]['extensions']['exception']:
res['result'] = True
elif 'exception' in str(gql_response.json()).lower():
elif '\'extensions\': {\'exception\':' in str(gql_response.json()).lower():
res['result'] = True
except:
pass
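
In both info_trace_mode.py and info_unhandled_error.py the graph_query and curlify calls also moved inside the try block. A plausible reading, not stated in the commit: with multiple guessed paths, a request that raises on one dead endpoint should mark that test False and let the scan continue rather than crash the run. A sketch of the pattern (run_test and dead_endpoint are hypothetical names):

def run_test(request_fn):
    res = {'result': False}
    try:
        response = request_fn()  # may raise, e.g. ConnectionError on a dead path
        res['result'] = 'exception' in response.lower()
    except Exception:
        pass  # swallow the error so the endpoint loop continues
    return res

def dead_endpoint():
    raise ConnectionError('connection refused')

print(run_test(dead_endpoint))
# {'result': False}
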
2 changes: 1 addition & 1 deletion version.py
@@ -1,2 +1,2 @@
"""Version details of graphql-cop."""
VERSION = '1.9'
VERSION = '1.10'
