From 19ebd9ac5a2a3715c3d60b1b2a3ee9fc95060248 Mon Sep 17 00:00:00 2001
From: halfluke
Date: Fri, 2 Sep 2022 00:38:54 +0100
Subject: [PATCH 1/6] Modified trace_mode and unhandled_error tests to take
 into account GraphQL IDE finding as in DVGA.

---
 lib/tests/info_graphiql.py        |  2 +-
 lib/tests/info_trace_mode.py      | 19 ++++++++++++++++---
 lib/tests/info_unhandled_error.py | 20 ++++++++++++++++----
 version.py                        |  2 +-
 4 files changed, 34 insertions(+), 9 deletions(-)

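Note (placed with the diffstat below the "---" cut line, where git am ignores it): the cross-module handoff this patch introduces works by storing the discovered IDE URL as an attribute on the detect_graphiql function object, which any module that imports the function can read back later. A minimal, self-contained sketch of the pattern; the names and URL below are illustrative and not taken from the patch:

    # Sketch only: the function-attribute handoff, reduced to one file.
    def detect_ide(url):
        """Pretend the GraphiQL heuristics matched at url + '/graphiql'."""
        detect_ide.ide_path = url + '/graphiql'   # stash the finding on the function object
        return True

    detect_ide('http://example.com')

    # Later code, including another module that imports detect_ide, can pick it up:
    if hasattr(detect_ide, 'ide_path'):           # present only if a match was recorded
        print(detect_ide.ide_path)                # -> http://example.com/graphiql

The attribute lives for the rest of the process, so it always reflects the most recent match.
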
diff --git a/lib/tests/info_graphiql.py b/lib/tests/info_graphiql.py
index 58687f4..7df65bd 100644
--- a/lib/tests/info_graphiql.py
+++ b/lib/tests/info_graphiql.py
@@ -2,7 +2,6 @@
 from urllib.parse import urlparse
 from lib.utils import request, curlify
 
-
 def detect_graphiql(url, proxy, headers):
     """Get GraphiQL."""
     res = {
@@ -32,6 +31,7 @@ def detect_graphiql(url, proxy, headers):
         res['curl_verify'] = curlify(response)
         try:
             if response and any(word in response.text for word in heuristics):
+                detect_graphiql.GraphQLIDEpath = url + endpoint
                 res['result'] = True
                 break
         except:
diff --git a/lib/tests/info_trace_mode.py b/lib/tests/info_trace_mode.py
index 47180c3..3a6e97e 100644
--- a/lib/tests/info_trace_mode.py
+++ b/lib/tests/info_trace_mode.py
@@ -1,5 +1,6 @@
 """Collect trace mode details."""
 from lib.utils import graph_query, curlify
+from lib.tests.info_graphiql import detect_graphiql
 
 
 def trace_mode(url, proxy, headers):
@@ -14,15 +15,27 @@ def trace_mode(url, proxy, headers):
     }
 
     q = 'query cop { __typename }'
-    gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
-    res['curl_verify'] = curlify(gql_response)
 
     try:
+        gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
+        res['curl_verify'] = curlify(gql_response)
         if gql_response.json()['errors'][0]['extensions']['tracing']:
             res['result'] = True
-        elif 'stacktrace' in str(gql_response.json()).lower():
+        elif '\'extensions\': {\'tracing\':' in str(gql_response.json()).lower():
             res['result'] = True
     except:
         pass
 
+    if hasattr(detect_graphiql, 'GraphQLIDEpath'):
+        url = detect_graphiql.GraphQLIDEpath
+        try:
+            gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
+            res['curl_verify'] = curlify(gql_response)
+            if gql_response.json()['errors'][0]['extensions']['tracing']:
+                res['result'] = True
+            elif '\'extensions\': {\'tracing\':' in str(gql_response.json()).lower():
+                res['result'] = True
+        except:
+            pass
+
     return res
diff --git a/lib/tests/info_unhandled_error.py b/lib/tests/info_unhandled_error.py
index a81a538..70170d4 100644
--- a/lib/tests/info_unhandled_error.py
+++ b/lib/tests/info_unhandled_error.py
@@ -1,6 +1,6 @@
 """Collect trace mode details."""
 from lib.utils import graph_query, curlify
-
+from lib.tests.info_graphiql import detect_graphiql
 
 def unhandled_error_detection(url, proxy, headers):
     """Get the trace mode."""
@@ -14,15 +14,27 @@ def unhandled_error_detection(url, proxy, headers):
     }
 
     q = 'qwerty cop { abc }'
-    gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
-    res['curl_verify'] = curlify(gql_response)
 
     try:
+        gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
+        res['curl_verify'] = curlify(gql_response)
         if gql_response.json()['errors'][0]['extensions']['exception']:
             res['result'] = True
-        elif 'exception' in str(gql_response.json()).lower():
+        elif '\'extensions\': {\'exception\':' in str(gql_response.json()).lower():
             res['result'] = True
     except:
         pass
 
+    if hasattr(detect_graphiql, 'GraphQLIDEpath'):
+        url = detect_graphiql.GraphQLIDEpath
+        try:
+            gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
+            res['curl_verify'] = curlify(gql_response)
+            if gql_response.json()['errors'][0]['extensions']['exception']:
+                res['result'] = True
+            elif '\'extensions\': {\'exception\':' in str(gql_response.json()).lower():
+                res['result'] = True
+        except:
+            pass
+
     return res
diff --git a/version.py b/version.py
index bdcb65a..0dfe791 100644
--- a/version.py
+++ b/version.py
@@ -1,2 +1,2 @@
 """Version details of graphql-cop."""
-VERSION = '1.9'
+VERSION = '1.99'

From 5b8ec89d882c0cf1bf358c6c6c0657fcc755ae08 Mon Sep 17 00:00:00 2001
From: halfluke
Date: Fri, 2 Sep 2022 02:33:22 +0100
Subject: [PATCH 2/6] Cleaned and simplified code. URL where tracing or
 unhandled errors occur is now shown.

---
 graphql-cop.py                    |  8 +++++++-
 lib/tests/info_graphiql.py        |  2 +-
 lib/tests/info_trace_mode.py      | 15 +--------------
 lib/tests/info_unhandled_error.py | 18 +++---------------
 4 files changed, 12 insertions(+), 31 deletions(-)

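Note: the elif fallbacks kept by this patch (added in PATCH 1/6) match against Python's str() rendering of the parsed JSON body, not against the raw response text, which is why the needle is written with single-quoted keys. A short illustration; the response body below is invented for the example:

    # Illustrative only: a made-up parsed response, to show what str() produces.
    body = {'errors': [{'message': 'boom',
                        'extensions': {'tracing': {'version': 1}}}]}

    rendered = str(body).lower()
    # str() renders dict keys with single quotes, e.g.
    # "{'errors': [{'message': 'boom', 'extensions': {'tracing': {'version': 1}}}]}"
    print('\'extensions\': {\'tracing\':' in rendered)     # True
    print('\'extensions\': {\'exception\':' in rendered)   # False, no exception key here
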
diff --git a/graphql-cop.py b/graphql-cop.py
index a6a7860..affcc4f 100644
--- a/graphql-cop.py
+++ b/graphql-cop.py
@@ -81,8 +81,14 @@ for test in tests:
     json_output.append(test(url, proxy, HEADERS))
 
 
+if hasattr(detect_graphiql, 'GraphQLIDEpath'):
+    url = detect_graphiql.GraphQLIDEpath
+    json_output.append(trace_mode(url, proxy, HEADERS))
+    json_output.append(unhandled_error_detection(url, proxy, HEADERS))
+
 if options.format == 'json':
-    print(json_output)
+    for i in range(len(json_output)):
+        print(json_output[i], end='\n\n')
 else:
     for i in json_output:
         if i['result']:
diff --git a/lib/tests/info_graphiql.py b/lib/tests/info_graphiql.py
index 7df65bd..8e43b0d 100644
--- a/lib/tests/info_graphiql.py
+++ b/lib/tests/info_graphiql.py
@@ -23,7 +23,7 @@ def detect_graphiql(url, proxy, headers):
 
     truepath = ""
     pathlist = parsed.path.split('/')
-    for p in range(0, len(pathlist)):
+    for p in range(len(pathlist)):
         truepath += pathlist[p] + '/'
     url = '{}://{}{}'.format(parsed.scheme, parsed.netloc, truepath)
     for endpoint in endpoints:
diff --git a/lib/tests/info_trace_mode.py b/lib/tests/info_trace_mode.py
index 3a6e97e..e28a6eb 100644
--- a/lib/tests/info_trace_mode.py
+++ b/lib/tests/info_trace_mode.py
@@ -1,6 +1,5 @@
 """Collect trace mode details."""
 from lib.utils import graph_query, curlify
-from lib.tests.info_graphiql import detect_graphiql
 
 
 def trace_mode(url, proxy, headers):
@@ -9,7 +8,7 @@ def trace_mode(url, proxy, headers):
         'result':False,
         'title':'Trace Mode',
         'description':'Tracing is Enabled',
-        'impact':'Information Leakage',
+        'impact':'Information Leakage - ' + url,
         'severity':'INFO',
         'curl_verify':''
     }
@@ -26,16 +25,4 @@ def trace_mode(url, proxy, headers):
     except:
         pass
 
-    if hasattr(detect_graphiql, 'GraphQLIDEpath'):
-        url = detect_graphiql.GraphQLIDEpath
-        try:
-            gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
-            res['curl_verify'] = curlify(gql_response)
-            if gql_response.json()['errors'][0]['extensions']['tracing']:
-                res['result'] = True
-            elif '\'extensions\': {\'tracing\':' in str(gql_response.json()).lower():
-                res['result'] = True
-        except:
-            pass
-
     return res
diff --git a/lib/tests/info_unhandled_error.py b/lib/tests/info_unhandled_error.py
index 70170d4..05c47b9 100644
--- a/lib/tests/info_unhandled_error.py
+++ b/lib/tests/info_unhandled_error.py
@@ -1,14 +1,14 @@
 """Collect trace mode details."""
 from lib.utils import graph_query, curlify
-from lib.tests.info_graphiql import detect_graphiql
+
 
 def unhandled_error_detection(url, proxy, headers):
-    """Get the trace mode."""
+    """Get unhandled errors."""
     res = {
         'result':False,
         'title':'Unhandled Errors Detection',
         'description':'Exception errors are not handled',
-        'impact':'Information Leakage',
+        'impact':'Information Leakage - ' + url,
         'severity':'INFO',
         'curl_verify':''
     }
@@ -25,16 +25,4 @@ def unhandled_error_detection(url, proxy, headers):
     except:
         pass
 
-    if hasattr(detect_graphiql, 'GraphQLIDEpath'):
-        url = detect_graphiql.GraphQLIDEpath
-        try:
-            gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
-            res['curl_verify'] = curlify(gql_response)
-            if gql_response.json()['errors'][0]['extensions']['exception']:
-                res['result'] = True
-            elif '\'extensions\': {\'exception\':' in str(gql_response.json()).lower():
-                res['result'] = True
-        except:
-            pass
-
     return res

From df8118a58bcb731ea1aa8cbf7505270b1fcee4d2 Mon Sep 17 00:00:00 2001
From: halfluke
Date: Fri, 2 Sep 2022 23:36:06 +0100
Subject: [PATCH 3/6] graphql-cop now runs against a list of endpoints and
 continues iterating through the list until all endpoints are tested in case
 there is more than one if the user had not specified the path (e.g. -t
 http://example.com). If the user had already specified the path (e.g. -t
 http://example.com/graphql) then only uses this endpoint.

---
 graphql-cop.py                          | 31 ++++++++++++++++---
 lib/tests/dos_alias_overloading.py      |  2 +-
 lib/tests/dos_batch.py                  |  2 +-
 lib/tests/dos_circular_introspection.py |  2 +-
 lib/tests/dos_directive_overloading.py  |  2 +-
 lib/tests/dos_field_duplication.py      |  2 +-
 lib/tests/info_field_suggestions.py     |  2 +-
 lib/tests/info_get_based_mutation.py    |  2 +-
 lib/tests/info_get_method_support.py    |  2 +-
 lib/tests/info_graphiql.py              | 27 +++++++--------------
 lib/tests/info_introspect.py            |  2 +-
 lib/tests/info_post_based_csrf.py       |  2 +-
 lib/tests/info_trace_mode.py            |  2 +-
 lib/tests/info_unhandled_error.py       |  2 +-
 version.py                              |  2 +-
 15 files changed, 41 insertions(+), 43 deletions(-)

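Note: the two building blocks this patch adds to graphql-cop.py, condensed into a standalone sketch. The expand() helper name is illustrative; the patch inlines the same logic at module level:

    from urllib.parse import urlparse

    ENDPOINTS = ['/graphiql', '/playground', '/console', '/graphql']

    def expand(target):
        """Return the list of URLs to test for a given target (sketch of the patch logic)."""
        parsed = urlparse(target)
        if parsed.path and parsed.path != '/':
            return [target]                      # explicit path given: test only that endpoint
        return [parsed.scheme + '://' + parsed.netloc + e for e in ENDPOINTS]

    print(expand('http://example.com'))          # four candidates, /graphiql .. /graphql
    print(expand('http://example.com/graphql'))  # ['http://example.com/graphql']

    # Each finding's impact string is suffixed with the last path segment of the URL
    # it was observed on:
    print('/' + 'http://example.com/graphiql'.rsplit('/', 1)[-1])   # -> /graphiql
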
diff --git a/graphql-cop.py b/graphql-cop.py
index affcc4f..9711219 100644
--- a/graphql-cop.py
+++ b/graphql-cop.py
@@ -22,7 +22,6 @@
 from lib.tests.info_unhandled_error import unhandled_error_detection
 from lib.utils import is_graphql, draw_art
 
-
 parser = OptionParser(usage='%prog -t http://example.com -o json')
 parser.add_option('-t', '--target', dest='url', help='target url with the path')
 parser.add_option('-H', '--header', dest='header', action='append', help='Append Header(s) to the request \'{"Authorization": "Bearer eyjt"}\' - Use multiple -H for multiple Headers')
@@ -33,6 +32,7 @@ parser.add_option('--version', '-v', dest='version', action='store_true',
                   default=False, help='Print out the current version and exit.')
 
+
 options, args = parser.parse_args()
 
 if options.version:
@@ -61,14 +61,20 @@
     print("Cannot cast %s into header dictionary. Ensure the format \'{\"key\": \"value\"}\'."%(options.header))
 
 if not urlparse(options.url).scheme:
-    print("URL missing scheme (http:// or https://). Ensure ULR contains some scheme.")
+    print("URL missing scheme (http:// or https://). Ensure URL contains some scheme.")
     sys.exit(1)
 else:
     url = options.url
 
-if not is_graphql(url, proxy, HEADERS):
-    print(url, 'does not seem to be running GraphQL.')
-    sys.exit(1)
+endpoints = ['/graphiql', '/playground', '/console', '/graphql']
+paths = []
+parsed = urlparse(url)
+
+if parsed.path and parsed.path != '/':
+    paths.append(url)
+else:
+    for endpoint in endpoints:
+        paths.append(parsed.scheme + '://' + parsed.netloc + endpoint)
 
 tests = [field_suggestions, introspection, detect_graphiql,
          get_method_support, alias_overloading, batch_query,
@@ -78,14 +84,13 @@
 json_output = []
 
-for test in tests:
-    json_output.append(test(url, proxy, HEADERS))
+for path in paths:
+    for test in tests:
+        json_output.append(test(path, proxy, HEADERS))
 
 
-if hasattr(detect_graphiql, 'GraphQLIDEpath'):
-    url = detect_graphiql.GraphQLIDEpath
-    json_output.append(trace_mode(url, proxy, HEADERS))
-    json_output.append(unhandled_error_detection(url, proxy, HEADERS))
+json_output = sorted(json_output, key=lambda d: d['title'])
+isgraphql=0
 
 if options.format == 'json':
     for i in range(len(json_output)):
         print(json_output[i], end='\n\n')
@@ -93,3 +98,7 @@
     for i in json_output:
         if i['result']:
             print('[{}] {} - {} ({})'.format(i['severity'], i['title'], i['description'], i['impact']))
+            isgraphql += 1
+
+if isgraphql == 0 and options.format != 'json':
+    print(url, 'does not seem to be running GraphQL.')
diff --git a/lib/tests/dos_alias_overloading.py b/lib/tests/dos_alias_overloading.py
index c28aeb9..bff1067 100644
--- a/lib/tests/dos_alias_overloading.py
+++ b/lib/tests/dos_alias_overloading.py
@@ -8,7 +8,7 @@ def alias_overloading(url, proxy, headers):
         'result':False,
         'title':'Alias Overloading',
         'description':'Alias Overloading with 100+ aliases is allowed',
-        'impact':'Denial of Service',
+        'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
         'severity':'HIGH',
         'curl_verify':''
     }
diff --git a/lib/tests/dos_batch.py b/lib/tests/dos_batch.py
index 938e4ed..15d5916 100644
--- a/lib/tests/dos_batch.py
+++ b/lib/tests/dos_batch.py
@@ -8,7 +8,7 @@ def batch_query(url, proxy, headers):
         'result':False,
         'title':'Array-based Query Batching',
         'description':'Batch queries allowed with 10+ simultaneous queries',
-        'impact':'Denial of Service',
+        'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
         'severity':'HIGH',
         'curl_verify':''
     }
diff --git a/lib/tests/dos_circular_introspection.py b/lib/tests/dos_circular_introspection.py
index c935c76..2e1f9c9 100644
--- a/lib/tests/dos_circular_introspection.py
+++ b/lib/tests/dos_circular_introspection.py
@@ -7,7 +7,7 @@ def circular_query_introspection(url, proxy, headers):
         'result':False,
         'title':'Introspection-based Circular Query',
         'description':'Circular-query using Introspection',
-        'impact':'Denial of Service',
+        'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
         'severity':'HIGH',
         'curl_verify':''
     }
diff --git a/lib/tests/dos_directive_overloading.py b/lib/tests/dos_directive_overloading.py
index 1a5ac75..4115d2b 100644
--- a/lib/tests/dos_directive_overloading.py
+++ b/lib/tests/dos_directive_overloading.py
@@ -8,7 +8,7 @@ def directive_overloading(url, proxy, headers):
         'result':False,
         'title':'Directive Overloading',
         'description':'Multiple duplicated directives allowed in a query',
-        'impact':'Denial of Service',
+        'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
         'severity':'HIGH',
         'curl_verify':''
     }
diff --git a/lib/tests/dos_field_duplication.py b/lib/tests/dos_field_duplication.py
index 7a12bae..bb07187 100644
--- a/lib/tests/dos_field_duplication.py
+++ b/lib/tests/dos_field_duplication.py
@@ -8,7 +8,7 @@ def field_duplication(url, proxy, headers):
         'result':False,
         'title':'Field Duplication',
         'description':'Queries are allowed with 500 of the same repeated field',
-        'impact':'Denial of Service',
+        'impact':'Denial of Service - /' + url.rsplit('/', 1)[-1],
         'severity':'HIGH',
         'curl_verify':''
     }
diff --git a/lib/tests/info_field_suggestions.py b/lib/tests/info_field_suggestions.py
index 27b50b0..87581a4 100644
--- a/lib/tests/info_field_suggestions.py
+++ b/lib/tests/info_field_suggestions.py
@@ -8,7 +8,7 @@ def field_suggestions(url, proxy, headers):
         'result':False,
         'title':'Field Suggestions',
         'description':'Field Suggestions are Enabled',
-        'impact':'Information Leakage',
+        'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
         'severity':'LOW',
         'curl_verify':''
     }
diff --git a/lib/tests/info_get_based_mutation.py b/lib/tests/info_get_based_mutation.py
index b74d0ad..2b92a44 100644
--- a/lib/tests/info_get_based_mutation.py
+++ b/lib/tests/info_get_based_mutation.py
@@ -7,7 +7,7 @@ def get_based_mutation(url, proxies, headers):
         'result':False,
         'title':'Mutation is allowed over GET (possible CSRF)',
         'description':'GraphQL mutations allowed using the GET method',
-        'impact':'Possible Cross Site Request Forgery',
+        'impact':'Possible Cross Site Request Forgery - /' + url.rsplit('/', 1)[-1],
         'severity':'MEDIUM',
         'curl_verify':''
     }
diff --git a/lib/tests/info_get_method_support.py b/lib/tests/info_get_method_support.py
index 146255c..975e159 100644
--- a/lib/tests/info_get_method_support.py
+++ b/lib/tests/info_get_method_support.py
@@ -8,7 +8,7 @@ def get_method_support(url, proxies, headers):
         'result':False,
         'title':'GET Method Query Support',
         'description':'GraphQL queries allowed using the GET method',
-        'impact':'Possible Cross Site Request Forgery (CSRF)',
+        'impact':'Possible Cross Site Request Forgery (CSRF) - /' + url.rsplit('/', 1)[-1],
         'severity':'MEDIUM',
         'curl_verify':''
     }
diff --git a/lib/tests/info_graphiql.py b/lib/tests/info_graphiql.py
index 8e43b0d..11b218c 100644
--- a/lib/tests/info_graphiql.py
+++ b/lib/tests/info_graphiql.py
@@ -1,5 +1,4 @@
 """Collect GraphiQL details."""
-from urllib.parse import urlparse
 from lib.utils import request, curlify
 
 def detect_graphiql(url, proxy, headers):
@@ -8,34 +7,24 @@ def detect_graphiql(url, proxy, headers):
         'result':False,
         'title':'GraphQL IDE',
         'description':'GraphiQL Explorer/Playground Enabled',
-        'impact':'Information Leakage',
+        'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
         'severity':'LOW',
         'curl_verify':''
     }
 
     heuristics = ('graphiql.min.css', 'GraphQL Playground', 'GraphiQL', 'graphql-playground')
-    endpoints = ['graphiql', 'playground', 'console', 'graphql']
-    parsed = urlparse(url)
 
     if "Accept" in headers.keys():
         backup_accept_header=headers["Accept"]
     headers["Accept"]= "text/html"
 
-    truepath = ""
-    pathlist = parsed.path.split('/')
-    for p in range(len(pathlist)):
-        truepath += pathlist[p] + '/'
-    url = '{}://{}{}'.format(parsed.scheme, parsed.netloc, truepath)
-    for endpoint in endpoints:
-        response = request(url + endpoint, proxies=proxy, headers=headers)
-        res['curl_verify'] = curlify(response)
-        try:
-            if response and any(word in response.text for word in heuristics):
-                detect_graphiql.GraphQLIDEpath = url + endpoint
-                res['result'] = True
-                break
-        except:
-            pass
+    response = request(url, proxies=proxy, headers=headers)
+    res['curl_verify'] = curlify(response)
+    try:
+        if response and any(word in response.text for word in heuristics):
+            res['result'] = True
+    except:
+        pass
 
     del headers["Accept"]
     if 'backup_accept_header' in locals():
diff --git a/lib/tests/info_introspect.py b/lib/tests/info_introspect.py
index eb7e9e8..7667bbc 100644
--- a/lib/tests/info_introspect.py
+++ b/lib/tests/info_introspect.py
@@ -8,7 +8,7 @@ def introspection(url, proxy, headers):
         'result':False,
         'title':'Introspection',
         'description':'Introspection Query Enabled',
-        'impact':'Information Leakage',
+        'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
         'severity':'HIGH',
         'curl_verify':''
     }
diff --git a/lib/tests/info_post_based_csrf.py b/lib/tests/info_post_based_csrf.py
index d36dc58..6d15478 100644
--- a/lib/tests/info_post_based_csrf.py
+++ b/lib/tests/info_post_based_csrf.py
@@ -7,7 +7,7 @@ def post_based_csrf(url, proxies, headers):
         'result':False,
         'title':'POST based url-encoded query (possible CSRF)',
         'description':'GraphQL accepts non-JSON queries over POST',
-        'impact':'Possible Cross Site Request Forgery',
+        'impact':'Possible Cross Site Request Forgery - /' + url.rsplit('/', 1)[-1],
         'severity':'MEDIUM',
         'curl_verify':''
     }
diff --git a/lib/tests/info_trace_mode.py b/lib/tests/info_trace_mode.py
index e28a6eb..21493fc 100644
--- a/lib/tests/info_trace_mode.py
+++ b/lib/tests/info_trace_mode.py
@@ -8,7 +8,7 @@ def trace_mode(url, proxy, headers):
         'result':False,
         'title':'Trace Mode',
         'description':'Tracing is Enabled',
-        'impact':'Information Leakage - ' + url,
+        'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
         'severity':'INFO',
         'curl_verify':''
     }
diff --git a/lib/tests/info_unhandled_error.py b/lib/tests/info_unhandled_error.py
index 05c47b9..35c625c 100644
--- a/lib/tests/info_unhandled_error.py
+++ b/lib/tests/info_unhandled_error.py
@@ -8,7 +8,7 @@ def unhandled_error_detection(url, proxy, headers):
         'result':False,
         'title':'Unhandled Errors Detection',
         'description':'Exception errors are not handled',
-        'impact':'Information Leakage - ' + url,
+        'impact':'Information Leakage - /' + url.rsplit('/', 1)[-1],
         'severity':'INFO',
         'curl_verify':''
     }
diff --git a/version.py b/version.py
index 0dfe791..781f106 100644
--- a/version.py
+++ b/version.py
@@ -1,2 +1,2 @@
 """Version details of graphql-cop."""
-VERSION = '1.99'
+VERSION = '1.10'

From 61f38b29c6845d9b1e8d59cfc443723eff787b37 Mon Sep 17 00:00:00 2001
From: halfluke
Date: Fri, 2 Sep 2022 23:55:42 +0100
Subject: [PATCH 4/6] minor fix

---
 graphql-cop.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

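Note: without this fix the isgraphql counter is only incremented in the non-JSON branch, so a run with -o json would always end with the "does not seem to be running GraphQL" message even after findings had just been printed. The counting that both branches now perform is equivalent to the sketch below; this is an illustration, not what the patch itself does:

    # Illustrative findings list; in graphql-cop this is json_output.
    findings = [{'result': True}, {'result': False}, {'result': True}]

    # Count positive findings once, independent of the output format.
    positives = sum(1 for f in findings if f['result'])   # 2
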
diff --git a/graphql-cop.py b/graphql-cop.py
index 9711219..2ab5311 100644
--- a/graphql-cop.py
+++ b/graphql-cop.py
@@ -94,11 +94,13 @@
 if options.format == 'json':
     for i in range(len(json_output)):
         print(json_output[i], end='\n\n')
+        if json_output[i]['result']:
+            isgraphql += 1
 else:
     for i in json_output:
         if i['result']:
             print('[{}] {} - {} ({})'.format(i['severity'], i['title'], i['description'], i['impact']))
             isgraphql += 1
 
-if isgraphql == 0 and options.format != 'json':
+if isgraphql == 0:
     print(url, 'does not seem to be running GraphQL.')

From fb93e25de3f85794fbb86c438b44ab07d47f0e2d Mon Sep 17 00:00:00 2001
From: halfluke
Date: Sat, 3 Sep 2022 05:54:47 +0100
Subject: [PATCH 5/6] options description

---
 graphql-cop.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/graphql-cop.py b/graphql-cop.py
index 2ab5311..b7387d1 100644
--- a/graphql-cop.py
+++ b/graphql-cop.py
@@ -23,8 +23,8 @@ from lib.utils import is_graphql, draw_art
 
 parser = OptionParser(usage='%prog -t http://example.com -o json')
-parser.add_option('-t', '--target', dest='url', help='target url with the path')
-parser.add_option('-H', '--header', dest='header', action='append', help='Append Header(s) to the request \'{"Authorization": "Bearer eyjt"}\' - Use multiple -H for multiple Headers')
+parser.add_option('-t', '--target', dest='url', help='target url with the path - if a GraphQL path is not provided, GraphQL Cop will iterate through a series of common GraphQL paths')
+parser.add_option('-H', '--header', dest='header', action='append', help='Append Header(s) to the request \'{"Authorization": "Bearer eyjt"}\' - Use multiple -H for additional Headers')
 parser.add_option('-o', '--output', dest='format', help='json', default=False)
 parser.add_option('--proxy', '-x', dest='proxy', action='store_true', default=False,

From 1ecf45bdef9e0dfac050d0645c370ecad2085f43 Mon Sep 17 00:00:00 2001
From: halfluke
Date: Sat, 3 Sep 2022 18:24:06 +0100
Subject: [PATCH 6/6] better handling of isgraphql()

---
 graphql-cop.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/graphql-cop.py b/graphql-cop.py
index b7387d1..7cd0c36 100644
--- a/graphql-cop.py
+++ b/graphql-cop.py
@@ -85,22 +85,18 @@ json_output = []
 
 for path in paths:
+    if not is_graphql(path, proxy, HEADERS):
+        print(path, 'does not seem to be running GraphQL.')
+        continue
     for test in tests:
         json_output.append(test(path, proxy, HEADERS))
 
 
 json_output = sorted(json_output, key=lambda d: d['title'])
-isgraphql=0
 
 if options.format == 'json':
     for i in range(len(json_output)):
         print(json_output[i], end='\n\n')
-        if json_output[i]['result']:
-            isgraphql += 1
 else:
     for i in json_output:
         if i['result']:
             print('[{}] {} - {} ({})'.format(i['severity'], i['title'], i['description'], i['impact']))
-            isgraphql += 1
-
-if isgraphql == 0:
-    print(url, 'does not seem to be running GraphQL.')