Skip to content

Commit

Permalink
Moved the fallback logic for determining GraphQL spec version to GQLSpection library
Browse files Browse the repository at this point in the history
  • Loading branch information
execveat committed Apr 26, 2023
1 parent 17529b9 commit d7dfdd3
Show file tree
Hide file tree
Showing 5 changed files with 135 additions and 131 deletions.
2 changes: 1 addition & 1 deletion lib/GQLSpection
4 changes: 3 additions & 1 deletion python/inql/config/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,9 @@
# Valid levels: DEBUG, INFO (called 'verbose' in UI), WARN (called 'normal' in UI)
'logging.level': 'WARN',
# The depth of the auto-generated GraphQL requests
'codegen.depth': 4
'codegen.depth': 2,
# The padding of the auto-generated GraphQL requests
'codegen.pad': 2
}

# Note that Config only supports strings, integers and bools.
Expand Down
6 changes: 4 additions & 2 deletions python/inql/extender.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,10 @@ def __init__(self, burp_callbacks, upstream_montoya):
sys.stderr = callbacks.getStderr()

# FIXME: Remove this once this is exposed through Settings UI
config.set('logging.level', 'INFO', scope='global')
config.set('codegen.depth', 6, scope='project')
config.set('logging.level', 'DEBUG', scope='global')

config.delete('codegen.depth', 'global')
config.delete('codegen.pad', 'global')

set_log_level(log, config.get('logging.level'))
set_log_level(gql_log, config.get('logging.level'))
Expand Down
149 changes: 56 additions & 93 deletions python/inql/scanner/introspection.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,19 @@
# coding: utf-8
import json
import os
from collections import OrderedDict
from datetime import datetime
from urlparse import urlparse

from gqlspection import GQLQuery, GQLSchema
from gqlspection.introspection_query import get_introspection_query
from gqlspection import GQLSchema
from gqlspection.utils import query_introspection

from ..config import config
from ..globals import app
from ..logger import log
from ..utils.decorators import threaded
from ..utils.graphql import is_valid_graphql_name
from ..utils.http import request_template, send_request
from ..utils.http import Request
from ..utils.ui import visual_error


Expand All @@ -22,68 +23,36 @@
# 3. urlencoded POST
# 4. form-data POST
def _normalize_headers(host, explicit_headers):
"""Make sure headers contain valid host and content type."""
"""Make sure headers contain valid host and content type.
headers = []
If no content type is provided, default to application/json.
Explicit headers should be a dict. _normalize_headers will return a dict as well.
"""
explicit_headers = explicit_headers or {}

headers = OrderedDict()

# Host header is required, and must be the first header
if 'Host' in explicit_headers:
headers['Host'] = explicit_headers['Host']
del explicit_headers['Host']
else:
headers['Host'] = host

content_type_present, host_header_present = False, False
for k, v in (explicit_headers or []):
headers.append((k, v))
content_type_present = False
for k, v in explicit_headers:
headers[k] = v

if (k.lower() == 'content-type' and
v.lower() in ('application/json', 'application/graphql')):
content_type_present = True
elif k.lower() == 'host':
host_header_present = True

if not content_type_present:
headers.append(('Content-Type', 'application/json'))
headers['Content-Type'] = 'application/json'

if not host_header_present:
headers = [('Host', host)] + headers
return headers


def query_introspection(url, headers=None):
    """
    Send introspection query (through Burp facilities) and get the GraphQL schema.

    Tries every known introspection query version, starting from the most
    recent, and returns the first response that parses as JSON and contains
    no GraphQL errors.

    Raises:
        Exception: if no version yields a usable introspection response.
    """
    log.debug("Introspection query about to be sent")
    for version in ('draft', 'oct2021', 'jun2018'):
        # Iterate through all introspection query versions, starting from the most recent one
        log.debug("Will try to get introspection query using '%s' version from '%s'.", version, url)

        # Get the introspection query
        body = '{{"query":"{}"}}'.format(get_introspection_query(version=version))
        log.debug("acquired introspection query body")

        # Send HTTP request through Burp facilities
        response = send_request(url, headers=headers, method='POST', body=body)
        log.debug("sent the request and got the response")

        try:
            schema = json.loads(response)
            log.debug("successfully parsed JSON")
        except Exception:
            # TODO: Doesn't this mean it's not a GraphQL endpoint? Maybe early return?
            log.error("Could not parse introspection query for the url '%s' (version: %s).", url, version)
            continue

        if 'errors' in schema:
            for msg in schema['errors']:
                log.debug("Received an error from %s (version: %s): %s", url, version, msg)
            # FIX: continue the *version* loop here - a `continue` inside the
            # message loop only advances the inner iteration and would let an
            # error response fall through to the success path below.
            continue

        # Got successful introspection response!
        log.info("Found the introspection response with '%s' version schema.", version)
        log.debug("The received introspection schema: %s", schema)
        return schema

    # None of the introspection queries were successful
    log.error("Introspection seems disabled for this endpoint: '%s'.", url)
    raise Exception("Introspection seems disabled for this endpoint: '%s'." % url)


@threaded
def analyze(url, filename=None, headers=None):
"""
Expand Down Expand Up @@ -118,10 +87,18 @@ def _analyze(url, filename=None, explicit_headers=None):
# TODO: Doesn't this mean it's not a GraphQL endpoint? Maybe early return?
log.error("Could not parse introspection schema from the file '%s' (exception: %s)", filename, str(e))
raise Exception("Could not parse introspection schema, make sure it's valid JSON GraphQL schema.")
# Build the request template by initializing query_introspection with a mocked request
try:
request = Request(mock=True)
query_introspection(url, headers, request_fn=request)
except:
# Expected to fail, always
pass
else:
log.debug("GraphQL schema wil be queried from the server.")
try:
schema = query_introspection(url, headers)
request = Request()
schema = query_introspection(url, headers, request_fn=request)
except Exception as e:
# TODO: show some visual feedback here as well
log.error("No JSON schema provided and server '%s' did not return results for the introspection query (exception: %s).", host, e)
Expand All @@ -142,12 +119,10 @@ def _analyze(url, filename=None, explicit_headers=None):
log.warning("Failed to create a new directory for the reports '%s' - as it already exists")
log.debug("Created the directory structure for the '%s'", url)

# Dump request template
template = request_template(url, method='POST', headers=headers)
with open(os.path.join(report_dir, "request_template.txt"), "wb") as f:
log.debug("Dumping the request template.")
f.write(url + '\n')
f.write(template.toString())
f.write(request.template)

# Dump JSON schema
with open(os.path.join(report_dir, "schema.json"), "w") as schema_file:
Expand All @@ -161,64 +136,52 @@ def _analyze(url, filename=None, explicit_headers=None):
log.error("Could not parse the received GraphQL schema.")
raise Exception("Could not parse the received GraphQL schema. Validate dumped JSON manually and file a bug report if it seems correct.")

# Write queries
# Write query files
log.debug("Writing queries for the url: '%s'.", url)
try:
queries = [
GQLQuery(parsed_schema.query, 'query', name=field.name, fields=[field],
depth=config.get('codegen.depth'))
for field in parsed_schema.query.fields if field.name
]
except:
raise Exception("Failed to parse queries.")
for query in parsed_schema.query.fields:
if not query.name:
log.error("Query without a name detected.")
continue

for query in queries:
if not is_valid_graphql_name(query.name):
# TODO: this does not warrant a popup, but it would be nice to show some kind of indication anyway
log.error("Query with invalid GraphQL name detected: '%s'.", query.name)
continue

log.debug("Writing query '%s'.", query.name + '.graphql' + ' to ' + os.getcwd())
filename = os.path.join(
queries_dir,
"{}.graphql".format(query.name)
)

try:
parsed = query.to_string(pad=4)
except:
raise Exception("Failed to parse query '%s'!" % query.name)

log.debug("Writing query " + query.name + '.graphql to ' + filename)
with open(filename, "w") as query_file:
query_file.write(parsed)
query_file.write(
parsed_schema.generate_query(query, depth=config.get('codegen.depth'))
.to_string(pad=config.get('codegen.pad')))
log.debug("Wrote query '%s'.", query.name + '.graphql')

# Write mutations, if any
if parsed_schema.mutation is None:
log.debug("No mutations found for the url: '%s'.", url)
return

# Write mutations
log.debug("Writing mutations for the url: '%s'.", url)
try:
mutations = [
GQLQuery(parsed_schema.mutation, 'mutation', name=field.name, fields=[field],
depth=config.get('codegen.depth'))
for field in parsed_schema.query.fields if field.name
]
except:
raise Exception("Failed to parse mutations.")
for mutation in parsed_schema.mutation.fields:
if not mutation.name:
log.error("Mutation without a name detected.")
continue

for mutation in mutations:
if not is_valid_graphql_name(mutation.name):
# TODO: this does not warrant a popup, but it would be nice to show some kind of indication anyway
log.error("Mutation with invalid GraphQL name detected: '%s'.", mutation.name)
continue

log.debug("Writing mutation '%s'.", mutation.name + '.graphql')
filename = os.path.join(
mutations_dir,
"{}.graphql".format(mutation.name)
)

try:
parsed = mutation.to_string(pad=4)
except:
raise Exception("Failed to parse mutation: '%s'!" % mutation.name)

log.debug("Writing mutation " + mutation.name + '.graphql to ' + filename)
with open(filename, "w") as mutation_file:
mutation_file.write(parsed)
mutation_file.write(
parsed_schema.generate_mutation(mutation, depth=config.get('codegen.depth'))
.to_string(pad=config.get('codegen.pad')))
log.debug("Wrote mutation '%s'.", mutation.name + '.graphql')
105 changes: 71 additions & 34 deletions python/inql/utils/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,37 +8,74 @@
from ..logger import log


def request_template(url, method, headers):
    """Generate request template - the HTTP message sans body"""
    # Build the bare request (service + method + path) with no headers yet.
    service = httpService(url)
    request = (httpRequest()
               .withService(service)
               .withMethod(method)
               .withPath(urlparse(url).path))

    if headers:
        # headers is a sequence of (name, value) pairs
        log.debug("Custom headers provided: %s of them", len(headers))
        for name, value in headers:
            request = request.withAddedHeader(name, value)
        log.debug("Successfully added all headers")

    log.debug("Created the request template")
    return request


def send_request(url, headers=None, method='GET', body=None):
    """Send an HTTP request through Burp and return the response body as a string."""
    log.debug("send_request(url: %s, headers: %s, method: %s, body:%s)", url, headers, method, body)

    # Start from the bodyless template, then attach the payload.
    request = request_template(url, method, headers)
    log.debug("acquired request_template")
    request = request.withBody(body)
    log.debug("Request that I'm sending to %s: %s", url, request.toString())

    # Burp's Montoya API performs the actual network round-trip.
    response = montoya.http().sendRequest(request).response()
    log.debug("Sent the request through Burp")

    body_text = response.bodyToString()
    log.debug("Response that I received from %s: %s", url, body_text)
    return body_text
class Request(object):
    """Callable that mimics the requests library's `request` function on top
    of Burp's Montoya HTTP API.

    When constructed with mock=True, a call only builds (and stores) the
    request template, then raises instead of sending anything.
    """

    def __init__(self, mock=False):
        # mock: build the template only, never touch the network
        self.mock = mock
        # toString() dump of the last request built (captured before the body is set)
        self.template = None

    def __call__(self, method, url, data=None, headers=None, cookies=None):
        """Mimic the requests library's 'request' function, but use Burp's HTTP API instead of the requests library."""
        request = (httpRequest()
                   .withService(httpService(url))
                   .withMethod(method)
                   .withPath(urlparse(url).path))

        # Set headers
        for name, value in (headers or {}).items():
            request = request.withAddedHeader(name, value)

        # Set cookies
        pairs = ["{}={}".format(k, v) for k, v in (cookies or {}).items()]
        if pairs:
            request = request.withAddedHeader("Cookie", "; ".join(pairs))

        # Save the template of the request (this gets dropped as a file in the Scanner's fileview)
        self.template = request.toString()
        log.debug("The request template is: {0}".format(self.template))

        # If we're mocking, just return the template and raise an Exception
        if self.mock:
            log.debug("Mocking the request, so we're not actually gonna send it.")
            raise Exception("Not actually gonna send a real request, we were just building the template.")
        log.debug("Not mocking, so we're gonna send the request.")

        # The body is deliberately attached only after the template was captured.
        if data is not None:
            request = request.withBody(data)

        # Send request and get response
        response = montoya.http().sendRequest(request).response()

        # Parse response (duplicate header names collapse to the last value seen)
        response_headers = {}
        for header in response.headers():
            response_headers[header.name()] = header.value()
        response_cookies = parse_cookies(response_headers.get("Set-Cookie", ""))
        response_body = response.bodyToString()
        log.debug("The response is: {0}".format(response_body))

        return Response(response.statusCode(), response_headers, response_body, response_cookies)


class Response(object):
    """Mimic the requests library's Response object."""

    def __init__(self, status_code, headers, text, cookies):
        # Plain data holder mirroring the attributes callers expect
        # from a requests.Response.
        self.status_code, self.headers = status_code, headers
        self.text, self.cookies = text, cookies

    def json(self):
        """Parse the response body as JSON and return the result."""
        # Assumes the body holds valid JSON; json raises otherwise.
        from json import loads
        return loads(self.text)

def parse_cookies(cookie_string):
    """Parse a Cookie/Set-Cookie style header value into a dict.

    Splits on ';' and keeps only 'name=value' pairs. Attribute-only
    segments (e.g. 'HttpOnly', 'Secure') are skipped, and values that
    themselves contain '=' (common for base64 payloads) are kept intact.

    Args:
        cookie_string: raw header value, e.g. "a=1; b=2; HttpOnly".

    Returns:
        dict mapping cookie names to values (empty dict for empty input).
    """
    cookies = {}
    if cookie_string:
        for chunk in cookie_string.split(";"):
            chunk = chunk.strip()
            # FIX: split at the first '=' only and skip flag-style segments;
            # the old `name, value = cookie.strip().split("=")` raised
            # ValueError in both of those cases.
            if "=" not in chunk:
                continue
            name, _, value = chunk.partition("=")
            cookies[name] = value
    return cookies

0 comments on commit d7dfdd3

Please sign in to comment.