Mirror of https://github.com/sqlmapproject/sqlmap.git
Support for chunked requests (#3536)

* Add the `--chunk` option to send requests in chunks
* Solve the httplib & urllib2 Content-Length issue
* Remove info
* Solve the error caused by mixing GET mode and chunk
* Add `union` to CHUNKED_KEYWORDS
This commit is contained in:
parent 3b3774abaa
commit 340e250fb1
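In short: when --chunk is given together with POST data (e.g. a run along the lines of python sqlmap.py -u "http://target.example/vuln.php" --data "id=1" --chunk, with a hypothetical target URL), the POST body is rewritten into HTTP chunked transfer-encoded form by the new generateChunkDdata() helper, a 'Transfer-Encoding: Chunked' header is added, and the Content-Length header that httplib/urllib2 would otherwise compute is suppressed through the new HTTPHandler and the _setHttpChunked() hook. The individual hunks follow.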
lib/core/common.py

@@ -98,7 +98,7 @@ from lib.core.exception import SqlmapUserQuitException
 from lib.core.exception import SqlmapValueException
 from lib.core.log import LOGGER_HANDLER
 from lib.core.optiondict import optDict
-from lib.core.settings import BANNER
+from lib.core.settings import BANNER, CHUNKED_KEYWORDS
 from lib.core.settings import BOLD_PATTERNS
 from lib.core.settings import BOUNDED_INJECTION_MARKER
 from lib.core.settings import BRUTE_DOC_ROOT_PREFIXES
@@ -4895,3 +4895,50 @@ def firstNotNone(*args):
             break

     return retVal
+
+
+def generateChunkDdata(data):
+    """
+    Convert POST data into HTTP chunked transfer-encoded form. Chunk sizes are
+    chosen at random and shrunk whenever a keyword from CHUNKED_KEYWORDS would
+    fall wholly inside a chunk, so every keyword ends up cut across chunks
+    (the sizes and salts in the example output below vary between runs).
+
+    >>> generateChunkDdata('select 1,2,3,4 from admin')
+    4;AZdYz
+    sele
+    2;fJS4D
+    ct
+    5;qbCOT
+    1,2,
+    7;KItpi
+    3,4 fro
+    2;pFu1R
+    m
+    5;uRoYZ
+    admin
+    0
+
+    """
+    dl = len(data)
+    ret = ""
+    keywords = CHUNKED_KEYWORDS
+    index = 0
+
+    while index < dl:
+        # Candidate chunk length (1-9 bytes), clamped at the end of the data
+        chunk_size = random.randint(1, 9)
+        if index + chunk_size >= dl:
+            chunk_size = dl - index
+
+        # Random chunk extension appended after the hexadecimal size
+        salt = ''.join(random.sample(string.ascii_letters + string.digits, 5))
+
+        # Shrink the chunk until no keyword fits inside it whole
+        while 1:
+            tmp_chunk = data[index:index + chunk_size]
+            tmp_bool = True
+            for k in keywords:
+                if k in tmp_chunk:
+                    chunk_size -= 1
+                    tmp_bool = False
+                    break
+            if tmp_bool:
+                break
+
+        index += chunk_size
+        ret += "%s;%s\r\n" % (hex(chunk_size)[2:], salt)
+        ret += "%s\r\n" % tmp_chunk
+
+    ret += "0\r\n\r\n"
+    return ret
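A quick way to sanity-check the helper, sketched here for illustration only (not part of the commit; it assumes generateChunkDdata from the hunk above is in scope):

    # Decode a chunked body the way an HTTP server would and confirm the
    # round trip restores the original POST data.
    def decode_chunked(body):
        decoded, lines = "", body.split("\r\n")
        index = 0
        while index < len(lines):
            size = int(lines[index].split(";")[0], 16)  # "<hex size>;<salt>"
            if size == 0:
                break
            decoded += lines[index + 1]
            index += 2
        return decoded

    data = "select 1,2,3,4 from admin"
    assert decode_chunked(generateChunkDdata(data)) == data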
lib/core/option.py

@@ -7,6 +7,7 @@ See the file 'LICENSE' for copying permission

 import cookielib
 import glob
+import httplib
 import inspect
 import logging
 import os
@@ -139,6 +140,7 @@ from lib.request.basic import checkCharEncoding
 from lib.request.connect import Connect as Request
 from lib.request.dns import DNSServer
 from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
+from lib.request.httphandler import HTTPHandler
 from lib.request.httpshandler import HTTPSHandler
 from lib.request.pkihandler import HTTPSPKIAuthHandler
 from lib.request.rangehandler import HTTPRangeHandler
@@ -156,6 +158,7 @@ from thirdparty.socks import socks
 from xml.etree.ElementTree import ElementTree

 authHandler = urllib2.BaseHandler()
+httpHandler = HTTPHandler()
 httpsHandler = HTTPSHandler()
 keepAliveHandler = keepalive.HTTPHandler()
 proxyHandler = urllib2.ProxyHandler()
@@ -1106,7 +1109,7 @@ def _setHTTPHandlers():
     debugMsg = "creating HTTP requests opener object"
     logger.debug(debugMsg)

-    handlers = filter(None, [multipartPostHandler, proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])
+    handlers = filter(None, [multipartPostHandler, proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpHandler, httpsHandler])

     if not conf.dropSetCookie:
         if not conf.loadCookies:
@@ -2602,6 +2605,15 @@ def initOptions(inputOptions=AttribDict(), overrideOptions=False):
     _setKnowledgeBaseAttributes()
     _mergeOptions(inputOptions, overrideOptions)

+
+def _setHttpChunked():
+    conf.chunk = conf.chunk and conf.data
+    if conf.chunk:
+        def hook(self, a, b):
+            pass
+
+        httplib.HTTPConnection._set_content_length = hook
+
 def init():
     """
     Set attributes into both configuration and knowledge base singletons
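Two details of _setHttpChunked() are worth spelling out: --chunk is silently dropped unless POST data is present (conf.chunk = conf.chunk and conf.data), since chunked transfer encoding only applies to a request body; and the no-op hook keeps httplib from computing a Content-Length header, which must not accompany 'Transfer-Encoding: Chunked'. A minimal standalone sketch of the same monkey-patch (Python 2 only; _set_content_length is a private httplib helper whose two-argument signature is assumed from the hook above):

    import httplib  # Python 2 standard library

    def _skip_content_length(self, body, method):
        # Do nothing: prevents httplib from adding a Content-Length header
        # to a request whose body is already chunk-encoded.
        pass

    httplib.HTTPConnection._set_content_length = _skip_content_length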
@@ -2627,6 +2639,7 @@ def init():
     _listTamperingFunctions()
     _setTamperingFunctions()
     _setPreprocessFunctions()
+    _setHttpChunked()
     _setWafFunctions()
     _setTrafficOutputFP()
     _setupHTTPCollector()
lib/core/settings.py

@@ -794,6 +794,9 @@ KB_CHARS_BOUNDARY_CHAR = 'q'
 # Letters of lower frequency used in kb.chars
 KB_CHARS_LOW_FREQUENCY_ALPHABET = "zqxjkvbp"

+# Keywords that have to be split across chunks when sending chunked (--chunk) requests
+CHUNKED_KEYWORDS = ['select', 'update', 'insert', 'from', 'load_file', 'sysdatabases', 'msysaccessobjects', 'msysqueries', 'sysmodules', 'information_schema', 'union']
+
 # CSS style used in HTML dump format
 HTML_DUMP_CSS_STYLE = """<style>
 table{
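The point of the list: these tokens are what naive keyword filters typically match as contiguous strings in the request body, and generateChunkDdata() keeps shrinking a chunk until none of them fits inside it whole. A toy illustration (a sketch, not sqlmap code; assumes generateChunkDdata and CHUNKED_KEYWORDS from the hunks above are in scope):

    import re

    # A naive filter that flags any body containing one of the keywords
    blocked = re.compile("|".join(CHUNKED_KEYWORDS))

    body = "id=1 union select name from users"
    chunks = generateChunkDdata(body).split("\r\n")[1::2]  # data lines only

    assert blocked.search(body) is not None                # plain body is flagged
    assert all(blocked.search(c) is None for c in chunks)  # no single chunk is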
lib/parse/cmdline.py

@@ -221,6 +221,8 @@ def cmdLineParser(argv=None):
         request.add_option("--eval", dest="evalCode",
                            help="Evaluate provided Python code before the request (e.g. \"import hashlib;id2=hashlib.md5(id).hexdigest()\")")

+        request.add_option("--chunk", dest="chunk", action="store_true", help="Use HTTP chunked transfer encoding (adds a 'Transfer-Encoding: Chunked' header and sends the POST data in chunks)")
+
         # Optimization options
         optimization = OptionGroup(parser, "Optimization", "These options can be used to optimize the performance of sqlmap")
lib/request/connect.py

@@ -61,6 +61,7 @@ from lib.core.common import unicodeencode
 from lib.core.common import unsafeVariableNaming
 from lib.core.common import urldecode
 from lib.core.common import urlencode
+from lib.core.common import generateChunkDdata
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
@@ -271,9 +272,13 @@ class Connect(object):
         checking = kwargs.get("checking", False)
         skipRead = kwargs.get("skipRead", False)
         finalCode = kwargs.get("finalCode", False)
+        chunked = conf.chunk

         if multipart:
             post = multipart
+        if chunked:
+            post = urllib.unquote(post)
+            post = generateChunkDdata(post)

         websocket_ = url.lower().startswith("ws")

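A note on the order of operations here: post is URL-decoded with urllib.unquote() before being handed to generateChunkDdata(), presumably so that keywords hidden by percent-encoding are visible to the splitter; the chunked body is therefore sent in decoded form. For example, urllib.unquote("id=1%20union%20select%201") yields 'id=1 union select 1', which the helper can then break across chunk boundaries.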
@@ -397,6 +402,9 @@ class Connect(object):
             if conf.keepAlive:
                 headers[HTTP_HEADER.CONNECTION] = "keep-alive"

+            if chunked:
+                headers[HTTP_HEADER.TRANSFER_ENCODING] = "Chunked"
+
             if auxHeaders:
                 headers = forgeHeaders(auxHeaders, headers)

@@ -455,7 +463,7 @@ class Connect(object):
                 requestHeaders += "\r\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

             if post is not None:
-                if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
+                if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH) and not chunked:
                     requestHeaders += "\r\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

             if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
@@ -466,6 +474,7 @@ class Connect(object):
             if post is not None:
                 requestMsg += "\r\n\r\n%s" % getUnicode(post)

-            requestMsg += "\r\n"
+            if not chunked:
+                requestMsg += "\r\n"

             if not multipart:
lib/request/httphandler.py (new file, 46 lines)

@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+"""
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
+"""
+
+import urllib2
+import httplib
+from lib.core.data import conf
+
+
+class HTTPHandler(urllib2.HTTPHandler):
+    """
+    Hooked http_request that skips the Content-Length header for chunked (--chunk) requests.
+    """
+
+    def _hook(self, request):
+        host = request.get_host()
+        if not host:
+            raise urllib2.URLError('no host given')
+
+        if request.has_data():  # POST
+            data = request.get_data()
+            if not request.has_header('Content-type'):
+                request.add_unredirected_header(
+                    'Content-type',
+                    'application/x-www-form-urlencoded')
+            if not request.has_header('Content-length') and not conf.chunk:
+                request.add_unredirected_header(
+                    'Content-length', '%d' % len(data))
+
+        sel_host = host
+        if request.has_proxy():
+            scheme, sel = urllib2.splittype(request.get_selector())
+            sel_host, sel_path = urllib2.splithost(sel)
+
+        if not request.has_header('Host'):
+            request.add_unredirected_header('Host', sel_host)
+        for name, value in self.parent.addheaders:
+            name = name.capitalize()
+            if not request.has_header(name):
+                request.add_unredirected_header(name, value)
+        return request
+
+    http_request = _hook
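The new handler mirrors urllib2's stock http_request preprocessing (default Content-type, Host header, proxy selector handling) but leaves out the Content-length header whenever conf.chunk is set; _setHTTPHandlers() in lib/core/option.py now places it in the opener's handler list. A minimal sketch of wiring it up outside sqlmap (assumes the HTTPHandler class above is importable and conf.chunk has been set):

    import urllib2
    from lib.request.httphandler import HTTPHandler

    # Build an opener whose HTTP requests go through the patched handler,
    # analogous to what _setHTTPHandlers() does with its handler list.
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)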