2008-10-15 19:38:22 +04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
"""
|
2008-10-15 19:56:32 +04:00
|
|
|
$Id$
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-10-14 18:41:14 +04:00
|
|
|
Copyright (c) 2006-2010 sqlmap developers (http://sqlmap.sourceforge.net/)
|
2010-10-15 03:18:29 +04:00
|
|
|
See the file 'doc/COPYING' for copying permission
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
2010-05-24 15:00:49 +04:00
|
|
|
import codecs
|
2008-10-15 19:38:22 +04:00
|
|
|
import os
|
2010-06-30 01:07:23 +04:00
|
|
|
import re
|
2008-10-15 19:38:22 +04:00
|
|
|
import time
|
|
|
|
|
2010-10-10 22:56:43 +04:00
|
|
|
from extra.clientform.clientform import ParseResponse
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.common import dataToSessionFile
|
|
|
|
from lib.core.common import paramToDict
|
2010-10-10 22:56:43 +04:00
|
|
|
from lib.core.common import readInput
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
2009-09-26 03:03:45 +04:00
|
|
|
from lib.core.data import logger
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import paths
|
|
|
|
from lib.core.dump import dumper
|
2010-11-08 12:44:32 +03:00
|
|
|
from lib.core.enums import HTTPMETHOD
|
|
|
|
from lib.core.enums import PLACE
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapFilePathException
|
|
|
|
from lib.core.exception import sqlmapGenericException
|
|
|
|
from lib.core.exception import sqlmapSyntaxException
|
|
|
|
from lib.core.session import resumeConfKb
|
2010-05-28 20:43:04 +04:00
|
|
|
from lib.core.xmldump import dumper as xmldumper
|
2010-10-10 22:56:43 +04:00
|
|
|
from lib.request.connect import Connect as Request
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def __setRequestParams():
|
|
|
|
"""
|
|
|
|
Check and set the parameters and perform checks on 'data' option for
|
|
|
|
HTTP method POST.
|
|
|
|
"""
|
|
|
|
|
2010-03-27 02:23:25 +03:00
|
|
|
if conf.direct:
|
|
|
|
conf.parameters[None] = "direct connection"
|
|
|
|
return
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
__testableParameters = False
|
|
|
|
|
|
|
|
# Perform checks on GET parameters
|
2010-11-08 12:44:32 +03:00
|
|
|
if conf.parameters.has_key(PLACE.GET) and conf.parameters[PLACE.GET]:
|
|
|
|
parameters = conf.parameters[PLACE.GET]
|
|
|
|
__paramDict = paramToDict(PLACE.GET, parameters)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if __paramDict:
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.paramDict[PLACE.GET] = __paramDict
|
2008-10-15 19:38:22 +04:00
|
|
|
__testableParameters = True
|
|
|
|
|
|
|
|
# Perform checks on POST parameters
|
2010-11-08 12:44:32 +03:00
|
|
|
if conf.method == HTTPMETHOD.POST and not conf.data:
|
2008-10-15 19:38:22 +04:00
|
|
|
errMsg = "HTTP POST method depends on HTTP data value to be posted"
|
|
|
|
raise sqlmapSyntaxException, errMsg
|
|
|
|
|
|
|
|
if conf.data:
|
2010-06-30 01:07:23 +04:00
|
|
|
conf.data = conf.data.replace("\n", " ")
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.parameters[PLACE.POST] = conf.data
|
2010-06-30 01:07:23 +04:00
|
|
|
|
|
|
|
# Check if POST data is in xml syntax
|
|
|
|
if re.match("[\n]*<(\?xml |soap\:|ns).*>", conf.data):
|
|
|
|
conf.paramDict["POSTxml"] = True
|
|
|
|
__paramDict = paramToDict("POSTxml", conf.data)
|
|
|
|
else:
|
2010-11-08 12:44:32 +03:00
|
|
|
__paramDict = paramToDict(PLACE.POST, conf.data)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if __paramDict:
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.paramDict[PLACE.POST] = __paramDict
|
2008-10-15 19:38:22 +04:00
|
|
|
__testableParameters = True
|
|
|
|
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.method = HTTPMETHOD.POST
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2010-09-24 13:19:14 +04:00
|
|
|
if "*" in conf.url:
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.parameters[PLACE.URI] = conf.url
|
|
|
|
conf.paramDict[PLACE.URI] = {}
|
2010-09-24 13:19:14 +04:00
|
|
|
parts = conf.url.split("*")
|
|
|
|
for i in range(len(parts)-1):
|
|
|
|
result = str()
|
|
|
|
for j in range(len(parts)):
|
|
|
|
result += parts[j]
|
|
|
|
if i == j:
|
|
|
|
result += "*"
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.paramDict[PLACE.URI]["#%d*" % (i+1)] = result
|
2010-09-24 13:19:14 +04:00
|
|
|
conf.url = conf.url.replace("*", str())
|
2010-09-22 15:56:35 +04:00
|
|
|
__testableParameters = True
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
# Perform checks on Cookie parameters
|
|
|
|
if conf.cookie:
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.parameters[PLACE.COOKIE] = conf.cookie
|
|
|
|
__paramDict = paramToDict(PLACE.COOKIE, conf.cookie)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if __paramDict:
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.paramDict[PLACE.COOKIE] = __paramDict
|
2008-10-15 19:38:22 +04:00
|
|
|
__testableParameters = True
|
|
|
|
|
|
|
|
# Perform checks on User-Agent header value
|
|
|
|
if conf.httpHeaders:
|
|
|
|
for httpHeader, headerValue in conf.httpHeaders:
|
|
|
|
if httpHeader == "User-Agent":
|
2010-01-02 05:02:12 +03:00
|
|
|
# No need for url encoding/decoding the user agent
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.parameters[PLACE.UA] = headerValue
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
condition = not conf.testParameter
|
|
|
|
condition |= "User-Agent" in conf.testParameter
|
|
|
|
condition |= "user-agent" in conf.testParameter
|
|
|
|
condition |= "useragent" in conf.testParameter
|
|
|
|
condition |= "ua" in conf.testParameter
|
|
|
|
|
|
|
|
if condition:
|
2010-11-08 12:44:32 +03:00
|
|
|
conf.paramDict[PLACE.UA] = { PLACE.UA: headerValue }
|
2008-10-15 19:38:22 +04:00
|
|
|
__testableParameters = True
|
|
|
|
|
|
|
|
if not conf.parameters:
|
|
|
|
errMsg = "you did not provide any GET, POST and Cookie "
|
|
|
|
errMsg += "parameter, neither an User-Agent header"
|
|
|
|
raise sqlmapGenericException, errMsg
|
|
|
|
|
|
|
|
elif not __testableParameters:
|
|
|
|
errMsg = "all testable parameters you provided are not present "
|
|
|
|
errMsg += "within the GET, POST and Cookie parameters"
|
|
|
|
raise sqlmapGenericException, errMsg
|
|
|
|
|
2010-10-17 02:31:16 +04:00
|
|
|
def findPageForms():
    """
    Parse HTML forms out of the target page and interactively let the
    user enqueue each one as an additional testable target
    (url, method, data, cookie) in kb.targetUrls.
    """
    infoMsg = "searching for forms"
    logger.info(infoMsg)

    response, _ = Request.queryPage(response=True)
    forms = ParseResponse(response, backwards_compat=False)

    count = 1

    for form in forms:
        request = form.click()

        url = request.get_full_url()
        method = request.get_method()

        if request.has_data():
            data = request.get_data()
        else:
            data = None

        dataFragment = ", %s" % repr(data) if data else ""
        message = "(#%d) Do you want to test form '%s' (%s, %s%s) [Y/n] " % (count, form.name, method, url, dataFragment)
        test = readInput(message, default="Y")

        if not test or test[0] in ("y", "Y"):
            if method == HTTPMETHOD.POST:
                message = " Edit POST data [default: %s]: " % (data if data else "")
                test = readInput(message, default=data)

            elif method == HTTPMETHOD.GET:
                # Split the URL at the query string, if any, and let
                # the user edit the query part
                firstPart, question, secondPart = url.partition("?")

                if question:
                    message = " Edit GET data [default: %s]: " % secondPart
                    test = readInput(message, default=secondPart)
                    url = "%s?%s" % (firstPart, test)

            kb.targetUrls.add((url, method, data, conf.cookie))

        count += 1
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __setOutputResume():
|
|
|
|
"""
|
|
|
|
Check and set the output text file and the resume functionality.
|
|
|
|
"""
|
|
|
|
|
2009-09-26 03:03:45 +04:00
|
|
|
if not conf.sessionFile:
|
|
|
|
conf.sessionFile = "%s%ssession" % (conf.outputPath, os.sep)
|
|
|
|
|
|
|
|
logger.info("using '%s' as session file" % conf.sessionFile)
|
|
|
|
|
|
|
|
if os.path.exists(conf.sessionFile):
|
2010-03-04 16:01:18 +03:00
|
|
|
if not conf.flushSession:
|
2010-06-09 20:07:47 +04:00
|
|
|
readSessionFP = codecs.open(conf.sessionFile, "r", conf.dataEncoding, 'replace')
|
2010-05-11 17:36:30 +04:00
|
|
|
__url_cache = set()
|
|
|
|
__expression_cache = {}
|
2010-04-12 13:35:20 +04:00
|
|
|
|
2010-05-30 18:53:13 +04:00
|
|
|
for line in readSessionFP.readlines(): # xreadlines doesn't return unicode strings when codec.open() is used
|
2010-03-04 16:01:18 +03:00
|
|
|
if line.count("][") == 4:
|
|
|
|
line = line.split("][")
|
2010-04-12 13:35:20 +04:00
|
|
|
|
2010-03-04 16:01:18 +03:00
|
|
|
if len(line) != 5:
|
|
|
|
continue
|
2010-04-12 13:35:20 +04:00
|
|
|
|
2010-03-04 16:01:18 +03:00
|
|
|
url, _, _, expression, value = line
|
2010-11-03 13:08:27 +03:00
|
|
|
|
2010-03-04 16:01:18 +03:00
|
|
|
if not value:
|
|
|
|
continue
|
2010-11-03 13:08:27 +03:00
|
|
|
|
2010-03-04 16:01:18 +03:00
|
|
|
if url[0] == "[":
|
|
|
|
url = url[1:]
|
2010-11-03 13:08:27 +03:00
|
|
|
|
2010-05-30 18:53:13 +04:00
|
|
|
value = value.rstrip('\r\n') # Strips both chars independently
|
2010-04-12 13:35:20 +04:00
|
|
|
|
|
|
|
if url not in ( conf.url, conf.hostname ):
|
2010-03-04 16:01:18 +03:00
|
|
|
continue
|
2010-04-12 13:35:20 +04:00
|
|
|
|
2010-05-11 17:36:30 +04:00
|
|
|
if url not in __url_cache:
|
2010-03-04 16:01:18 +03:00
|
|
|
kb.resumedQueries[url] = {}
|
|
|
|
kb.resumedQueries[url][expression] = value
|
2010-05-11 17:36:30 +04:00
|
|
|
__url_cache.add(url)
|
2010-05-11 17:55:30 +04:00
|
|
|
__expression_cache[url] = set(expression)
|
2010-11-03 13:08:27 +03:00
|
|
|
|
2010-03-04 16:01:18 +03:00
|
|
|
resumeConfKb(expression, url, value)
|
2010-11-03 13:08:27 +03:00
|
|
|
|
2010-05-11 17:36:30 +04:00
|
|
|
if expression not in __expression_cache[url]:
|
2010-03-04 16:01:18 +03:00
|
|
|
kb.resumedQueries[url][expression] = value
|
2010-05-11 17:36:30 +04:00
|
|
|
__expression_cache[url].add(value)
|
2010-03-04 16:01:18 +03:00
|
|
|
elif len(value) >= len(kb.resumedQueries[url][expression]):
|
|
|
|
kb.resumedQueries[url][expression] = value
|
2010-05-11 17:36:30 +04:00
|
|
|
|
2010-03-04 16:01:18 +03:00
|
|
|
readSessionFP.close()
|
|
|
|
else:
|
|
|
|
try:
|
|
|
|
os.remove(conf.sessionFile)
|
|
|
|
logger.info("flushing session file")
|
|
|
|
except OSError, msg:
|
|
|
|
errMsg = "unable to flush the session file (%s)" % msg
|
|
|
|
raise sqlmapFilePathException, errMsg
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2009-09-26 03:03:45 +04:00
|
|
|
try:
|
2010-05-24 15:12:40 +04:00
|
|
|
conf.sessionFP = codecs.open(conf.sessionFile, "a", conf.dataEncoding)
|
2009-09-26 03:03:45 +04:00
|
|
|
dataToSessionFile("\n[%s]\n" % time.strftime("%X %x"))
|
|
|
|
except IOError:
|
|
|
|
errMsg = "unable to write on the session file specified"
|
|
|
|
raise sqlmapFilePathException, errMsg
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def __createFilesDir():
|
|
|
|
"""
|
|
|
|
Create the file directory.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not conf.rFile:
|
|
|
|
return
|
|
|
|
|
|
|
|
conf.filePath = paths.SQLMAP_FILES_PATH % conf.hostname
|
|
|
|
|
|
|
|
if not os.path.isdir(conf.filePath):
|
|
|
|
os.makedirs(conf.filePath, 0755)
|
|
|
|
|
|
|
|
def __createDumpDir():
|
|
|
|
"""
|
|
|
|
Create the dump directory.
|
|
|
|
"""
|
|
|
|
|
2010-06-02 15:01:41 +04:00
|
|
|
if not conf.dumpTable and not conf.dumpAll and not conf.search:
|
2008-10-15 19:38:22 +04:00
|
|
|
return
|
|
|
|
|
|
|
|
conf.dumpPath = paths.SQLMAP_DUMP_PATH % conf.hostname
|
|
|
|
|
|
|
|
if not os.path.isdir(conf.dumpPath):
|
|
|
|
os.makedirs(conf.dumpPath, 0755)
|
|
|
|
|
2010-05-28 20:43:04 +04:00
|
|
|
def __configureDumper():
    """
    Select the output dumper (XML when --xml-file is given, plain
    otherwise) and initialize its output file.
    """
    conf.dumper = xmldumper if conf.xmlFile else dumper

    conf.dumper.setOutputFile()
|
|
|
|
|
2010-03-15 14:55:13 +03:00
|
|
|
def __createTargetDirs():
|
2009-09-26 03:03:45 +04:00
|
|
|
"""
|
|
|
|
Create the output directory.
|
|
|
|
"""
|
|
|
|
|
|
|
|
conf.outputPath = "%s%s%s" % (paths.SQLMAP_OUTPUT_PATH, os.sep, conf.hostname)
|
|
|
|
|
|
|
|
if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
|
|
|
|
os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
|
|
|
|
|
|
|
|
if not os.path.isdir(conf.outputPath):
|
|
|
|
os.makedirs(conf.outputPath, 0755)
|
|
|
|
|
|
|
|
__createDumpDir()
|
|
|
|
__createFilesDir()
|
2010-05-28 20:43:04 +04:00
|
|
|
__configureDumper()
|
2009-09-26 03:03:45 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def initTargetEnv():
    """
    Initialize target environment.

    When running against multiple targets, resets all per-target
    configuration and knowledge base state so that findings from the
    previous target do not leak into the next one.
    """

    if conf.multipleTargets:
        # Drop cookies collected while testing the previous target
        if conf.cj:
            conf.cj.clear()

        # Per-target request configuration
        conf.paramDict = {}
        conf.parameters = {}
        conf.sessionFile = None

        # Per-target knowledge base state: detected DBMS, injection
        # point details, fingerprint and technique-test results
        kb.authHeader = None
        kb.dbms = None
        kb.dbmsDetected = False
        kb.dbmsVersion = [ "Unknown" ]
        kb.dynamicMarkings = []
        kb.errorTest = None
        kb.htmlFp = []
        kb.lastErrorPage = None
        kb.injParameter = None
        kb.injPlace = None
        kb.injType = None
        kb.nullConnection = None
        kb.pageStable = None
        kb.parenthesis = None
        kb.paramMatchRatio = {}
        kb.proxyAuthHeader = None
        kb.stackedTest = None
        kb.timeTest = None
        kb.unionComment = ""
        kb.unionCount = None
        kb.unionPosition = None
|
2008-11-28 01:33:33 +03:00
|
|
|
|
2010-03-15 14:33:34 +03:00
|
|
|
def setupTargetEnv():
    """
    Set up the per-target environment: create the output directories,
    check and set the testable request parameters, then prepare the
    session file for output/resume.
    """
    __createTargetDirs()
    __setRequestParams()
    __setOutputResume()
|