mirror of https://github.com/sqlmapproject/sqlmap.git
Fixes #1305

parent a7c4400cc9
commit 21e8182ac6

@@ -1249,10 +1249,10 @@ def checkNullConnection():
     infoMsg = "testing NULL connection to the target URL"
     logger.info(infoMsg)

-    pushValue(kb.pageCompress)
-    kb.pageCompress = False
-
     try:
+        pushValue(kb.pageCompress)
+        kb.pageCompress = False
+
         page, headers, _ = Request.getPage(method=HTTPMETHOD.HEAD)

         if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
@@ -1282,7 +1282,8 @@ def checkNullConnection():
         errMsg = getUnicode(errMsg)
         raise SqlmapConnectionException(errMsg)

-    kb.pageCompress = popValue()
+    finally:
+        kb.pageCompress = popValue()

     return kb.nullConnection is not None

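Every hunk in this commit applies the same fix: the pushValue()/popValue() pair that saves and restores a kb.* or conf.* setting is moved into a try/finally, so the saved value is put back even when the guarded code raises (previously, an exception from Request.getPage() skipped the restoring popValue()). A minimal, self-contained sketch of the idea; the stack helpers and the stub request are simplified stand-ins, not sqlmap's actual implementations:

# Sketch of the pattern: save a flag, flip it for the duration of the request,
# and guarantee the restore with try/finally. pushValue/popValue and the stub
# request below are simplified stand-ins for sqlmap's own helpers.

_stack = []

def pushValue(value):
    # remember the current value so it can be restored later
    _stack.append(value)

def popValue():
    # return the most recently saved value
    return _stack.pop()

class kb:
    pageCompress = True  # stand-in for the knowledge-base flag

def check_null_connection(do_request):
    try:
        pushValue(kb.pageCompress)
        kb.pageCompress = False      # temporarily disable compression
        do_request()                 # may raise (connection error, Ctrl+C, ...)
    finally:
        # runs on success, exception and early return alike, so the flag
        # no longer stays stuck at False when the request blows up
        kb.pageCompress = popValue()

def failing_request():
    raise IOError("connection dropped")

try:
    check_null_connection(failing_request)
except IOError:
    pass

assert kb.pageCompress is True  # restored despite the exception
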
@@ -501,47 +501,49 @@ def start():
                     kb.testedParams.add(paramKey)

                     if testSqlInj:
-                        if place == PLACE.COOKIE:
-                            pushValue(kb.mergeCookies)
-                            kb.mergeCookies = False
+                        try:
+                            if place == PLACE.COOKIE:
+                                pushValue(kb.mergeCookies)
+                                kb.mergeCookies = False

-                        check = heuristicCheckSqlInjection(place, parameter)
+                            check = heuristicCheckSqlInjection(place, parameter)

-                        if check != HEURISTIC_TEST.POSITIVE:
-                            if conf.smart or (kb.ignoreCasted and check == HEURISTIC_TEST.CASTED):
-                                infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
-                                logger.info(infoMsg)
-                                continue
+                            if check != HEURISTIC_TEST.POSITIVE:
+                                if conf.smart or (kb.ignoreCasted and check == HEURISTIC_TEST.CASTED):
+                                    infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
+                                    logger.info(infoMsg)
+                                    continue

-                        infoMsg = "testing for SQL injection on %s " % paramType
-                        infoMsg += "parameter '%s'" % parameter
-                        logger.info(infoMsg)
+                            infoMsg = "testing for SQL injection on %s " % paramType
+                            infoMsg += "parameter '%s'" % parameter
+                            logger.info(infoMsg)

-                        injection = checkSqlInjection(place, parameter, value)
-                        proceed = not kb.endDetection
+                            injection = checkSqlInjection(place, parameter, value)
+                            proceed = not kb.endDetection

-                        if injection is not None and injection.place is not None:
-                            kb.injections.append(injection)
+                            if injection is not None and injection.place is not None:
+                                kb.injections.append(injection)

-                            # In case when user wants to end detection phase (Ctrl+C)
-                            if not proceed:
-                                break
+                                # In case when user wants to end detection phase (Ctrl+C)
+                                if not proceed:
+                                    break

-                            msg = "%s parameter '%s' " % (injection.place, injection.parameter)
-                            msg += "is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
-                            test = readInput(msg, default="N")
+                                msg = "%s parameter '%s' " % (injection.place, injection.parameter)
+                                msg += "is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
+                                test = readInput(msg, default="N")

-                            if test[0] not in ("y", "Y"):
-                                proceed = False
-                                paramKey = (conf.hostname, conf.path, None, None)
-                                kb.testedParams.add(paramKey)
-                        else:
-                            warnMsg = "%s parameter '%s' is not " % (paramType, parameter)
-                            warnMsg += "injectable"
-                            logger.warn(warnMsg)
+                                if test[0] not in ("y", "Y"):
+                                    proceed = False
+                                    paramKey = (conf.hostname, conf.path, None, None)
+                                    kb.testedParams.add(paramKey)
+                            else:
+                                warnMsg = "%s parameter '%s' is not " % (paramType, parameter)
+                                warnMsg += "injectable"
+                                logger.warn(warnMsg)

-                        if place == PLACE.COOKIE:
-                            kb.mergeCookies = popValue()
+                        finally:
+                            if place == PLACE.COOKIE:
+                                kb.mergeCookies = popValue()

         if len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None):
             if kb.vainRun and not conf.multipleTargets:

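In start() the push is conditional: kb.mergeCookies is only saved when the tested parameter lives in a cookie, so the finally block repeats the place == PLACE.COOKIE check before popping; without it, a non-cookie run would pop a value it never pushed. A reduced sketch of that detail, with stand-in names and a fake abort in place of sqlmap's detection calls:

# Sketch: a conditional push must be mirrored by the same condition in the
# finally block, or a non-cookie run would pop a value that was never pushed.
# pushValue/popValue and the names below are simplified stand-ins.

_stack = []

def pushValue(value):
    _stack.append(value)

def popValue():
    return _stack.pop()

class kb:
    mergeCookies = True

def test_parameter(place, run_checks):
    try:
        if place == "COOKIE":
            pushValue(kb.mergeCookies)
            kb.mergeCookies = False  # do not merge cookies while injecting into one
        run_checks()                 # may raise, e.g. the user hits Ctrl+C
    finally:
        if place == "COOKIE":        # pop only what was actually pushed
            kb.mergeCookies = popValue()

def aborted_checks():
    raise KeyboardInterrupt

for place in ("GET", "COOKIE"):
    try:
        test_parameter(place, aborted_checks)
    except KeyboardInterrupt:
        pass

assert kb.mergeCookies is True and not _stack  # flag restored, stack balanced
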
@@ -1030,23 +1030,24 @@ class Connect(object):
         if kb.nullConnection and not content and not response and not timeBasedCompare:
             noteResponseTime = False

-            pushValue(kb.pageCompress)
-            kb.pageCompress = False
+            try:
+                pushValue(kb.pageCompress)
+                kb.pageCompress = False

-            if kb.nullConnection == NULLCONNECTION.HEAD:
-                method = HTTPMETHOD.HEAD
-            elif kb.nullConnection == NULLCONNECTION.RANGE:
-                auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"
+                if kb.nullConnection == NULLCONNECTION.HEAD:
+                    method = HTTPMETHOD.HEAD
+                elif kb.nullConnection == NULLCONNECTION.RANGE:
+                    auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"

-            _, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
+                _, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))

-            if headers:
-                if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers:
-                    pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
-                elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers:
-                    pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
-
-            kb.pageCompress = popValue()
+                if headers:
+                    if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers:
+                        pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
+                    elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers:
+                        pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
+            finally:
+                kb.pageCompress = popValue()

         if not pageLength:
             try:

@@ -391,11 +391,13 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
                 warnMsg += ". Falling back to partial UNION technique"
                 singleTimeWarnMessage(warnMsg)

-                pushValue(kb.forcePartialUnion)
-                kb.forcePartialUnion = True
-                value = _goUnion(query, unpack, dump)
-                found = (value is not None) or (value is None and expectingNone)
-                kb.forcePartialUnion = popValue()
+                try:
+                    pushValue(kb.forcePartialUnion)
+                    kb.forcePartialUnion = True
+                    value = _goUnion(query, unpack, dump)
+                    found = (value is not None) or (value is None and expectingNone)
+                finally:
+                    kb.forcePartialUnion = popValue()
             else:
                 singleTimeWarnMessage(warnMsg)

@@ -81,73 +81,74 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where=
         return found

-    pushValue(kb.errorIsNone)
-    items, ratios = [], []
-    kb.errorIsNone = False
-    lowerCount, upperCount = conf.uColsStart, conf.uColsStop
+    try:
+        pushValue(kb.errorIsNone)
+        items, ratios = [], []
+        kb.errorIsNone = False
+        lowerCount, upperCount = conf.uColsStart, conf.uColsStop

-    if lowerCount == 1:
-        found = kb.orderByColumns or _orderByTechnique()
-        if found:
-            kb.orderByColumns = found
-            infoMsg = "target URL appears to have %d column%s in query" % (found, 's' if found > 1 else "")
-            singleTimeLogMessage(infoMsg)
-            return found
+        if lowerCount == 1:
+            found = kb.orderByColumns or _orderByTechnique()
+            if found:
+                kb.orderByColumns = found
+                infoMsg = "target URL appears to have %d column%s in query" % (found, 's' if found > 1 else "")
+                singleTimeLogMessage(infoMsg)
+                return found

-    if abs(upperCount - lowerCount) < MIN_UNION_RESPONSES:
-        upperCount = lowerCount + MIN_UNION_RESPONSES
+        if abs(upperCount - lowerCount) < MIN_UNION_RESPONSES:
+            upperCount = lowerCount + MIN_UNION_RESPONSES

-    min_, max_ = MAX_RATIO, MIN_RATIO
-    pages = {}
+        min_, max_ = MAX_RATIO, MIN_RATIO
+        pages = {}

-    for count in xrange(lowerCount, upperCount + 1):
-        query = agent.forgeUnionQuery('', -1, count, comment, prefix, suffix, kb.uChar, where)
-        payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where)
-        page, headers = Request.queryPage(payload, place=place, content=True, raise404=False)
-        if not isNullValue(kb.uChar):
-            pages[count] = page
-        ratio = comparison(page, headers, getRatioValue=True) or MIN_RATIO
-        ratios.append(ratio)
-        min_, max_ = min(min_, ratio), max(max_, ratio)
-        items.append((count, ratio))
+        for count in xrange(lowerCount, upperCount + 1):
+            query = agent.forgeUnionQuery('', -1, count, comment, prefix, suffix, kb.uChar, where)
+            payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where)
+            page, headers = Request.queryPage(payload, place=place, content=True, raise404=False)
+            if not isNullValue(kb.uChar):
+                pages[count] = page
+            ratio = comparison(page, headers, getRatioValue=True) or MIN_RATIO
+            ratios.append(ratio)
+            min_, max_ = min(min_, ratio), max(max_, ratio)
+            items.append((count, ratio))

-    if not isNullValue(kb.uChar):
-        for regex in (kb.uChar, r'>\s*%s\s*<' % kb.uChar):
-            contains = [(count, re.search(regex, page or "", re.IGNORECASE) is not None) for count, page in pages.items()]
-            if len(filter(lambda x: x[1], contains)) == 1:
-                retVal = filter(lambda x: x[1], contains)[0][0]
-                break
+        if not isNullValue(kb.uChar):
+            for regex in (kb.uChar, r'>\s*%s\s*<' % kb.uChar):
+                contains = [(count, re.search(regex, page or "", re.IGNORECASE) is not None) for count, page in pages.items()]
+                if len(filter(lambda x: x[1], contains)) == 1:
+                    retVal = filter(lambda x: x[1], contains)[0][0]
+                    break

-    if not retVal:
-        ratios.pop(ratios.index(min_))
-        ratios.pop(ratios.index(max_))
+        if not retVal:
+            ratios.pop(ratios.index(min_))
+            ratios.pop(ratios.index(max_))

-        minItem, maxItem = None, None
+            minItem, maxItem = None, None

-        for item in items:
-            if item[1] == min_:
-                minItem = item
-            elif item[1] == max_:
-                maxItem = item
+            for item in items:
+                if item[1] == min_:
+                    minItem = item
+                elif item[1] == max_:
+                    maxItem = item

-        if all(map(lambda x: x == min_ and x != max_, ratios)):
-            retVal = maxItem[0]
+            if all(map(lambda x: x == min_ and x != max_, ratios)):
+                retVal = maxItem[0]

-        elif all(map(lambda x: x != min_ and x == max_, ratios)):
-            retVal = minItem[0]
+            elif all(map(lambda x: x != min_ and x == max_, ratios)):
+                retVal = minItem[0]

-        elif abs(max_ - min_) >= MIN_STATISTICAL_RANGE:
-            deviation = stdev(ratios)
-            lower, upper = average(ratios) - UNION_STDEV_COEFF * deviation, average(ratios) + UNION_STDEV_COEFF * deviation
+            elif abs(max_ - min_) >= MIN_STATISTICAL_RANGE:
+                deviation = stdev(ratios)
+                lower, upper = average(ratios) - UNION_STDEV_COEFF * deviation, average(ratios) + UNION_STDEV_COEFF * deviation

-            if min_ < lower:
-                retVal = minItem[0]
+                if min_ < lower:
+                    retVal = minItem[0]

-            if max_ > upper:
-                if retVal is None or abs(max_ - upper) > abs(min_ - lower):
-                    retVal = maxItem[0]
-
-    kb.errorIsNone = popValue()
+                if max_ > upper:
+                    if retVal is None or abs(max_ - upper) > abs(min_ - lower):
+                        retVal = maxItem[0]
+    finally:
+        kb.errorIsNone = popValue()

     if retVal:
         infoMsg = "target URL appears to be UNION injectable with %d columns" % retVal

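Besides exceptions, this function can also leave through the early return found inside the ORDER BY shortcut, which comes after pushValue(kb.errorIsNone) in both versions; a finally clause runs on return as well as on exceptions, so the new layout restores kb.errorIsNone on that path too. A tiny sketch of that behaviour (names and values are illustrative, not sqlmap's):

# Sketch: "finally" also runs when the try block exits via "return", so an
# early ORDER BY exit no longer skips the restore. Names are illustrative.

_saved = []

class kb:
    errorIsNone = True

def find_union_char_count(order_by_hit):
    try:
        _saved.append(kb.errorIsNone)      # pushValue(kb.errorIsNone)
        kb.errorIsNone = False
        if order_by_hit:
            return 3                       # early exit: ORDER BY gave the count
        return None                        # otherwise fall through to ratio checks
    finally:
        kb.errorIsNone = _saved.pop()      # popValue() runs on both return paths

assert find_union_char_count(True) == 3
assert kb.errorIsNone is True              # restored despite the early return
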
@@ -742,32 +742,33 @@ class Databases:
         infoMsg = "enumerating database management system schema"
         logger.info(infoMsg)

-        pushValue(conf.db)
-        pushValue(conf.tbl)
-        pushValue(conf.col)
+        try:
+            pushValue(conf.db)
+            pushValue(conf.tbl)
+            pushValue(conf.col)

-        kb.data.cachedTables = {}
-        kb.data.cachedColumns = {}
+            kb.data.cachedTables = {}
+            kb.data.cachedColumns = {}

-        self.getTables()
+            self.getTables()

-        infoMsg = "fetched tables: "
-        infoMsg += ", ".join(["%s" % ", ".join("%s%s%s" % (unsafeSQLIdentificatorNaming(db), ".." if \
-                   Backend.isDbms(DBMS.MSSQL) or Backend.isDbms(DBMS.SYBASE) \
-                   else ".", unsafeSQLIdentificatorNaming(t)) for t in tbl) for db, tbl in \
-                   kb.data.cachedTables.items()])
-        logger.info(infoMsg)
+            infoMsg = "fetched tables: "
+            infoMsg += ", ".join(["%s" % ", ".join("%s%s%s" % (unsafeSQLIdentificatorNaming(db), ".." if \
+                       Backend.isDbms(DBMS.MSSQL) or Backend.isDbms(DBMS.SYBASE) \
+                       else ".", unsafeSQLIdentificatorNaming(t)) for t in tbl) for db, tbl in \
+                       kb.data.cachedTables.items()])
+            logger.info(infoMsg)

-        for db, tables in kb.data.cachedTables.items():
-            for tbl in tables:
-                conf.db = db
-                conf.tbl = tbl
+            for db, tables in kb.data.cachedTables.items():
+                for tbl in tables:
+                    conf.db = db
+                    conf.tbl = tbl

-                self.getColumns()
-
-        conf.col = popValue()
-        conf.tbl = popValue()
-        conf.db = popValue()
+                    self.getColumns()
+        finally:
+            conf.col = popValue()
+            conf.tbl = popValue()
+            conf.db = popValue()

         return kb.data.cachedColumns

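getSchema() saves three settings on the shared value stack (conf.db, conf.tbl, conf.col), and the finally block restores them in reverse order: the last value pushed has to be the first one popped. A compact sketch of that invariant, again with stand-in helpers and a stub enumeration rather than sqlmap's own:

# Sketch: several values saved on one shared stack must be restored in
# reverse (LIFO) order, exactly as the finally block above does.
# pushValue/popValue and the enumeration stub are stand-ins.

_stack = []

def pushValue(value):
    _stack.append(value)

def popValue():
    return _stack.pop()

class conf:
    db, tbl, col = "testdb", None, None

def get_schema(enumerate_columns):
    try:
        pushValue(conf.db)
        pushValue(conf.tbl)
        pushValue(conf.col)
        enumerate_columns()          # rewrites conf.db/tbl/col per table, may raise
    finally:
        conf.col = popValue()        # reverse order of the pushes above
        conf.tbl = popValue()
        conf.db = popValue()

def walk_tables():
    conf.db, conf.tbl, conf.col = "information_schema", "tables", "*"
    raise RuntimeError("connection lost mid-enumeration")

try:
    get_schema(walk_tables)
except RuntimeError:
    pass

assert (conf.db, conf.tbl, conf.col) == ("testdb", None, None)  # all restored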