Merge branch 'master' of github.com:sqlmapproject/sqlmap

Bernardo Damele 2013-01-09 16:04:29 +00:00
commit 7f4ce4afbb
22 changed files with 229 additions and 221 deletions

lib/core/option.py

@@ -14,6 +14,7 @@ import socket
 import string
 import sys
 import threading
+import time
 import urllib2
 import urlparse
@@ -135,7 +136,7 @@ from lib.request.httpshandler import HTTPSHandler
 from lib.request.rangehandler import HTTPRangeHandler
 from lib.request.redirecthandler import SmartRedirectHandler
 from lib.request.templates import getPageTemplate
-from lib.utils.crawler import Crawler
+from lib.utils.crawler import crawl
 from lib.utils.deps import checkDependencies
 from lib.utils.google import Google
 from thirdparty.colorama.initialise import init as coloramainit
@@ -462,8 +463,7 @@ def _setCrawler():
     if not conf.crawlDepth:
         return

-    crawler = Crawler()
-    crawler.getTargetUrls()
+    crawl(conf.url)

 def _setGoogleDorking():
     """
@@ -571,15 +571,29 @@ def _findPageForms():
     if not conf.forms or conf.crawlDepth:
         return

-    if not checkConnection():
+    if conf.url and not checkConnection():
         return

     infoMsg = "searching for forms"
     logger.info(infoMsg)

-    page, _ = Request.queryPage(content=True)
-    findPageForms(page, conf.url, True, True)
+    if not conf.bulkFile:
+        page, _ = Request.queryPage(content=True)
+        findPageForms(page, conf.url, True, True)
+    else:
+        targets = getFileItems(conf.bulkFile)
+        for i in xrange(len(targets)):
+            try:
+                target = targets[i]
+                page, _, _ = Request.getPage(url=target.strip(), crawling=True, raise404=False)
+                findPageForms(page, target, False, True)
+
+                if conf.verbose in (1, 2):
+                    status = '%d/%d links visited (%d%%)' % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
+                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
+            except Exception, ex:
+                errMsg = "problem occurred while searching for forms at '%s' ('%s')" % (target, ex)
+                logger.error(errMsg)

 def _setDBMSAuthentication():
     """
@@ -1965,8 +1979,8 @@ def _basicOptionValidation():
         errMsg = "maximum number of used threads is %d avoiding possible connection issues" % MAX_NUMBER_OF_THREADS
         raise SqlmapSyntaxException(errMsg)

-    if conf.forms and not conf.url:
-        errMsg = "switch '--forms' requires usage of option '-u' (--url)"
+    if conf.forms and not any((conf.url, conf.bulkFile)):
+        errMsg = "switch '--forms' requires usage of option '-u' (--url) or '-m'"
         raise SqlmapSyntaxException(errMsg)

     if conf.requestFile and conf.url:
@@ -2009,8 +2023,8 @@ def _basicOptionValidation():
         errMsg = "option '--proxy' is incompatible with switch '--ignore-proxy'"
         raise SqlmapSyntaxException(errMsg)

-    if conf.forms and any([conf.logFile, conf.bulkFile, conf.direct, conf.requestFile, conf.googleDork]):
-        errMsg = "switch '--forms' is compatible only with option '-u' (--url)"
+    if conf.forms and any([conf.logFile, conf.direct, conf.requestFile, conf.googleDork]):
+        errMsg = "switch '--forms' is compatible only with options '-u' (--url) and '-m'"
         raise SqlmapSyntaxException(errMsg)

     if conf.timeSec < 1:
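This hunk and the @@ -1965 hunk above are two halves of one rule: '--forms' now accepts -u or -m as its page source and stays incompatible with the other target providers. A condensed sketch of the combined validation, with Conf as a hypothetical stand-in for sqlmap's global conf object:

    class Conf(object):
        # hypothetical stand-in for sqlmap's global conf object
        forms = True        # --forms
        url = None          # -u/--url
        bulkFile = "t.txt"  # -m
        logFile = direct = requestFile = googleDork = None

    conf = Conf()

    # '--forms' needs a page source: either a single URL or a bulk file ...
    if conf.forms and not any((conf.url, conf.bulkFile)):
        raise ValueError("switch '--forms' requires usage of option '-u' (--url) or '-m'")

    # ... and remains incompatible with the other target providers
    if conf.forms and any([conf.logFile, conf.direct, conf.requestFile, conf.googleDork]):
        raise ValueError("switch '--forms' is compatible only with options '-u' (--url) and '-m'")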

lib/core/testing.py

@@ -73,7 +73,7 @@ def smokeTest():
             retVal = False

         count += 1
-        status = '%d/%d (%d%s) ' % (count, length, round(100.0*count/length), '%')
+        status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length))
         dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status))

     clearConsoleLine()
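The recurring '%d%s' → '%d%%' rewrite in this commit is purely cosmetic: the literal percent sign is escaped inside the format string instead of being passed as an extra argument. A quick equivalence check:

    count, length = 3, 8
    old = '%d/%d (%d%s) ' % (count, length, round(100.0 * count / length), '%')
    new = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length))
    assert old == new == '3/8 (38%) '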

lib/techniques/blind/inference.py

@@ -412,7 +412,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
                 if conf.verbose in (1, 2) and not showEta:
                     _ = count - firstChar
                     output += '_' * (min(length, conf.progressWidth) - len(output))
-                    status = ' %d/%d (%d%s)' % (_, length, round(100.0 * _ / length), '%')
+                    status = ' %d/%d (%d%%)' % (_, length, round(100.0 * _ / length))
                     output += status if _ != length else " " * len(status)
                     dataToStdout("\r[%s] [INFO] retrieved: %s" % (time.strftime("%X"), filterControlChars(output)))

lib/utils/brute.py

@@ -101,7 +101,7 @@ def tableExists(tableFile, regex=None):
             dataToStdout(infoMsg, True)

         if conf.verbose in (1, 2):
-            status = '%d/%d items (%d%s)' % (threadData.shared.count, threadData.shared.limit, round(100.0*threadData.shared.count/threadData.shared.limit), '%')
+            status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
             dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)

         kb.locks.io.release()
@@ -192,7 +192,7 @@ def columnExists(columnFile, regex=None):
             dataToStdout(infoMsg, True)

         if conf.verbose in (1, 2):
-            status = '%d/%d items (%d%s)' % (threadData.shared.count, threadData.shared.limit, round(100.0*threadData.shared.count/threadData.shared.limit), '%')
+            status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
             dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)

         kb.locks.io.release()
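Both brute-force hunks format progress that worker threads print under an I/O lock. A rough sketch of that surrounding pattern, with io_lock and Shared as illustrative stand-ins for kb.locks.io and threadData.shared:

    import threading

    io_lock = threading.Lock()   # stand-in for kb.locks.io

    class Shared(object):        # stand-in for threadData.shared
        count = 0
        limit = 8

    shared = Shared()

    def brute_one():
        # ... a real worker would test one candidate table/column name here ...
        io_lock.acquire()
        try:
            shared.count += 1
            status = '%d/%d items (%d%%)' % (shared.count, shared.limit, round(100.0 * shared.count / shared.limit))
            print('\r[INFO] tried %s' % status)
        finally:
            io_lock.release()

    threads = [threading.Thread(target=brute_one) for _ in xrange(shared.limit)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()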

lib/utils/crawler.py

@@ -25,13 +25,7 @@ from lib.request.connect import Connect as Request
 from thirdparty.beautifulsoup.beautifulsoup import BeautifulSoup
 from thirdparty.oset.pyoset import oset

-class Crawler(object):
-    """
-    This class defines methods used to perform crawling (command
-    line option '--crawl'
-    """
-
-    def getTargetUrls(self):
+def crawl(target):
     try:
         threadData = getCurrentThreadData()
         threadData.shared.value = oset()
@@ -78,10 +72,10 @@ class Crawler(object):
             href = tag.get("href") if hasattr(tag, "get") else tag.group("href")

             if href:
-                url = urlparse.urljoin(conf.url, href)
+                url = urlparse.urljoin(target, href)

                 # flag to know if we are dealing with the same target host
-                _ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (url, conf.url)))
+                _ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (url, target)))

                 if conf.scope:
                     if not re.search(conf.scope, url, re.I):
@@ -102,11 +96,11 @@ class Crawler(object):
                 if conf.verbose in (1, 2):
                     threadData.shared.count += 1
-                    status = '%d/%d links visited (%d%s)' % (threadData.shared.count, threadData.shared.length, round(100.0*threadData.shared.count/threadData.shared.length), '%')
+                    status = '%d/%d links visited (%d%%)' % (threadData.shared.count, threadData.shared.length, round(100.0 * threadData.shared.count / threadData.shared.length))
                     dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)

     threadData.shared.deeper = set()
-    threadData.shared.unprocessed = set([conf.url])
+    threadData.shared.unprocessed = set([target])

     logger.info("starting crawler")
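The refactor collapses the one-method Crawler class into a plain crawl(target) function and threads the starting URL through as a parameter instead of reading conf.url, which is what lets the new bulk mode crawl several hosts in one run. The same-host test it parameterizes reduces to comparing hostnames after urljoin; a small sketch using Python 2's urlparse, as in the codebase:

    import urlparse  # Python 2 module, as in the codebase (urllib.parse on Python 3)

    def same_host(url, target):
        # hostname comparison with any ':port' suffix stripped, equivalent to
        # the netloc.split(':')[0] expression in the hunk above
        host = lambda u: urlparse.urlparse(u).netloc.split(':')[0]
        return host(url) == host(target)

    url = urlparse.urljoin("http://example.com/index.php", "/page?id=1")
    assert url == "http://example.com/page?id=1"
    assert same_host(url, "http://example.com:8080/")      # port differences are ignored
    assert not same_host(url, "http://evil.example.net/")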