Removing the -x option (parse targets from a remote sitemap(.xml) file) as I doubt that anybody uses it

This commit is contained in:
Miroslav Stampar 2019-11-04 22:43:28 +01:00
parent ce9618c307
commit bb18c4db09
7 changed files with 36 additions and 71 deletions

View File

@ -336,13 +336,10 @@ def _setCrawler():
if not conf.crawlDepth: if not conf.crawlDepth:
return return
if not any((conf.bulkFile, conf.sitemapUrl)): if not conf.bulkFile:
crawl(conf.url) crawl(conf.url)
else: else:
if conf.bulkFile: targets = getFileItems(conf.bulkFile)
targets = getFileItems(conf.bulkFile)
else:
targets = list(parseSitemap(conf.sitemapUrl))
for i in xrange(len(targets)): for i in xrange(len(targets)):
try: try:
@ -443,23 +440,6 @@ def _setBulkMultipleTargets():
warnMsg = "no usable links found (with GET parameters)" warnMsg = "no usable links found (with GET parameters)"
logger.warn(warnMsg) logger.warn(warnMsg)
def _setSitemapTargets():
if not conf.sitemapUrl:
return
infoMsg = "parsing sitemap '%s'" % conf.sitemapUrl
logger.info(infoMsg)
found = False
for item in parseSitemap(conf.sitemapUrl):
if re.match(r"[^ ]+\?(.+)", item, re.I):
found = True
kb.targets.add((item.strip(), None, None, None, None))
if not found and not conf.forms and not conf.crawlDepth:
warnMsg = "no usable links found (with GET parameters)"
logger.warn(warnMsg)
def _findPageForms(): def _findPageForms():
if not conf.forms or conf.crawlDepth: if not conf.forms or conf.crawlDepth:
return return
@ -471,15 +451,13 @@ def _findPageForms():
infoMsg = "searching for forms" infoMsg = "searching for forms"
logger.info(infoMsg) logger.info(infoMsg)
if not any((conf.bulkFile, conf.googleDork, conf.sitemapUrl)): if not any((conf.bulkFile, conf.googleDork)):
page, _, _ = Request.queryPage(content=True, ignoreSecondOrder=True) page, _, _ = Request.queryPage(content=True, ignoreSecondOrder=True)
if findPageForms(page, conf.url, True, True): if findPageForms(page, conf.url, True, True):
found = True found = True
else: else:
if conf.bulkFile: if conf.bulkFile:
targets = getFileItems(conf.bulkFile) targets = getFileItems(conf.bulkFile)
elif conf.sitemapUrl:
targets = list(parseSitemap(conf.sitemapUrl))
elif conf.googleDork: elif conf.googleDork:
targets = [_[0] for _ in kb.targets] targets = [_[0] for _ in kb.targets]
kb.targets.clear() kb.targets.clear()
@ -1653,16 +1631,13 @@ def _cleanupOptions():
if conf.fileDest: if conf.fileDest:
conf.fileDest = ntToPosixSlashes(normalizePath(conf.fileDest)) conf.fileDest = ntToPosixSlashes(normalizePath(conf.fileDest))
if conf.sitemapUrl and not conf.sitemapUrl.lower().startswith("http"):
conf.sitemapUrl = "http%s://%s" % ('s' if conf.forceSSL else '', conf.sitemapUrl)
if conf.msfPath: if conf.msfPath:
conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath)) conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath))
if conf.tmpPath: if conf.tmpPath:
conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath)) conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))
if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.forms, conf.crawlDepth)): if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.forms, conf.crawlDepth)):
conf.multipleTargets = True conf.multipleTargets = True
if conf.optimize: if conf.optimize:
@ -2508,8 +2483,8 @@ def _basicOptionValidation():
errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS
raise SqlmapSyntaxException(errMsg) raise SqlmapSyntaxException(errMsg)
if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile, conf.sitemapUrl)): if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile)):
errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'" errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g' or '-m'"
raise SqlmapSyntaxException(errMsg) raise SqlmapSyntaxException(errMsg)
if conf.crawlExclude and not conf.crawlDepth: if conf.crawlExclude and not conf.crawlDepth:
@ -2610,7 +2585,7 @@ def _basicOptionValidation():
errMsg = "value for option '--union-char' must be an alpha-numeric value (e.g. 1)" errMsg = "value for option '--union-char' must be an alpha-numeric value (e.g. 1)"
raise SqlmapSyntaxException(errMsg) raise SqlmapSyntaxException(errMsg)
if conf.hashFile and any((conf.direct, conf.url, conf.logFile, conf.bulkFile, conf.googleDork, conf.configFile, conf.requestFile, conf.updateAll, conf.smokeTest, conf.liveTest, conf.wizard, conf.dependencies, conf.purge, conf.sitemapUrl, conf.listTampers)): if conf.hashFile and any((conf.direct, conf.url, conf.logFile, conf.bulkFile, conf.googleDork, conf.configFile, conf.requestFile, conf.updateAll, conf.smokeTest, conf.liveTest, conf.wizard, conf.dependencies, conf.purge, conf.listTampers)):
errMsg = "option '--crack' should be used as a standalone" errMsg = "option '--crack' should be used as a standalone"
raise SqlmapSyntaxException(errMsg) raise SqlmapSyntaxException(errMsg)
@ -2677,7 +2652,7 @@ def init():
parseTargetDirect() parseTargetDirect()
if any((conf.url, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.requestFile, conf.googleDork, conf.liveTest)): if any((conf.url, conf.logFile, conf.bulkFile, conf.requestFile, conf.googleDork, conf.liveTest)):
_setHostname() _setHostname()
_setHTTPTimeout() _setHTTPTimeout()
_setHTTPExtraHeaders() _setHTTPExtraHeaders()
@ -2692,7 +2667,6 @@ def init():
_setSafeVisit() _setSafeVisit()
_doSearch() _doSearch()
_setBulkMultipleTargets() _setBulkMultipleTargets()
_setSitemapTargets()
_checkTor() _checkTor()
_setCrawler() _setCrawler()
_findPageForms() _findPageForms()

View File

@ -19,7 +19,6 @@ optDict = {
"sessionFile": "string", "sessionFile": "string",
"googleDork": "string", "googleDork": "string",
"configFile": "string", "configFile": "string",
"sitemapUrl": "string",
}, },
"Request": { "Request": {

View File

@ -18,7 +18,7 @@ from lib.core.enums import OS
from thirdparty.six import unichr as _unichr from thirdparty.six import unichr as _unichr
# sqlmap version (<major>.<minor>.<month>.<monthly commit>) # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.3.11.2" VERSION = "1.3.11.3"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable" TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34} TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE) VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

View File

@ -130,9 +130,6 @@ def cmdLineParser(argv=None):
target.add_argument("-l", dest="logFile", target.add_argument("-l", dest="logFile",
help="Parse target(s) from Burp or WebScarab proxy log file") help="Parse target(s) from Burp or WebScarab proxy log file")
target.add_argument("-x", dest="sitemapUrl",
help="Parse target(s) from remote sitemap(.xml) file")
target.add_argument("-m", dest="bulkFile", target.add_argument("-m", dest="bulkFile",
help="Scan multiple targets given in a textual file ") help="Scan multiple targets given in a textual file ")
@ -994,8 +991,8 @@ def cmdLineParser(argv=None):
if args.dummy: if args.dummy:
args.url = args.url or DUMMY_URL args.url = args.url or DUMMY_URL
if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.vulnTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.sitemapUrl, args.listTampers, args.hashFile)): if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.vulnTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.listTampers, args.hashFile)):
errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --list-tampers, --wizard, --update, --purge or --dependencies). " errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, --list-tampers, --wizard, --update, --purge or --dependencies). "
errMsg += "Use -h for basic and -hh for advanced help\n" errMsg += "Use -h for basic and -hh for advanced help\n"
parser.error(errMsg) parser.error(errMsg)

View File

@ -79,14 +79,14 @@ def configFileParser(configFile):
mandatory = False mandatory = False
for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "sitemapUrl", "wizard"): for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "wizard"):
if config.has_option("Target", option) and config.get("Target", option) or cmdLineOptions.get(option): if config.has_option("Target", option) and config.get("Target", option) or cmdLineOptions.get(option):
mandatory = True mandatory = True
break break
if not mandatory: if not mandatory:
errMsg = "missing a mandatory option in the configuration file " errMsg = "missing a mandatory option in the configuration file "
errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile, sitemapUrl or wizard)" errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile or wizard)"
raise SqlmapMissingMandatoryOptionException(errMsg) raise SqlmapMissingMandatoryOptionException(errMsg)
for family, optionData in optDict.items(): for family, optionData in optDict.items():

View File

@ -133,31 +133,30 @@ def crawl(target):
threadData.shared.deeper = set() threadData.shared.deeper = set()
threadData.shared.unprocessed = set([target]) threadData.shared.unprocessed = set([target])
if not conf.sitemapUrl: message = "do you want to check for the existence of "
message = "do you want to check for the existence of " message += "site's sitemap(.xml) [y/N] "
message += "site's sitemap(.xml) [y/N] "
if readInput(message, default='N', boolean=True): if readInput(message, default='N', boolean=True):
found = True found = True
items = None items = None
url = _urllib.parse.urljoin(target, "/sitemap.xml") url = _urllib.parse.urljoin(target, "/sitemap.xml")
try: try:
items = parseSitemap(url) items = parseSitemap(url)
except SqlmapConnectionException as ex: except SqlmapConnectionException as ex:
if "page not found" in getSafeExString(ex): if "page not found" in getSafeExString(ex):
found = False found = False
logger.warn("'sitemap.xml' not found") logger.warn("'sitemap.xml' not found")
except: except:
pass pass
finally: finally:
if found: if found:
if items: if items:
for item in items: for item in items:
if re.search(r"(.*?)\?(.+)", item): if re.search(r"(.*?)\?(.+)", item):
threadData.shared.value.add(item) threadData.shared.value.add(item)
if conf.crawlDepth > 1: if conf.crawlDepth > 1:
threadData.shared.unprocessed.update(items) threadData.shared.unprocessed.update(items)
logger.info("%s links found" % ("no" if not items else len(items))) logger.info("%s links found" % ("no" if not items else len(items)))
infoMsg = "starting crawler" infoMsg = "starting crawler"
if conf.bulkFile: if conf.bulkFile:

View File

@ -32,10 +32,6 @@ requestFile =
# Example: +ext:php +inurl:"&id=" +intext:"powered by " # Example: +ext:php +inurl:"&id=" +intext:"powered by "
googleDork = googleDork =
# Parse target(s) from remote sitemap(.xml) file.
# Example: http://192.168.1.121/sitemap.xml
sitemapUrl =
# These options can be used to specify how to connect to the target URL. # These options can be used to specify how to connect to the target URL.
[Request] [Request]