From bb18c4db09e2769999dcfd6055804322a39bb7f5 Mon Sep 17 00:00:00 2001
From: Miroslav Stampar
Date: Mon, 4 Nov 2019 22:43:28 +0100
Subject: [PATCH] Removing -x as I doubt that anybody uses it

---
 lib/core/option.py      | 42 ++++++++----------------------------------
 lib/core/optiondict.py  |  1 -
 lib/core/settings.py    |  2 +-
 lib/parse/cmdline.py    |  7 ++-----
 lib/parse/configfile.py |  4 ++--
 lib/utils/crawler.py    | 47 +++++++++++++++++++++------------------------
 sqlmap.conf             |  4 ----
 7 files changed, 36 insertions(+), 71 deletions(-)

diff --git a/lib/core/option.py b/lib/core/option.py
index b7ae66269..ca7c07bc3 100644
--- a/lib/core/option.py
+++ b/lib/core/option.py
@@ -336,13 +336,10 @@ def _setCrawler():
     if not conf.crawlDepth:
         return
 
-    if not any((conf.bulkFile, conf.sitemapUrl)):
+    if not conf.bulkFile:
         crawl(conf.url)
     else:
-        if conf.bulkFile:
-            targets = getFileItems(conf.bulkFile)
-        else:
-            targets = list(parseSitemap(conf.sitemapUrl))
+        targets = getFileItems(conf.bulkFile)
 
         for i in xrange(len(targets)):
             try:
@@ -443,23 +440,6 @@ def _setBulkMultipleTargets():
         warnMsg = "no usable links found (with GET parameters)"
         logger.warn(warnMsg)
 
-def _setSitemapTargets():
-    if not conf.sitemapUrl:
-        return
-
-    infoMsg = "parsing sitemap '%s'" % conf.sitemapUrl
-    logger.info(infoMsg)
-
-    found = False
-    for item in parseSitemap(conf.sitemapUrl):
-        if re.match(r"[^ ]+\?(.+)", item, re.I):
-            found = True
-            kb.targets.add((item.strip(), None, None, None, None))
-
-    if not found and not conf.forms and not conf.crawlDepth:
-        warnMsg = "no usable links found (with GET parameters)"
-        logger.warn(warnMsg)
-
 def _findPageForms():
     if not conf.forms or conf.crawlDepth:
         return
@@ -471,15 +451,13 @@ def _findPageForms():
     infoMsg = "searching for forms"
     logger.info(infoMsg)
 
-    if not any((conf.bulkFile, conf.googleDork, conf.sitemapUrl)):
+    if not any((conf.bulkFile, conf.googleDork)):
         page, _, _ = Request.queryPage(content=True, ignoreSecondOrder=True)
         if findPageForms(page, conf.url, True, True):
             found = True
     else:
         if conf.bulkFile:
             targets = getFileItems(conf.bulkFile)
-        elif conf.sitemapUrl:
-            targets = list(parseSitemap(conf.sitemapUrl))
         elif conf.googleDork:
             targets = [_[0] for _ in kb.targets]
             kb.targets.clear()
@@ -1653,16 +1631,13 @@ def _cleanupOptions():
     if conf.fileDest:
         conf.fileDest = ntToPosixSlashes(normalizePath(conf.fileDest))
 
-    if conf.sitemapUrl and not conf.sitemapUrl.lower().startswith("http"):
-        conf.sitemapUrl = "http%s://%s" % ('s' if conf.forceSSL else '', conf.sitemapUrl)
-
     if conf.msfPath:
         conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath))
 
     if conf.tmpPath:
         conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))
 
-    if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.forms, conf.crawlDepth)):
+    if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.forms, conf.crawlDepth)):
         conf.multipleTargets = True
 
     if conf.optimize:
@@ -2508,8 +2483,8 @@ def _basicOptionValidation():
         errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS
         raise SqlmapSyntaxException(errMsg)
 
-    if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile, conf.sitemapUrl)):
-        errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'"
+    if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile)):
+        errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g' or '-m'"
         raise SqlmapSyntaxException(errMsg)
 
     if conf.crawlExclude and not conf.crawlDepth:
@@ -2610,7 +2585,7 @@ def _basicOptionValidation():
         errMsg = "value for option '--union-char' must be an alpha-numeric value (e.g. 1)"
        raise SqlmapSyntaxException(errMsg)
 
-    if conf.hashFile and any((conf.direct, conf.url, conf.logFile, conf.bulkFile, conf.googleDork, conf.configFile, conf.requestFile, conf.updateAll, conf.smokeTest, conf.liveTest, conf.wizard, conf.dependencies, conf.purge, conf.sitemapUrl, conf.listTampers)):
+    if conf.hashFile and any((conf.direct, conf.url, conf.logFile, conf.bulkFile, conf.googleDork, conf.configFile, conf.requestFile, conf.updateAll, conf.smokeTest, conf.liveTest, conf.wizard, conf.dependencies, conf.purge, conf.listTampers)):
         errMsg = "option '--crack' should be used as a standalone"
         raise SqlmapSyntaxException(errMsg)
 
@@ -2677,7 +2652,7 @@ def init():
 
     parseTargetDirect()
 
-    if any((conf.url, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.requestFile, conf.googleDork, conf.liveTest)):
+    if any((conf.url, conf.logFile, conf.bulkFile, conf.requestFile, conf.googleDork, conf.liveTest)):
         _setHostname()
         _setHTTPTimeout()
         _setHTTPExtraHeaders()
@@ -2692,7 +2667,6 @@ def init():
         _setSafeVisit()
         _doSearch()
         _setBulkMultipleTargets()
-        _setSitemapTargets()
         _checkTor()
         _setCrawler()
         _findPageForms()
diff --git a/lib/core/optiondict.py b/lib/core/optiondict.py
index 0d90eedc9..8da3b0399 100644
--- a/lib/core/optiondict.py
+++ b/lib/core/optiondict.py
@@ -19,7 +19,6 @@ optDict = {
         "sessionFile": "string",
         "googleDork": "string",
         "configFile": "string",
-        "sitemapUrl": "string",
     },
 
     "Request": {
diff --git a/lib/core/settings.py b/lib/core/settings.py
index 3a9720c4c..93435b1e4 100644
--- a/lib/core/settings.py
+++ b/lib/core/settings.py
@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (...)
-VERSION = "1.3.11.2"
+VERSION = "1.3.11.3"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
diff --git a/lib/parse/cmdline.py b/lib/parse/cmdline.py
index 01b100b06..b11af39d2 100644
--- a/lib/parse/cmdline.py
+++ b/lib/parse/cmdline.py
@@ -130,9 +130,6 @@ def cmdLineParser(argv=None):
         target.add_argument("-l", dest="logFile",
             help="Parse target(s) from Burp or WebScarab proxy log file")
 
-        target.add_argument("-x", dest="sitemapUrl",
-            help="Parse target(s) from remote sitemap(.xml) file")
-
         target.add_argument("-m", dest="bulkFile",
             help="Scan multiple targets given in a textual file ")
 
@@ -994,8 +991,8 @@ def cmdLineParser(argv=None):
         if args.dummy:
             args.url = args.url or DUMMY_URL
 
-        if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.vulnTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.sitemapUrl, args.listTampers, args.hashFile)):
-            errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --list-tampers, --wizard, --update, --purge or --dependencies). "
+        if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.vulnTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.listTampers, args.hashFile)):
+            errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, --list-tampers, --wizard, --update, --purge or --dependencies). "
             errMsg += "Use -h for basic and -hh for advanced help\n"
             parser.error(errMsg)
 
diff --git a/lib/parse/configfile.py b/lib/parse/configfile.py
index dc6c4ac63..aa1c207b7 100644
--- a/lib/parse/configfile.py
+++ b/lib/parse/configfile.py
@@ -79,14 +79,14 @@ def configFileParser(configFile):
 
     mandatory = False
 
-    for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "sitemapUrl", "wizard"):
+    for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "wizard"):
         if config.has_option("Target", option) and config.get("Target", option) or cmdLineOptions.get(option):
             mandatory = True
             break
 
     if not mandatory:
         errMsg = "missing a mandatory option in the configuration file "
-        errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile, sitemapUrl or wizard)"
+        errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile or wizard)"
         raise SqlmapMissingMandatoryOptionException(errMsg)
 
     for family, optionData in optDict.items():
diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py
index 107e05e72..2ca95da71 100644
--- a/lib/utils/crawler.py
+++ b/lib/utils/crawler.py
@@ -133,31 +133,30 @@ def crawl(target):
         threadData.shared.deeper = set()
         threadData.shared.unprocessed = set([target])
 
-        if not conf.sitemapUrl:
-            message = "do you want to check for the existence of "
-            message += "site's sitemap(.xml) [y/N] "
+        message = "do you want to check for the existence of "
+        message += "site's sitemap(.xml) [y/N] "
 
-            if readInput(message, default='N', boolean=True):
-                found = True
-                items = None
-                url = _urllib.parse.urljoin(target, "/sitemap.xml")
-                try:
-                    items = parseSitemap(url)
-                except SqlmapConnectionException as ex:
-                    if "page not found" in getSafeExString(ex):
-                        found = False
-                        logger.warn("'sitemap.xml' not found")
-                except:
-                    pass
-                finally:
-                    if found:
-                        if items:
-                            for item in items:
-                                if re.search(r"(.*?)\?(.+)", item):
-                                    threadData.shared.value.add(item)
-                            if conf.crawlDepth > 1:
-                                threadData.shared.unprocessed.update(items)
-                        logger.info("%s links found" % ("no" if not items else len(items)))
+        if readInput(message, default='N', boolean=True):
+            found = True
+            items = None
+            url = _urllib.parse.urljoin(target, "/sitemap.xml")
+            try:
+                items = parseSitemap(url)
+            except SqlmapConnectionException as ex:
+                if "page not found" in getSafeExString(ex):
+                    found = False
+                    logger.warn("'sitemap.xml' not found")
+            except:
+                pass
+            finally:
+                if found:
+                    if items:
+                        for item in items:
+                            if re.search(r"(.*?)\?(.+)", item):
+                                threadData.shared.value.add(item)
+                        if conf.crawlDepth > 1:
+                            threadData.shared.unprocessed.update(items)
+                    logger.info("%s links found" % ("no" if not items else len(items)))
 
         infoMsg = "starting crawler"
         if conf.bulkFile:
diff --git a/sqlmap.conf b/sqlmap.conf
index e2fa753c4..8b3df5077 100644
--- a/sqlmap.conf
+++ b/sqlmap.conf
@@ -32,10 +32,6 @@ requestFile =
 # Example: +ext:php +inurl:"&id=" +intext:"powered by "
 googleDork =
 
-# Parse target(s) from remote sitemap(.xml) file.
-# Example: http://192.168.1.121/sitemap.xml
-sitemapUrl =
-
 # These options can be used to specify how to connect to the target URL.
 [Request]