Mirror of https://github.com/sqlmapproject/sqlmap.git, synced 2024-11-22 09:36:35 +03:00
Adding new warning message
parent b86b7c06e9
commit 193f8190c4
@@ -15,6 +15,7 @@ import time
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
 from lib.core.common import findPageForms
+from lib.core.common import getSafeExString
 from lib.core.common import openFile
 from lib.core.common import readInput
 from lib.core.common import safeCSValue
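For context: getSafeExString, the one import this hunk adds, lives in lib/core/common.py and returns a printable message for a given exception; the second hunk below matches against that message. A minimal sketch of the idea, assuming only that behavior, not sqlmap's actual implementation:

    # Sketch of a getSafeExString-style helper (assumption: sqlmap's real
    # version in lib/core/common.py also handles encodings and edge cases).
    def get_safe_ex_string(ex):
        # Prefer explicit message attributes when the exception class sets them
        for attr in ("message", "msg"):
            value = getattr(ex, attr, None)
            if isinstance(value, str) and value:
                return value.strip()
        # Fall back to the generic string form of the exception
        return str(ex).strip()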
@@ -127,20 +128,26 @@ def crawl(target):
             message += "site's sitemap(.xml) [y/N] "
             test = readInput(message, default="n")
             if test[0] in ("y", "Y"):
+                found = True
                 items = None
                 url = urlparse.urljoin(target, "/sitemap.xml")
                 try:
                     items = parseSitemap(url)
+                except SqlmapConnectionException, ex:
+                    if "page not found" in getSafeExString(ex):
+                        found = False
+                        logger.warn("'sitemap.xml' not found")
                 except:
                     pass
                 finally:
-                    if items:
-                        for item in items:
-                            if re.search(r"(.*?)\?(.+)", item):
-                                threadData.shared.value.add(item)
-                        if conf.crawlDepth > 1:
-                            threadData.shared.unprocessed.update(items)
-                    logger.info("%s links found" % ("no" if not items else len(items)))
+                    if found:
+                        if items:
+                            for item in items:
+                                if re.search(r"(.*?)\?(.+)", item):
+                                    threadData.shared.value.add(item)
+                            if conf.crawlDepth > 1:
+                                threadData.shared.unprocessed.update(items)
+                        logger.info("%s links found" % ("no" if not items else len(items)))
 
         infoMsg = "starting crawler"
         if conf.bulkFile:
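In short, the commit introduces a found flag so that a missing sitemap.xml (a connection error whose message contains "page not found") now produces the new warning and skips the summary line, instead of the old behavior of silently swallowing the error and reporting "no links found". A standalone, modernized sketch of that control flow in Python 3 (fetching and parsing inlined with urllib; function and variable names are illustrative stand-ins, not sqlmap's API):

    import re
    import urllib.error
    import urllib.parse
    import urllib.request

    def collect_sitemap_links(target):
        found = True
        items = None
        url = urllib.parse.urljoin(target, "/sitemap.xml")
        try:
            with urllib.request.urlopen(url) as response:
                content = response.read().decode("utf-8", "replace")
            # Crude <loc> extraction; sqlmap's parseSitemap is more thorough
            items = re.findall(r"<loc>\s*(.+?)\s*</loc>", content)
        except urllib.error.HTTPError as ex:
            # Analogous to the "page not found" check on getSafeExString(ex)
            if ex.code == 404:
                found = False
                print("'sitemap.xml' not found")
        except Exception:
            pass
        finally:
            if found:
                print("%s links found" % ("no" if not items else len(items)))
        return items or []

Note that the re-indented finally block keeps the original filtering unchanged: only URLs matching r"(.*?)\?(.+)", i.e. those carrying a query string, are added to the shared target set, presumably because parameterless links offer no GET parameters to test.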