mirror of https://github.com/sqlmapproject/sqlmap.git

Minor patch

commit 7eb7bddb25
parent a90324d592
@@ -4422,8 +4422,8 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
         forms = ParseResponse(response, backwards_compat=False)
     except ParseError:
         if re.search(r"(?i)<!DOCTYPE html|<html", content or ""):
-            warnMsg = "badly formed HTML at the given URL ('%s'). Going to filter it" % url
-            logger.warning(warnMsg)
+            dbgMsg = "badly formed HTML at the given URL ('%s'). Going to filter it" % url
+            logger.debug(dbgMsg)
             filtered = _("".join(re.findall(FORM_SEARCH_REGEX, content)), url)

             if filtered and filtered != content:
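For context: when ParseResponse() raises ParseError on malformed markup, findPageForms() falls back to extracting only the <form> blocks and re-parsing those; this hunk merely downgrades the accompanying message from a warning to a debug entry. A minimal sketch of that fallback idea, using an illustrative pattern in place of sqlmap's FORM_SEARCH_REGEX constant (an assumption for the sketch, not necessarily the exact value):

import re

# Illustrative stand-in for FORM_SEARCH_REGEX (assumed pattern): grab the
# innermost <form>...</form> blocks from the raw markup.
FORM_SEARCH_REGEX = r"(?si)<form(?!.+<form).+?</form>"

def filter_forms(content):
    # When the full page fails to parse, keep only the <form> blocks and feed
    # that reduced markup back to the HTML form parser.
    return "".join(re.findall(FORM_SEARCH_REGEX, content or ""))

broken_html = "<html><body><div><form action='/login'><input name='user'></form>"
print(filter_forms(broken_html))
# <form action='/login'><input name='user'></form>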
@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr

 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.11.19"
+VERSION = "1.3.11.20"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
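The surrounding lines derive TYPE and VERSION_STRING directly from VERSION, so the bump is the only change needed. Re-evaluating the same expressions (copied from the hunk above) with the new value:

VERSION = "1.3.11.20"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

print(TYPE)            # dev (four version components, last one non-zero)
print(VERSION_STRING)  # sqlmap/1.3.11.20#dev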
@@ -117,7 +117,7 @@ def crawl(target):
                                if (extractRegexResult(r"\A[^?]+\.(?P<result>\w+)(\?|\Z)", url) or "").lower() not in CRAWL_EXCLUDE_EXTENSIONS:
                                    with kb.locks.value:
                                        threadData.shared.deeper.add(url)
-                                        if re.search(r"(.*?)\?(.+)", url) and not re.search(r"\?\d+\Z", url):
+                                        if re.search(r"(.*?)\?(.+)", url) and not re.search(r"\?(v=)?\d+\Z", url):
                                            threadData.shared.value.add(url)
                except UnicodeEncodeError:  # for non-HTML files
                    pass
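The crawler change widens the "numeric-only query string" exclusion so that URLs ending in a bare version token such as ?v=123 (typically cache-busting links) are also skipped. A small comparison of the old and new patterns on made-up sample URLs:

import re

OLD = r"\?\d+\Z"        # exclusion pattern before this commit
NEW = r"\?(v=)?\d+\Z"   # pattern after this commit

for url in ("http://example.com/page?1570000000",
            "http://example.com/page?v=1570000000",
            "http://example.com/page?id=1"):
    print(url, bool(re.search(OLD, url)), bool(re.search(NEW, url)))

# http://example.com/page?1570000000 True True      (skipped before and after)
# http://example.com/page?v=1570000000 False True   (newly skipped)
# http://example.com/page?id=1 False False          (still queued for testing)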