Patch related to #3438

Miroslav Stampar 2019-01-17 15:06:00 +01:00
parent 669afdd81b
commit 7eb45b9d8f
3 changed files with 18 additions and 7 deletions

lib/core/settings.py

@@ -19,7 +19,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.1.52"
+VERSION = "1.3.1.53"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
@@ -109,7 +109,7 @@ MAX_MURPHY_SLEEP_TIME = 3
 GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&amp;cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"
 
 # Regular expression used for extracting results from DuckDuckGo search
-DUCKDUCKGO_REGEX = r'"u":"([^"]+)'
+DUCKDUCKGO_REGEX = r'<a class="result__url" href="(htt[^"]+)'
 
 # Regular expression used for extracting results from Bing search
 BING_REGEX = r'<h2><a href="([^"]+)" h='

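Illustrative sketch (not part of the commit): how the updated DUCKDUCKGO_REGEX is meant to pull result links out of DuckDuckGo's HTML results markup, replacing the old JSON-oriented "u":"..." pattern. The sample snippet below is assumed for demonstration only, not taken from a live response.

import re

DUCKDUCKGO_REGEX = r'<a class="result__url" href="(htt[^"]+)'

# assumed sample of a single result anchor from the /html/ endpoint
sample = '<a class="result__url" href="https://example.com/index.php?id=1">example.com/index.php?id=1</a>'

print([match.group(1) for match in re.finditer(DUCKDUCKGO_REGEX, sample, re.I | re.S)])
# ['https://example.com/index.php?id=1']
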
lib/utils/search.py

@@ -45,6 +45,7 @@ def _search(dork):
     if not dork:
         return None
 
+    data = None
     headers = {}
 
     headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
@@ -123,12 +124,12 @@ def _search(dork):
             url = "https://www.bing.com/search?q=%s&first=%d" % (urlencode(dork, convall=True), (gpage - 1) * 10 + 1)
             regex = BING_REGEX
         else:
-            url = "https://duckduckgo.com/d.js?"
-            url += "q=%s&p=%d&s=100" % (urlencode(dork, convall=True), gpage)
+            url = "https://duckduckgo.com/html/"
+            data = "q=%s&s=%d" % (urlencode(dork, convall=True), (gpage - 1) * 30)
             regex = DUCKDUCKGO_REGEX
 
         try:
-            req = urllib2.Request(url, headers=headers)
+            req = urllib2.Request(url, data=data, headers=headers)
             conn = urllib2.urlopen(req)
 
             requestMsg = "HTTP request:\nGET %s" % url
@@ -152,6 +153,7 @@ def _search(dork):
         except urllib2.HTTPError, e:
             try:
                 page = e.read()
+                page = decodePage(page, e.headers.get("Content-Encoding"), e.headers.get("Content-Type"))
             except socket.timeout:
                 warnMsg = "connection timed out while trying "
                 warnMsg += "to get error page information (%d)" % e.code
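Illustrative sketch (not part of the commit), in the same Python 2 style as the patched module: the body of an urllib2.HTTPError can arrive compressed, so it has to be decoded before the later substring and regex checks can see its real content, which is what the added decodePage() call handles inside sqlmap. decodePage() is sqlmap's own helper, so the stand-in below only covers the gzip case.

import gzip
import StringIO

def decode_error_page(raw, content_encoding):
    # Minimal stand-in for sqlmap's decodePage(): gunzip the body when the
    # server declares gzip compression, otherwise pass it through unchanged.
    if content_encoding and "gzip" in content_encoding.lower():
        return gzip.GzipFile(fileobj=StringIO.StringIO(raw)).read()
    return raw
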
@@ -163,6 +165,15 @@ def _search(dork):
 
         retVal = [urllib.unquote(match.group(1)) for match in re.finditer(regex, page, re.I | re.S)]
 
+        if not retVal and "issue with the Tor Exit Node you are currently using" in page:
+            warnMsg = "DuckDuckGo has detected 'unusual' traffic from "
+            warnMsg += "used (Tor) IP address"
+
+            if conf.proxyList:
+                raise SqlmapBaseException(warnMsg)
+            else:
+                logger.critical(warnMsg)
+
     return retVal
 
 @stackedmethod

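Illustrative sketch (not part of the commit), again in Python 2 style: passing a non-None data argument to urllib2.Request turns the request into a POST, which is what the switch from the old "d.js" GET endpoint to "https://duckduckgo.com/html/" relies on. urllib.quote_plus() stands in for sqlmap's own urlencode(dork, convall=True) helper, and the User-Agent value is just a placeholder.

import urllib
import urllib2

def duckduckgo_search(dork, gpage=1):
    # POST the dork to the HTML endpoint; the result offset grows in steps of 30
    url = "https://duckduckgo.com/html/"
    data = "q=%s&s=%d" % (urllib.quote_plus(dork), (gpage - 1) * 30)
    headers = {"User-agent": "Mozilla/5.0"}  # placeholder value

    req = urllib2.Request(url, data=data, headers=headers)  # data != None => POST
    return urllib2.urlopen(req).read()
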
txt/checksum.md5

@@ -49,7 +49,7 @@ fe370021c6bc99daf44b2bfc0d1effb3 lib/core/patch.py
 9a7d68d5fa01561500423791f15cc676 lib/core/replication.py
 3179d34f371e0295dd4604568fb30bcd lib/core/revision.py
 d6269c55789f78cf707e09a0f5b45443 lib/core/session.py
-d678e90ba0f7ce756b88a0540e5e7db9 lib/core/settings.py
+bba284b24e810f3f7b319ee06a7844bc lib/core/settings.py
 a8a7501d1e6b21669b858a62e921d191 lib/core/shell.py
 5dc606fdf0afefd4b305169c21ab2612 lib/core/subprocessng.py
 eec3080ba5baca44c6de4595f1c92a0d lib/core/target.py
@@ -114,7 +114,7 @@ fb6be55d21a70765e35549af2484f762 lib/utils/__init__.py
 2a40a6bd1779f7db5199f089411b1c1c lib/utils/pivotdumptable.py
 5a8902fd6fa94ea73cf44952f9ed5a57 lib/utils/progress.py
 a41136344768902f82b2855e88fd228d lib/utils/purge.py
-b6e16ad8ea04e2c1ed65966fda1c66ac lib/utils/search.py
+ba40e595754bc6e8ad16e944cb578d99 lib/utils/search.py
 8d6b244ca3d6f99a9d6cd8c1856ccfeb lib/utils/sqlalchemy.py
 a90c568a9b88eaea832a77581bd39d85 lib/utils/timeout.py
 164f830baad3e13b226ee57d44d69dfa lib/utils/versioncheck.py
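Illustrative sketch (not part of the commit): the two hash updates above can be reproduced by recomputing the MD5 of each edited file; md5_line() below is a hypothetical helper, not part of sqlmap.

import hashlib

def md5_line(path):
    # Recompute an entry in the same "hash path" form used in the manifest above
    with open(path, "rb") as f:
        return "%s %s" % (hashlib.md5(f.read()).hexdigest(), path)

print(md5_line("lib/core/settings.py"))
print(md5_line("lib/utils/search.py"))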