Merge remote-tracking branch 'sqlmapproject/master'

cxh852456 2015-11-09 09:34:40 +08:00
commit b34f53fa56
18 changed files with 2936 additions and 2090 deletions

View File

@@ -60,3 +60,4 @@ Translations
* [Greek](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-gr-GR.md)
* [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md)
* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
* [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md)

View File

@@ -0,0 +1,51 @@
sqlmap
==

sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over database servers. It comes with a powerful detection engine, many features essential for the penetration tester, and a broad range of options spanning from information gathering to fingerprint the target, over fetching data from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.

Screenshots
---

![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png)

Visit the [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) demonstrating some of the features, available in the documentation (wiki).

Installation
---

You can download the latest tarball by clicking [here](https://github.com/sqlmapproject/sqlmap/tarball/master) or the latest zipball by clicking [here](https://github.com/sqlmapproject/sqlmap/zipball/master).

Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository:

    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap works with [Python](http://www.python.org/download/) versions **2.6.x** and **2.7.x** on any platform.

Usage
---

To get a list of basic options:

    python sqlmap.py -h

To get a list of all options:

    python sqlmap.py -hh

You can find a sample run [here](https://gist.github.com/stamparm/5335217).

For an overview of sqlmap's capabilities, a list of supported features, and a description of all options and switches, together with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki).
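A typical first run against a parameterized test URL of your own might look like this (the URL is a placeholder; `--batch` accepts the default answer at every prompt and `--banner` fetches the DBMS banner):

    python sqlmap.py -u "http://www.example.com/page.php?id=1" --batch --banner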
Links
---

* Homepage: http://sqlmap.org
* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* Commit RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
* User's manual: https://github.com/sqlmapproject/sqlmap/wiki
* Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
* Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots

View File

@@ -316,8 +316,8 @@ def start():
if conf.multipleTargets:
hostCount += 1
if conf.forms:
message = "[#%d] form:\n%s %s" % (hostCount, conf.method or HTTPMETHOD.GET, targetUrl)
if conf.forms and conf.method:
message = "[#%d] form:\n%s %s" % (hostCount, conf.method, targetUrl)
else:
message = "URL %d:\n%s %s%s" % (hostCount, HTTPMETHOD.GET, targetUrl, " (PageRank: %s)" % get_pagerank(targetUrl) if conf.googleDork and conf.pageRank else "")
@@ -327,7 +327,7 @@ def start():
if conf.data is not None:
message += "\n%s data: %s" % ((conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST, urlencode(conf.data) if conf.data else "")
if conf.forms:
if conf.forms and conf.method:
if conf.method == HTTPMETHOD.GET and targetUrl.find("?") == -1:
continue

View File

@@ -493,6 +493,8 @@ class Agent(object):
if not _:
fieldsSelectFrom = None
fieldsToCastStr = fieldsNoSelect
if fieldsSubstr:
fieldsToCastStr = query
elif fieldsMinMaxstr:
@@ -516,8 +518,6 @@
fieldsToCastStr = re.sub(r"\ASELECT%s\s+" % prefixRegex, "", fieldsToCastStr)
elif fieldsSelect:
fieldsToCastStr = fieldsSelect.groups()[0]
else:
fieldsToCastStr = fieldsNoSelect
# Function
if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:

View File

@@ -3561,7 +3561,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
try:
forms = ParseResponse(response, backwards_compat=False)
except UnicodeError:
except (UnicodeError, ValueError):
pass
except ParseError:
if "<html" in (content or ""):
@@ -3861,7 +3861,7 @@ def resetCookieJar(cookieJar):
os.close(handle)
# Reference: http://www.hashbangcode.com/blog/netscape-http-cooke-file-parser-php-584.html
with open(filename, "w+b") as f:
with openFile(filename, "w+b") as f:
f.write("%s\n" % NETSCAPE_FORMAT_HEADER_COOKIES)
for line in lines:
_ = line.split("\t")

View File

@@ -27,7 +27,7 @@ import lib.core.common
import lib.core.threads
import lib.core.convert
import lib.request.connect
import lib.utils.google
import lib.utils.search
from lib.controller.checks import checkConnection
from lib.core.common import Backend
@@ -148,7 +148,7 @@ from lib.request.redirecthandler import SmartRedirectHandler
from lib.request.templates import getPageTemplate
from lib.utils.crawler import crawl
from lib.utils.deps import checkDependencies
from lib.utils.google import Google
from lib.utils.search import search
from lib.utils.purge import purge
from thirdparty.colorama.initialise import init as coloramainit
from thirdparty.keepalive import keepalive
@@ -163,42 +163,6 @@ proxyHandler = urllib2.ProxyHandler()
redirectHandler = SmartRedirectHandler()
rangeHandler = HTTPRangeHandler()
def _urllib2Opener():
"""
This function creates the urllib2 OpenerDirector.
"""
debugMsg = "creating HTTP requests opener object"
logger.debug(debugMsg)
handlers = [proxyHandler, authHandler, redirectHandler, rangeHandler, httpsHandler]
if not conf.dropSetCookie:
if not conf.loadCookies:
conf.cj = cookielib.CookieJar()
else:
conf.cj = cookielib.MozillaCookieJar()
resetCookieJar(conf.cj)
handlers.append(urllib2.HTTPCookieProcessor(conf.cj))
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
if conf.keepAlive:
warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
warnMsg += "been disabled because of its incompatibility "
if conf.proxy:
warnMsg += "with HTTP(s) proxy"
logger.warn(warnMsg)
elif conf.authType:
warnMsg += "with authentication methods"
logger.warn(warnMsg)
else:
handlers.append(keepAliveHandler)
opener = urllib2.build_opener(*handlers)
urllib2.install_opener(opener)
def _feedTargetsDict(reqFile, addedTargetUrls):
"""
Parses web scarab and burp logs and adds results to the target URL list
@@ -539,46 +503,23 @@ def _setCrawler():
errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, ex)
logger.error(errMsg)
def _setGoogleDorking():
def _doSearch():
"""
This function checks if the way to request testable hosts is through
Google dorking then requests to Google the search parameter, parses
the results and save the testable hosts into the knowledge base.
This function performs search dorking, parses results
and saves the testable hosts into the knowledge base.
"""
if not conf.googleDork:
return
global keepAliveHandler
global proxyHandler
debugMsg = "initializing Google dorking requests"
logger.debug(debugMsg)
infoMsg = "first request to Google to get the session cookie"
logger.info(infoMsg)
handlers = [proxyHandler]
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
if conf.keepAlive:
if conf.proxy:
warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
warnMsg += "been disabled because of its incompatibility "
warnMsg += "with HTTP(s) proxy"
logger.warn(warnMsg)
else:
handlers.append(keepAliveHandler)
googleObj = Google(handlers)
kb.data.onlyGETs = None
def retrieve():
links = googleObj.search(conf.googleDork)
links = search(conf.googleDork)
if not links:
errMsg = "unable to find results for your "
errMsg += "Google dork expression"
errMsg += "search dork expression"
raise SqlmapGenericException(errMsg)
for link in links:
@@ -600,7 +541,7 @@ def _setGoogleDorking():
if kb.targets:
infoMsg = "sqlmap got %d results for your " % len(links)
infoMsg += "Google dork expression, "
infoMsg += "search dork expression, "
if len(links) == len(kb.targets):
infoMsg += "all "
@@ -613,7 +554,7 @@ def _setGoogleDorking():
else:
message = "sqlmap got %d results " % len(links)
message += "for your Google dork expression, but none of them "
message += "for your search dork expression, but none of them "
message += "have GET parameters to test for SQL injection. "
message += "Do you want to skip to the next result page? [Y/n]"
test = readInput(message, default="Y")
@@ -971,7 +912,7 @@ def _setTamperingFunctions():
sys.path.insert(0, dirname)
try:
module = __import__(filename[:-3])
module = __import__(filename[:-3].encode(sys.getfilesystemencoding()))
except (ImportError, SyntaxError), msg:
raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], msg))
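The `.encode(sys.getfilesystemencoding())` added above presumably copes with unicode tamper script names: in Python 2, `__import__` coerces a unicode name with the default (ASCII) codec, which breaks for non-ASCII paths, so the name is encoded explicitly first. A minimal sketch of the call shape, using a standard-library module as a stand-in:

    import sys

    # stand-in for a tamper module name such as filename[:-3]
    name = u"json"
    # encode the unicode name before handing it to Python 2's __import__
    module = __import__(name.encode(sys.getfilesystemencoding() or "utf8"))
    print(module.__name__)  # json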
@@ -1077,7 +1018,7 @@ def _setDNSCache():
socket._getaddrinfo = socket.getaddrinfo
socket.getaddrinfo = _getaddrinfo
def _setHTTPProxy():
def _setHTTPHandlers():
"""
Check and set the HTTP/SOCKS proxy for all HTTP requests.
"""
@@ -1102,63 +1043,93 @@ def _setHTTPProxy():
if conf.hostname in ("localhost", "127.0.0.1") or conf.ignoreProxy:
proxyHandler.proxies = {}
return
if conf.proxy:
debugMsg = "setting the HTTP/SOCKS proxy for all HTTP requests"
logger.debug(debugMsg)
debugMsg = "setting the HTTP/SOCKS proxy for all HTTP requests"
logger.debug(debugMsg)
try:
_ = urlparse.urlsplit(conf.proxy)
except Exception, ex:
errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, ex)
raise SqlmapSyntaxException, errMsg
hostnamePort = _.netloc.split(":")
scheme = _.scheme.upper()
hostname = hostnamePort[0]
port = None
username = None
password = None
if len(hostnamePort) == 2:
try:
port = int(hostnamePort[1])
except:
pass # drops into the next check block
_ = urlparse.urlsplit(conf.proxy)
except Exception, ex:
errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, ex)
raise SqlmapSyntaxException, errMsg
if not all((scheme, hasattr(PROXY_TYPE, scheme), hostname, port)):
errMsg = "proxy value must be in format '(%s)://address:port'" % "|".join(_[0].lower() for _ in getPublicTypeMembers(PROXY_TYPE))
raise SqlmapSyntaxException(errMsg)
hostnamePort = _.netloc.split(":")
if conf.proxyCred:
_ = re.search("^(.*?):(.*?)$", conf.proxyCred)
if not _:
errMsg = "proxy authentication credentials "
errMsg += "value must be in format username:password"
scheme = _.scheme.upper()
hostname = hostnamePort[0]
port = None
username = None
password = None
if len(hostnamePort) == 2:
try:
port = int(hostnamePort[1])
except:
pass # drops into the next check block
if not all((scheme, hasattr(PROXY_TYPE, scheme), hostname, port)):
errMsg = "proxy value must be in format '(%s)://address:port'" % "|".join(_[0].lower() for _ in getPublicTypeMembers(PROXY_TYPE))
raise SqlmapSyntaxException(errMsg)
else:
username = _.group(1)
password = _.group(2)
if scheme in (PROXY_TYPE.SOCKS4, PROXY_TYPE.SOCKS5):
proxyHandler.proxies = {}
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password)
socks.wrapmodule(urllib2)
else:
socks.unwrapmodule(urllib2)
if conf.proxyCred:
# Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
proxyString = "%s@" % conf.proxyCred
_ = re.search("^(.*?):(.*?)$", conf.proxyCred)
if not _:
errMsg = "proxy authentication credentials "
errMsg += "value must be in format username:password"
raise SqlmapSyntaxException(errMsg)
else:
username = _.group(1)
password = _.group(2)
if scheme in (PROXY_TYPE.SOCKS4, PROXY_TYPE.SOCKS5):
proxyHandler.proxies = {}
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password)
socks.wrapmodule(urllib2)
else:
proxyString = ""
socks.unwrapmodule(urllib2)
proxyString += "%s:%d" % (hostname, port)
proxyHandler.proxies = {"http": proxyString, "https": proxyString}
if conf.proxyCred:
# Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
proxyString = "%s@" % conf.proxyCred
else:
proxyString = ""
proxyHandler.__init__(proxyHandler.proxies)
proxyString += "%s:%d" % (hostname, port)
proxyHandler.proxies = {"http": proxyString, "https": proxyString}
proxyHandler.__init__(proxyHandler.proxies)
debugMsg = "creating HTTP requests opener object"
logger.debug(debugMsg)
handlers = filter(None, [proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])
if not conf.dropSetCookie:
if not conf.loadCookies:
conf.cj = cookielib.CookieJar()
else:
conf.cj = cookielib.MozillaCookieJar()
resetCookieJar(conf.cj)
handlers.append(urllib2.HTTPCookieProcessor(conf.cj))
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
if conf.keepAlive:
warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
warnMsg += "been disabled because of its incompatibility "
if conf.proxy:
warnMsg += "with HTTP(s) proxy"
logger.warn(warnMsg)
elif conf.authType:
warnMsg += "with authentication methods"
logger.warn(warnMsg)
else:
handlers.append(keepAliveHandler)
opener = urllib2.build_opener(*handlers)
urllib2.install_opener(opener)
def _setSafeVisit():
"""
@@ -2494,8 +2465,8 @@ def _resolveCrossReferences():
lib.core.threads.readInput = readInput
lib.core.common.getPageTemplate = getPageTemplate
lib.core.convert.singleTimeWarnMessage = singleTimeWarnMessage
lib.request.connect.setHTTPProxy = _setHTTPProxy
lib.utils.google.setHTTPProxy = _setHTTPProxy
lib.request.connect.setHTTPHandlers = _setHTTPHandlers
lib.utils.search.setHTTPHandlers = _setHTTPHandlers
lib.controller.checks.setVerbosity = setVerbosity
def initOptions(inputOptions=AttribDict(), overrideOptions=False):
@@ -2544,13 +2515,12 @@ def init():
_setHTTPHost()
_setHTTPUserAgent()
_setHTTPAuthentication()
_setHTTPProxy()
_setHTTPHandlers()
_setDNSCache()
_setSafeVisit()
_setGoogleDorking()
_doSearch()
_setBulkMultipleTargets()
_setSitemapTargets()
_urllib2Opener()
_checkTor()
_setCrawler()
_findPageForms()
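The net effect of the changes above: the old `_urllib2Opener` and `_setHTTPProxy` are folded into a single `_setHTTPHandlers`, so the opener is rebuilt whenever the proxy changes (which the new `lib/utils/search.py` relies on when rotating proxies). A stripped-down sketch of the combined flow, with sqlmap's `conf` replaced by plain parameters (the names here are illustrative, not sqlmap's):

    import cookielib
    import urllib2

    def set_http_handlers(proxy=None, drop_set_cookie=False):
        # optional proxy handler, as in the merged _setHTTPHandlers
        handlers = []
        if proxy:  # e.g. "http://127.0.0.1:8080"
            handlers.append(urllib2.ProxyHandler({"http": proxy, "https": proxy}))
        # cookie processing unless the equivalent of --drop-set-cookie is set
        if not drop_set_cookie:
            handlers.append(urllib2.HTTPCookieProcessor(cookielib.CookieJar()))
        opener = urllib2.build_opener(*handlers)
        urllib2.install_opener(opener)  # later urllib2.urlopen() calls use it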

View File

@@ -73,7 +73,7 @@ PERMISSION_DENIED_REGEX = r"(command|permission|access)\s*(was|is)?\s*denied"
MAX_CONNECTIONS_REGEX = r"max.+connections"
# Regular expression used for extracting results from Google search
GOOGLE_REGEX = r"url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"
GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&amp;cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"
# Regular expression used for extracting results from DuckDuckGo search
DUCKDUCKGO_REGEX = r'"u":"([^"]+)'
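The widened `GOOGLE_REGEX` now has two capturing groups, one for cache links and one for the classic redirect form, which is why the new `lib/utils/search.py` below extracts results with `match.group(1) or match.group(2)`. A quick illustration against a synthetic result page (the HTML snippet is made up for the example):

    import re
    import urllib

    GOOGLE_REGEX = (r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&amp;cd=|"
                    r"url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)")

    # synthetic result page snippet
    page = '<a href="/url?q=http://example.com/page.php%3Fid%3D1&sa=U">result</a>'
    links = [urllib.unquote(m.group(1) or m.group(2)) for m in re.finditer(GOOGLE_REGEX, page, re.I)]
    print(links)  # ['http://example.com/page.php?id=1']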

View File

@@ -17,6 +17,7 @@ from lib.core.common import extractErrorMessage
from lib.core.common import extractRegexResult
from lib.core.common import getPublicTypeMembers
from lib.core.common import getUnicode
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import resetCookieJar
from lib.core.common import singleTimeLogMessage
@@ -113,7 +114,7 @@ def forgeHeaders(items=None):
elif not kb.testMode:
headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, cookie.name, getUnicode(cookie.value))
if kb.testMode and not conf.csrfToken:
if kb.testMode and not any((conf.csrfToken, conf.safeUrl)):
resetCookieJar(conf.cj)
return headers
@@ -206,6 +207,15 @@ def checkCharEncoding(encoding, warn=True):
singleTimeLogMessage(warnMsg, logging.WARN, encoding)
encoding = None
if encoding:
try:
unicode(randomStr(), encoding)
except:
if warn:
warnMsg = "invalid web page charset '%s'" % encoding
singleTimeLogMessage(warnMsg, logging.WARN, encoding)
encoding = None
return encoding
def getHeuristicCharEncoding(page):
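The new block probes the candidate charset by actually decoding something with it, which weeds out values that pass the earlier name checks but are not usable codecs. The same check in isolation, using a fixed sample where the original uses `randomStr()`:

    def is_decodable(encoding, sample="abcdefgh"):
        # Python 2: unicode(bytes, encoding) raises LookupError for unknown
        # codecs and UnicodeError/ValueError for undecodable input
        try:
            unicode(sample, encoding)
            return True
        except Exception:
            return False

    print(is_decodable("utf-8"))    # True
    print(is_decodable("bogus-1"))  # False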

View File

@@ -132,7 +132,9 @@ class Connect(object):
logger.warn(warnMsg)
conf.proxy = None
setHTTPProxy()
threadData.retriesCount = 0
setHTTPHandlers()
if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
# timed based payloads can cause web server unresponsiveness
@@ -369,15 +371,17 @@ class Connect(object):
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
if auxHeaders:
for key, item in auxHeaders.items():
for key, value in auxHeaders.items():
for _ in headers.keys():
if _.upper() == key.upper():
del headers[_]
headers[key] = item
headers[key] = value
for key, item in headers.items():
for key, value in headers.items():
del headers[key]
headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)
headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(value, kb.pageEncoding)
for char in (r"\r", r"\n"):
value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value)
url = unicodeencode(url)
post = unicodeencode(post)
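The new `for char in (r"\r", r"\n")` loop neutralizes header-splitting attempts: any CR or LF inside a header value that is not already followed by a space or tab gets a tab appended, turning a would-be injected header line into an RFC 2616-style folded continuation of the same header. Illustrated on a crafted value:

    import re

    value = "legit\r\nX-Injected: 1"  # crafted header value with an embedded CRLF
    for char in (r"\r", r"\n"):
        value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value)
    print(repr(value))  # 'legit\r\t\n\tX-Injected: 1' -- folded, no longer a new header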
@@ -586,7 +590,7 @@ class Connect(object):
debugMsg = "got HTTP error code: %d (%s)" % (code, status)
logger.debug(debugMsg)
except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException):
except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError):
tbMsg = traceback.format_exc()
if "no host given" in tbMsg:
@@ -1114,5 +1118,5 @@ class Connect(object):
else:
return comparison(page, headers, code, getRatioValue, pageLength)
def setHTTPProxy(): # Cross-linked function
def setHTTPHandlers(): # Cross-linked function
raise NotImplementedError

View File

@@ -226,7 +226,7 @@ def _unionPosition(comment, place, parameter, prefix, suffix, count, where=PAYLO
if content.count(phrase) > 0 and content.count(phrase) < LIMITED_ROWS_TEST_NUMBER:
warnMsg = "output with limited number of rows detected. Switching to partial mode"
logger.warn(warnMsg)
vector = (position, count, comment, prefix, suffix, kb.uChar, PAYLOAD.WHERE.NEGATIVE, kb.unionDuplicates, False)
vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, True)
unionErrorCase = kb.errorIsNone and wasLastResponseDBMSError()

View File

@@ -15,6 +15,7 @@ import time
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout
from lib.core.common import findPageForms
from lib.core.common import getSafeExString
from lib.core.common import openFile
from lib.core.common import readInput
from lib.core.common import safeCSValue
@@ -127,20 +128,26 @@ def crawl(target):
message += "site's sitemap(.xml) [y/N] "
test = readInput(message, default="n")
if test[0] in ("y", "Y"):
found = True
items = None
url = urlparse.urljoin(target, "/sitemap.xml")
try:
items = parseSitemap(url)
except SqlmapConnectionException, ex:
if "page not found" in getSafeExString(ex):
found = False
logger.warn("'sitemap.xml' not found")
except:
pass
finally:
if items:
for item in items:
if re.search(r"(.*?)\?(.+)", item):
threadData.shared.value.add(item)
if conf.crawlDepth > 1:
threadData.shared.unprocessed.update(items)
logger.info("%s links found" % ("no" if not items else len(items)))
if found:
if items:
for item in items:
if re.search(r"(.*?)\?(.+)", item):
threadData.shared.value.add(item)
if conf.crawlDepth > 1:
threadData.shared.unprocessed.update(items)
logger.info("%s links found" % ("no" if not items else len(items)))
infoMsg = "starting crawler"
if conf.bulkFile:
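A side note on the surviving logic: crawled and sitemap-derived links only reach the target set if they carry a query string, since only parameterized URLs are directly testable. The filter in isolation:

    import re

    items = ["http://example.com/about", "http://example.com/item.php?id=2"]
    testable = [item for item in items if re.search(r"(.*?)\?(.+)", item)]
    print(testable)  # ['http://example.com/item.php?id=2']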

View File

@@ -1,183 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import cookielib
import httplib
import re
import socket
import urllib
import urllib2
from lib.core.common import getSafeExString
from lib.core.common import getUnicode
from lib.core.common import readInput
from lib.core.common import urlencode
from lib.core.data import conf
from lib.core.data import logger
from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import HTTP_HEADER
from lib.core.exception import SqlmapConnectionException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapUserQuitException
from lib.core.settings import DUMMY_SEARCH_USER_AGENT
from lib.core.settings import DUCKDUCKGO_REGEX
from lib.core.settings import DISCONNECT_SEARCH_REGEX
from lib.core.settings import GOOGLE_REGEX
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
from lib.core.settings import UNICODE_ENCODING
from lib.request.basic import decodePage
from lib.request.httpshandler import HTTPSHandler
from thirdparty.socks import socks
class Google(object):
"""
This class defines methods used to perform Google dorking (command
line option '-g <google dork>')
"""
def __init__(self, handlers):
self._cj = cookielib.CookieJar()
handlers.append(urllib2.HTTPCookieProcessor(self._cj))
handlers.append(HTTPSHandler())
self.opener = urllib2.build_opener(*handlers)
self.opener.addheaders = conf.httpHeaders
try:
conn = self.opener.open("https://www.google.com/ncr")
conn.info() # retrieve session cookie
except Exception, ex:
errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
raise SqlmapConnectionException(errMsg)
def search(self, dork):
"""
This method performs the effective search on Google providing
the google dork and the Google session cookie
"""
gpage = conf.googlePage if conf.googlePage > 1 else 1
logger.info("using Google result page #%d" % gpage)
if not dork:
return None
url = "https://www.google.com/search?"
url += "q=%s&" % urlencode(dork, convall=True)
url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
url += "&start=%d" % ((gpage - 1) * 100)
try:
conn = self.opener.open(url)
requestMsg = "HTTP request:\nGET %s" % url
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
page = conn.read()
code = conn.code
status = conn.msg
responseHeaders = conn.info()
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
if conf.verbose <= 4:
responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
elif conf.verbose > 4:
responseMsg += "%s\n%s\n" % (responseHeaders, page)
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
except urllib2.HTTPError, e:
try:
page = e.read()
except Exception, ex:
warnMsg = "problem occurred while trying to get "
warnMsg += "an error page information (%s)" % getSafeExString(ex)
logger.critical(warnMsg)
return None
except (urllib2.URLError, httplib.error, socket.error, socket.timeout, socks.ProxyError):
errMsg = "unable to connect to Google"
raise SqlmapConnectionException(errMsg)
retVal = [urllib.unquote(match.group(1)) for match in re.finditer(GOOGLE_REGEX, page, re.I | re.S)]
if not retVal and "detected unusual traffic" in page:
warnMsg = "Google has detected 'unusual' traffic from "
warnMsg += "used IP address disabling further searches"
raise SqlmapGenericException(warnMsg)
if not retVal:
message = "no usable links found. What do you want to do?"
message += "\n[1] (re)try with DuckDuckGo (default)"
message += "\n[2] (re)try with Disconnect Search"
message += "\n[3] quit"
choice = readInput(message, default="1").strip().upper()
if choice == "Q":
raise SqlmapUserQuitException
elif choice == "2":
url = "https://search.disconnect.me/searchTerms/search?"
url += "start=nav&option=Web"
url += "&query=%s" % urlencode(dork, convall=True)
url += "&ses=Google&location_option=US"
url += "&nextDDG=%s" % urlencode("/search?q=&num=100&hl=en&start=%d&sa=N" % ((gpage - 1) * 10), convall=True)
url += "&sa=N&showIcons=false&filterIcons=none&js_enabled=1"
regex = DISCONNECT_SEARCH_REGEX
else:
url = "https://duckduckgo.com/d.js?"
url += "q=%s&p=%d&s=100" % (urlencode(dork, convall=True), gpage)
regex = DUCKDUCKGO_REGEX
if not conf.randomAgent:
self.opener.addheaders = [_ for _ in self.opener.addheaders if _[0].lower() != HTTP_HEADER.USER_AGENT.lower()]
self.opener.addheaders.append((HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT))
self.opener.addheaders = [_ for _ in self.opener.addheaders if _[0].lower() != HTTP_HEADER.ACCEPT_ENCODING.lower()]
self.opener.addheaders.append((HTTP_HEADER.ACCEPT_ENCODING, HTTP_ACCEPT_ENCODING_HEADER_VALUE))
try:
conn = self.opener.open(url)
requestMsg = "HTTP request:\nGET %s" % url
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
page = conn.read()
code = conn.code
status = conn.msg
responseHeaders = conn.info()
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
if conf.verbose <= 4:
responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
elif conf.verbose > 4:
responseMsg += "%s\n%s\n" % (responseHeaders, page)
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
except urllib2.HTTPError, e:
try:
page = e.read()
except socket.timeout:
warnMsg = "connection timed out while trying "
warnMsg += "to get error page information (%d)" % e.code
logger.critical(warnMsg)
return None
except:
errMsg = "unable to connect"
raise SqlmapConnectionException(errMsg)
retVal = [urllib.unquote(match.group(1)) for match in re.finditer(regex, page, re.I | re.S)]
return retVal
def setHTTPProxy(): # Cross-linked function
raise NotImplementedError

lib/utils/search.py Normal file
View File

@@ -0,0 +1,195 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import cookielib
import httplib
import re
import socket
import urllib
import urllib2
from lib.core.common import getSafeExString
from lib.core.common import getUnicode
from lib.core.common import popValue
from lib.core.common import pushValue
from lib.core.common import readInput
from lib.core.common import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import HTTP_HEADER
from lib.core.enums import REDIRECTION
from lib.core.exception import SqlmapBaseException
from lib.core.exception import SqlmapConnectionException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapUserQuitException
from lib.core.settings import DUMMY_SEARCH_USER_AGENT
from lib.core.settings import DUCKDUCKGO_REGEX
from lib.core.settings import DISCONNECT_SEARCH_REGEX
from lib.core.settings import GOOGLE_REGEX
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
from lib.core.settings import UNICODE_ENCODING
from lib.request.basic import decodePage
from lib.request.httpshandler import HTTPSHandler
from thirdparty.socks import socks
def _search(dork):
"""
This method performs the effective search on Google providing
the google dork and the Google session cookie
"""
if not dork:
return None
headers = {}
headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
try:
req = urllib2.Request("https://www.google.com/ncr", headers=headers)
conn = urllib2.urlopen(req)
except Exception, ex:
errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
raise SqlmapConnectionException(errMsg)
gpage = conf.googlePage if conf.googlePage > 1 else 1
logger.info("using search result page #%d" % gpage)
url = "https://www.google.com/search?"
url += "q=%s&" % urlencode(dork, convall=True)
url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
url += "&start=%d" % ((gpage - 1) * 100)
try:
req = urllib2.Request(url, headers=headers)
conn = urllib2.urlopen(req)
requestMsg = "HTTP request:\nGET %s" % url
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
page = conn.read()
code = conn.code
status = conn.msg
responseHeaders = conn.info()
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
if conf.verbose <= 4:
responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
elif conf.verbose > 4:
responseMsg += "%s\n%s\n" % (responseHeaders, page)
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
except urllib2.HTTPError, e:
try:
page = e.read()
except Exception, ex:
warnMsg = "problem occurred while trying to get "
warnMsg += "an error page information (%s)" % getSafeExString(ex)
logger.critical(warnMsg)
return None
except (urllib2.URLError, httplib.error, socket.error, socket.timeout, socks.ProxyError):
errMsg = "unable to connect to Google"
raise SqlmapConnectionException(errMsg)
retVal = [urllib.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
if not retVal and "detected unusual traffic" in page:
warnMsg = "Google has detected 'unusual' traffic from "
warnMsg += "used IP address disabling further searches"
logger.warn(warnMsg)
if not retVal:
message = "no usable links found. What do you want to do?"
message += "\n[1] (re)try with DuckDuckGo (default)"
message += "\n[2] (re)try with Disconnect Search"
message += "\n[3] quit"
choice = readInput(message, default="1").strip().upper()
if choice == "Q":
raise SqlmapUserQuitException
elif choice == "2":
url = "https://search.disconnect.me/searchTerms/search?"
url += "start=nav&option=Web"
url += "&query=%s" % urlencode(dork, convall=True)
url += "&ses=Google&location_option=US"
url += "&nextDDG=%s" % urlencode("/search?q=%s&setmkt=en-US&setplang=en-us&setlang=en-us&first=%d&FORM=PORE" % (urlencode(dork, convall=True), (gpage - 1) * 10), convall=True)
url += "&sa=N&showIcons=false&filterIcons=none&js_enabled=1"
regex = DISCONNECT_SEARCH_REGEX
else:
url = "https://duckduckgo.com/d.js?"
url += "q=%s&p=%d&s=100" % (urlencode(dork, convall=True), gpage)
regex = DUCKDUCKGO_REGEX
try:
req = urllib2.Request(url, headers=headers)
conn = urllib2.urlopen(req)
requestMsg = "HTTP request:\nGET %s" % url
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
page = conn.read()
code = conn.code
status = conn.msg
responseHeaders = conn.info()
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
if conf.verbose <= 4:
responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
elif conf.verbose > 4:
responseMsg += "%s\n%s\n" % (responseHeaders, page)
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
except urllib2.HTTPError, e:
try:
page = e.read()
except socket.timeout:
warnMsg = "connection timed out while trying "
warnMsg += "to get error page information (%d)" % e.code
logger.critical(warnMsg)
return None
except:
errMsg = "unable to connect"
raise SqlmapConnectionException(errMsg)
retVal = [urllib.unquote(match.group(1)) for match in re.finditer(regex, page, re.I | re.S)]
return retVal
def search(dork):
pushValue(kb.redirectChoice)
kb.redirectChoice = REDIRECTION.YES
try:
return _search(dork)
except SqlmapBaseException, ex:
if conf.proxyList:
logger.critical(getSafeExString(ex))
warnMsg = "changing proxy"
logger.warn(warnMsg)
conf.proxy = None
setHTTPHandlers()
return search(dork)
else:
raise
finally:
kb.redirectChoice = popValue()
def setHTTPHandlers(): # Cross-linked function
raise NotImplementedError
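The module-level `search()` replaces the old `Google` class: it pins `kb.redirectChoice` to `REDIRECTION.YES` for the duration of the request, saving and restoring the previous value with `pushValue`/`popValue`, and when a proxy list is in use it rotates to a fresh proxy and retries on connection errors. A sketch of the save/restore pattern it relies on, with stand-in helpers mirroring the ones in `lib.core.common`:

    # stand-in push/pop helpers (the real ones operate on an internal stack)
    _stack = []

    def pushValue(value):
        _stack.append(value)

    def popValue():
        return _stack.pop()

    redirectChoice = "N"             # hypothetical current setting
    pushValue(redirectChoice)        # remember the user's choice
    redirectChoice = "Y"             # force-follow redirects during the search
    try:
        pass                         # ... perform the search ...
    finally:
        redirectChoice = popValue()  # always restore, even on exceptions
    print(redirectChoice)            # 'N'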

View File

@@ -8,6 +8,8 @@ See the file 'doc/COPYING' for copying permission
import logging
import optparse
from lib.utils import versioncheck # this has to be the first non-standard import
from sqlmap import modulePath
from lib.core.common import setPaths
from lib.core.data import paths

tamper/commalessmid.py Normal file
View File

@@ -0,0 +1,43 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import os
import re
from lib.core.common import singleTimeWarnMessage
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.HIGH
def dependencies():
pass
def tamper(payload, **kwargs):
"""
Replaces instances like 'MID(A, B, C)' with 'MID(A FROM B FOR C)'
Requirement:
* MySQL
Tested against:
* MySQL 5.0 and 5.5
>>> tamper('MID(VERSION(), 1, 1)')
'MID(VERSION() FROM 1 FOR 1)'
"""
retVal = payload
warnMsg = "you should consider usage of switch '--no-cast' along with "
warnMsg += "tamper script '%s'" % os.path.basename(__file__).split(".")[0]
singleTimeWarnMessage(warnMsg)
match = re.search(r"(?i)MID\((.+?)\s*,\s*(\d+)\s*\,\s*(\d+)\s*\)", payload or "")
if match:
retVal = retVal.replace(match.group(0), "MID(%s FROM %s FOR %s)" % (match.group(1), match.group(2), match.group(3)))
return retVal
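The transformation itself is a single regex rewrite, so it can be exercised outside sqlmap; a standalone re-statement of the substitution above:

    import re

    payload = "MID(VERSION(), 1, 1)"
    match = re.search(r"(?i)MID\((.+?)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)", payload)
    if match:
        payload = payload.replace(match.group(0), "MID(%s FROM %s FOR %s)" % match.groups())
    print(payload)  # MID(VERSION() FROM 1 FOR 1)

In a real run the script is enabled with `--tamper=commalessmid`, ideally together with `--no-cast` as the warning suggests.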

View File

@@ -79,8 +79,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT.
from __future__ import generators
__author__ = "Leonard Richardson (leonardr@segfault.org)"
__version__ = "3.2.0"
__copyright__ = "Copyright (c) 2004-2010 Leonard Richardson"
__version__ = "3.2.1"
__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
__license__ = "New-style BSD"
from sgmllib import SGMLParser, SGMLParseError
@@ -114,6 +114,21 @@ class PageElement(object):
"""Contains the navigational information for some part of the page
(either a tag or a piece of text)"""
def _invert(h):
"Cheap function to invert a hash."
i = {}
for k,v in h.items():
i[v] = k
return i
XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'",
"quot" : '"',
"amp" : "&",
"lt" : "<",
"gt" : ">" }
XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS)
def setup(self, parent=None, previous=None):
"""Sets up the initial relations between this element and
other elements."""
@@ -421,6 +436,16 @@ class PageElement(object):
s = unicode(s)
return s
BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
+ "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
+ ")")
def _sub_entity(self, x):
"""Used with a regular expression to substitute the
appropriate XML entity for an XML special character."""
return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";"
class NavigableString(unicode, PageElement):
def __new__(cls, value):
@@ -451,10 +476,12 @@ class NavigableString(unicode, PageElement):
return str(self).decode(DEFAULT_OUTPUT_ENCODING)
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
# Substitute outgoing XML entities.
data = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, self)
if encoding:
return self.encode(encoding)
return data.encode(encoding)
else:
return self
return data
class CData(NavigableString):
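The 3.2.1 change moves the entity tables and `BARE_AMPERSAND_OR_BRACKET` up to `PageElement` so that `NavigableString.__str__` can escape bare `&`, `<` and `>` on output while leaving existing entities intact. The regex's behaviour in isolation:

    import re

    # matches <, >, or an '&' that does not already start an entity
    BARE_AMPERSAND_OR_BRACKET = re.compile(r"([<>]|&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;))")

    def sub_entity(match):
        return {"&": "&amp;", "<": "&lt;", ">": "&gt;"}[match.group(0)[0]]

    print(BARE_AMPERSAND_OR_BRACKET.sub(sub_entity, "a < b &amp; c & d"))
    # a &lt; b &amp; c &amp; d -- '&amp;' untouched, bare '&' and '<' escaped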
@@ -480,21 +507,6 @@ class Tag(PageElement):
"""Represents a found HTML tag with its attributes and contents."""
def _invert(h):
"Cheap function to invert a hash."
i = {}
for k,v in h.items():
i[v] = k
return i
XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'",
"quot" : '"',
"amp" : "&",
"lt" : "<",
"gt" : ">" }
XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS)
def _convertEntities(self, match):
"""Used in a call to re.sub to replace HTML, XML, and numeric
entities with the appropriate Unicode characters. If HTML
@@ -681,15 +693,6 @@ class Tag(PageElement):
def __unicode__(self):
return self.__str__(None)
BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
+ "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
+ ")")
def _sub_entity(self, x):
"""Used with a regular expression to substitute the
appropriate XML entity for an XML special character."""
return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";"
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING,
prettyPrint=False, indentLevel=0):
"""Returns a string or Unicode representation of this tag and

File diff suppressed because it is too large

File diff suppressed because it is too large