#!/usr/bin/env python

"""
$Id$

This file is part of the sqlmap project, http://sqlmap.sourceforge.net.

Copyright (c) 2007-2010 Bernardo Damele A. G. <bernardo.damele@gmail.com>
Copyright (c) 2006 Daniele Bellucci <daniele.bellucci@gmail.com>

sqlmap is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation version 2 of the License.

sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.

You should have received a copy of the GNU General Public License along
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""

import gzip
import os
import re
import StringIO
import zlib

from lib.core.common import getCompiledRegex
from lib.core.common import isWindowsDriveLetterPath
from lib.core.common import posixToNtSlashes
from lib.core.common import urlEncodeCookieValues
from lib.core.data import conf
from lib.core.data import kb
from lib.parse.headers import headersParser
from lib.parse.html import htmlParser

def forgeHeaders(cookie, ua):
    """
    Prepare HTTP Cookie and HTTP User-Agent headers to use when performing
    the HTTP requests
    """

    headers = {}

    for header, value in conf.httpHeaders:
        if cookie and header == "Cookie":
            if conf.cookieUrlencode:
                cookie = urlEncodeCookieValues(cookie)

            headers[header] = cookie
        elif ua and header == "User-Agent":
            headers[header] = ua
        else:
            headers[header] = value

    return headers
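# Illustrative sketch only, not part of the original module: conf.httpHeaders
# is populated elsewhere from the command line options as a list of
# (name, value) tuples. Assuming, for instance:
#
#   conf.httpHeaders = [("User-Agent", "sqlmap/0.9"), ("Cookie", "id=1")]
#   conf.cookieUrlencode = False
#
# then forgeHeaders("PHPSESSID=a1b2c3", "Mozilla/5.0") returns
# {"User-Agent": "Mozilla/5.0", "Cookie": "PHPSESSID=a1b2c3"}: the configured
# values are overridden by the per-request cookie and user-agent.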

def parseResponse(page, headers):
    """
    @param page: the page to parse to feed the knowledge base htmlFp
    (back-end DBMS fingerprint based upon DBMS error messages returned
    through the web application) list and absFilePaths (absolute file
    paths) set.
    """

    if headers:
        headersParser(headers)

    if page:
        htmlParser(page)

        # Detect injectable page absolute system path
        # NOTE: the first regular expression works if the remote web
        # application is written in PHP and debug/error messages are enabled
        for regex in (r" in <b>(?P<result>.*?)</b> on line", r"(?:>|\s)(?P<result>[A-Za-z]:[\\/][\w.\\/]*)", r"(?:>|\s)(?P<result>/\w[/\w.]+)"):
            regObj = getCompiledRegex(regex)

            for match in regObj.finditer(page):
                absFilePath = match.group("result").strip()
                page = page.replace(absFilePath, "")

                if isWindowsDriveLetterPath(absFilePath):
                    absFilePath = posixToNtSlashes(absFilePath)

                if absFilePath not in kb.absFilePaths:
                    kb.absFilePaths.add(absFilePath)
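# Illustrative sketch only, not part of the original module: with PHP
# error reporting enabled, a response body containing, for instance,
#
#   Warning: mysql_fetch_row(): ... in <b>/var/www/vuln.php</b> on line <b>12</b>
#
# matches the first regular expression above, so "/var/www/vuln.php" is
# removed from the local copy of the page (keeping the more generic patterns
# that follow from re-matching it) and added to the kb.absFilePaths set.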

def decodePage(page, contentEncoding, contentType):
    """
    Decode a gzip/deflate-compressed and/or charset-encoded HTTP response
    """

    if isinstance(contentEncoding, basestring) and contentEncoding.lower() in ('gzip', 'x-gzip', 'deflate'):
        if contentEncoding.lower() == 'deflate':
            # Reference: http://stackoverflow.com/questions/1089662/python-inflate-and-deflate-implementations
            # wbits=-15 tells zlib to expect a raw deflate stream (no zlib header)
            data = StringIO.StringIO(zlib.decompress(page, -15))
        else:
            data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(page))

        page = data.read()

    # Reference: http://stackoverflow.com/questions/1020892/python-urllib2-read-to-unicode
    if contentType and (contentType.find('charset=') != -1):
        page = unicode(page, contentType.split('charset=')[-1]) # don't use getUnicode here, it needs to stay as is

    return page
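# Illustrative sketch only, not part of the original module: round-tripping
# a gzip-compressed, UTF-8 declared body the way urllib2 would hand it over:
#
#   buf = StringIO.StringIO()
#   gz = gzip.GzipFile(fileobj=buf, mode="wb")
#   gz.write("<html>test</html>")
#   gz.close()
#   decodePage(buf.getvalue(), "gzip", "text/html; charset=utf-8")
#   # -> u"<html>test</html>"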