#!/usr/bin/env python
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from xml.sax.handler import ContentHandler
from lib.core.common import parseXmlFile
from lib.core.data import kb
from lib.core.data import paths
from lib.core.threads import getCurrentThreadData
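
# The handler below consumes sqlmap's xml/errors.xml. A minimal sketch of the
# assumed layout (illustrative only, not the authoritative file): <dbms> elements
# carry the DBMS name in their "value" attribute and contain <error> elements
# whose "regexp" attribute matches that DBMS's error messages, e.g.:
#
#     <dbms value="MySQL">
#         <error regexp="SQL syntax.*?MySQL"/>
#     </dbms>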
class HTMLHandler(ContentHandler):
    """
    This class defines methods to parse the input HTML page to
    fingerprint the back-end database management system
    """

    def __init__(self, page):
        ContentHandler.__init__(self)

        self._dbms = None
        self._page = (page or "")
        self._lower_page = self._page.lower()

        self.dbms = None

    def _markAsErrorPage(self):
        # Remember the current request and its page content as the last seen error page
        threadData = getCurrentThreadData()
        threadData.lastErrorPage = (threadData.lastRequestUID, self._page)

    def startElement(self, name, attrs):
        # Stop processing further elements once a DBMS has already been identified
        if self.dbms:
            return

        if name == "dbms":
            self._dbms = attrs.get("value")

        elif name == "error":
            regexp = attrs.get("regexp")

            if regexp not in kb.cache.regex:
                # Cache the longest literal word of the regular expression as a cheap
                # precondition before running the full (case-insensitive) regex search
                keywords = re.findall(r"\w+", re.sub(r"\\.", " ", regexp))
                keywords = sorted(keywords, key=len)
                kb.cache.regex[regexp] = keywords[-1].lower()

            if kb.cache.regex[regexp] in self._lower_page and re.search(regexp, self._page, re.I):
                self.dbms = self._dbms
                self._markAsErrorPage()
def htmlParser(page):
    """
    This function calls a class that parses the input HTML page to
    fingerprint the back-end database management system
    """

    xmlfile = paths.ERRORS_XML
    handler = HTMLHandler(page)
    key = hash(page)

    # Reuse the cached result if this exact page content has already been parsed
    if key in kb.cache.parsedDbms:
        retVal = kb.cache.parsedDbms[key]
        if retVal:
            handler._markAsErrorPage()
        return retVal

    parseXmlFile(xmlfile, handler)

    if handler.dbms and handler.dbms not in kb.htmlFp:
        kb.lastParserStatus = handler.dbms
        kb.htmlFp.append(handler.dbms)
    else:
        kb.lastParserStatus = None

    kb.cache.parsedDbms[key] = handler.dbms

    # generic SQL warning/error messages
    if re.search(r"SQL (warning|error|syntax)", page, re.I):
        handler._markAsErrorPage()

    return handler.dbms
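
# Illustrative usage only (assumes sqlmap's global kb/paths structures have been
# initialized by the framework; "content" stands for a response body obtained by
# the caller and is not part of this module):
#
#     from lib.parse.html import htmlParser
#
#     dbms = htmlParser(content)    # e.g. "MySQL" when a known error message is found
#     if dbms:
#         ...                       # back-end DBMS fingerprinted from the error page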