#!/usr/bin/env python

"""
$Id$

This file is part of the sqlmap project, http://sqlmap.sourceforge.net.

Copyright (c) 2007-2009 Bernardo Damele A. G. <bernardo.damele@gmail.com>
Copyright (c) 2006 Daniele Bellucci <daniele.bellucci@gmail.com>

sqlmap is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation version 2 of the License.

sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.

You should have received a copy of the GNU General Public License along
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""

import os
import random
import re
import socket
import string
import sys
import time
import urlparse
import ntpath
import posixpath

from lib.contrib import magic
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.data import temp
from lib.core.exception import sqlmapFilePathException
from lib.core.settings import IS_WIN
from lib.core.settings import SQL_STATEMENTS
from lib.core.settings import VERSION_STRING

def paramToDict(place, parameters=None):
    """
    Split the parameters into names and values, check if these parameters
    are within the testable parameters and return them in a dictionary.

    @param place: where sqlmap has to work, can be GET, POST or Cookie.
    @type place: C{str}

    @param parameters: parameters string in the format for instance
    'p1=v1&p2=v2' (GET and POST) or 'p1=v1;p2=v2' (Cookie).
    @type parameters: C{str}

    @return: the parameters in a dictionary.
    @rtype: C{dict}
    """

    testableParameters = {}

    if conf.parameters.has_key(place) and not parameters:
        parameters = conf.parameters[place]

    parameters = parameters.replace(", ", ",")

    if place == "Cookie":
        splitParams = parameters.split(";")
    else:
        splitParams = parameters.split("&")

    for element in splitParams:
        elem = element.split("=")

        if len(elem) == 2:
            parameter = elem[0].replace(" ", "")

            condition = not conf.testParameter
            condition |= parameter in conf.testParameter

            if condition:
                value = elem[1]
                testableParameters[parameter] = value

    if conf.testParameter and not testableParameters:
        paramStr = ", ".join(test for test in conf.testParameter)

        if len(conf.testParameter) > 1:
            warnMsg = "the testable parameters '%s' " % paramStr
            warnMsg += "you provided are not within the %s" % place
        else:
            parameter = conf.testParameter[0]

            warnMsg = "the testable parameter '%s' " % paramStr
            warnMsg += "you provided is not within the %s" % place

        if conf.multipleTargets:
            warnMsg += ", skipping to next url"

        logger.warn(warnMsg)

    elif len(conf.testParameter) != len(testableParameters.keys()):
        for parameter in conf.testParameter:
            if not testableParameters.has_key(parameter):
                warnMsg = "the testable parameter '%s' " % parameter
                warnMsg += "you provided is not within the %s" % place
                logger.warn(warnMsg)

    return testableParameters
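
# Usage sketch (illustrative, assuming conf.testParameter is empty so every
# parameter qualifies):
#   paramToDict("GET", "id=1&cat=2")          -> {'id': '1', 'cat': '2'}
#   paramToDict("Cookie", "sid=abc; lang=en") -> {'sid': 'abc', 'lang': 'en'}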

def formatDBMSfp(versions=None):
    """
    This function formats the back-end DBMS fingerprint value and returns
    it as a human readable string.

    @return: detected back-end DBMS based upon fingerprint techniques.
    @rtype: C{str}
    """

    if not versions:
        versions = kb.dbmsVersion

    if isinstance(versions, str):
        return "%s %s" % (kb.dbms, versions)
    elif isinstance(versions, (list, set, tuple)):
        return "%s %s" % (kb.dbms, " and ".join([version for version in versions]))
    elif not versions:
        warnMsg = "unable to extensively fingerprint the back-end "
        warnMsg += "DBMS version"
        logger.warn(warnMsg)

        return kb.dbms
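
# Usage sketch (illustrative, assuming kb.dbms is "MySQL"):
#   formatDBMSfp("5.0.51")             -> "MySQL 5.0.51"
#   formatDBMSfp(["5.0.11", "5.0.51"]) -> "MySQL 5.0.11 and 5.0.51"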

def formatFingerprintString(values, chain=" or "):
    strJoin = "|".join([v for v in values])

    return strJoin.replace("|", chain)

def formatFingerprint(target, info):
    """
    This function formats the back-end operating system fingerprint value
    and returns it as a human readable string.

    Example of info (kb.headersFp) dictionary:

    {
      'distrib': set(['Ubuntu']),
      'type': set(['Linux']),
      'technology': set(['PHP 5.2.6', 'Apache 2.2.9']),
      'release': set(['8.10'])
    }

    Example of info (kb.bannerFp) dictionary:

    {
      'sp': set(['Service Pack 4']),
      'dbmsVersion': '8.00.194',
      'dbmsServicePack': '0',
      'distrib': set(['2000']),
      'dbmsRelease': '2000',
      'type': set(['Windows'])
    }

    @return: detected back-end operating system based upon fingerprint
    techniques.
    @rtype: C{str}
    """

    infoStr = ""

    if info and "type" in info:
        infoStr += "%s operating system: %s" % (target, formatFingerprintString(info["type"]))

    if "distrib" in info:
        infoStr += " %s" % formatFingerprintString(info["distrib"])

    if "release" in info:
        infoStr += " %s" % formatFingerprintString(info["release"])

    if "sp" in info:
        infoStr += " %s" % formatFingerprintString(info["sp"])

    if "codename" in info:
        infoStr += " (%s)" % formatFingerprintString(info["codename"])

    if "technology" in info:
        infoStr += "\nweb application technology: %s" % formatFingerprintString(info["technology"], ", ")

    return infoStr
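
# Usage sketch (illustrative): with the kb.headersFp example shown in the
# docstring above, formatFingerprint("web server", info) would return a
# string along the lines of:
#   web server operating system: Linux Ubuntu 8.10
#   web application technology: PHP 5.2.6, Apache 2.2.9
# (ordering may vary whenever a field holds more than one value).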

def getHtmlErrorFp():
    """
    This function parses the knowledge base htmlFp list and returns its
    values formatted as a human readable string.

    @return: list of possible back-end DBMS based upon error message
    parsing.
    @rtype: C{str}
    """

    htmlParsed = ""

    if not kb.htmlFp:
        return None

    if len(kb.htmlFp) == 1:
        htmlVer = kb.htmlFp[0]
        htmlParsed = htmlVer
    elif len(kb.htmlFp) > 1:
        htmlParsed = " or ".join([htmlFp for htmlFp in kb.htmlFp])

    return htmlParsed

def getDocRoot():
    docRoot = None
    pagePath = directoryPath(conf.path)

    if kb.os == "Windows":
        defaultDocRoot = "C:/Inetpub/wwwroot/"
    else:
        defaultDocRoot = "/var/www/"

    if kb.absFilePaths:
        for absFilePath in kb.absFilePaths:
            absFilePath = normalizePath(absFilePath)
            absFilePathWin = None

            if re.match("[A-Za-z]:(\\[\w.\\]*)?", absFilePath):
                absFilePathWin = absFilePath
                absFilePath = absFilePath[2:].replace("\\", "/")

            if pagePath in absFilePath:
                index = absFilePath.index(pagePath)
                docRoot = absFilePath[:index]

                if absFilePathWin:
                    docRoot = "C:/%s" % docRoot.replace("\\", "/")

                break

    if docRoot:
        infoMsg = "retrieved the web server document root: '%s'" % docRoot
        logger.info(infoMsg)
    else:
        warnMsg = "unable to retrieve the web server document root"
        logger.warn(warnMsg)

        message = "please provide the web server document root "
        message += "[%s]: " % defaultDocRoot
        inputDocRoot = readInput(message, default=defaultDocRoot)

        if inputDocRoot:
            docRoot = inputDocRoot
        else:
            docRoot = defaultDocRoot

    return docRoot

def getDirs():
    directories = set()

    if kb.os == "Windows":
        defaultDir = "C:/Inetpub/wwwroot/test/"
    else:
        defaultDir = "/var/www/test/"

    if kb.absFilePaths:
        infoMsg = "retrieved web server full paths: "
        infoMsg += "'%s'" % ", ".join(path for path in kb.absFilePaths)
        logger.info(infoMsg)

        for absFilePath in kb.absFilePaths:
            if absFilePath:
                directories.add(os.path.dirname(absFilePath))
    else:
        warnMsg = "unable to retrieve any web server path"
        logger.warn(warnMsg)

    message = "please provide any additional web server full path to try "
    message += "to upload the agent [%s]: " % defaultDir
    inputDirs = readInput(message, default=defaultDir)

    if inputDirs:
        inputDirs = inputDirs.replace(", ", ",")
        inputDirs = inputDirs.split(",")

        for inputDir in inputDirs:
            if inputDir:
                directories.add(inputDir)
    else:
        directories.add(defaultDir)

    return directories

def filePathToString(filePath):
    strRepl = filePath.replace("/", "_").replace("\\", "_")
    strRepl = strRepl.replace(" ", "_").replace(":", "_")

    return strRepl

def dataToStdout(data):
    sys.stdout.write(data)
    sys.stdout.flush()

def dataToSessionFile(data):
    if not conf.sessionFile:
        return

    conf.sessionFP.write(data)
    conf.sessionFP.flush()

def dataToDumpFile(dumpFile, data):
    dumpFile.write(data)
    dumpFile.flush()

def dataToOutFile(data):
    if not data:
        return "No data retrieved"

    rFile = filePathToString(conf.rFile)
    rFilePath = "%s%s%s" % (conf.filePath, os.sep, rFile)
    rFileFP = open(rFilePath, "wb")

    rFileFP.write(data)
    rFileFP.flush()
    rFileFP.close()

    return rFilePath

def strToHex(inpStr):
    """
    @param inpStr: string to be converted into its hexadecimal value.
    @type inpStr: C{str}

    @return: the hexadecimal representation of inpStr.
    @rtype: C{str}
    """

    hexStr = ""

    for character in inpStr:
        if character == "\n":
            character = " "

        hexChar = "%2x" % ord(character)
        hexChar = hexChar.replace(" ", "0")
        hexChar = hexChar.upper()

        hexStr += hexChar

    return hexStr
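
# Usage sketch (illustrative): strToHex("id=1") -> "69643D31"; newlines are
# first turned into spaces and every byte is rendered as two uppercase
# hexadecimal digits.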

def fileToStr(fileName):
    """
    @param fileName: path of the file whose content is read and returned
    without newlines.
    @type fileName: C{str}

    @return: the file content as a string without TAB and NEWLINE.
    @rtype: C{str}
    """

    filePointer = open(fileName, "r")
    fileText = filePointer.read()

    return fileText.replace("    ", "").replace("\t", "").replace("\r", "").replace("\n", " ")

def fileToHex(fileName):
    """
    @param fileName: path of the file whose content is read and returned as
    a hexadecimal string.
    @type fileName: C{str}

    @return: the file content as a hexadecimal string.
    @rtype: C{str}
    """

    fileText = fileToStr(fileName)
    hexFile = strToHex(fileText)

    return hexFile

def readInput(message, default=None):
    """
    @param message: message to display on terminal.
    @type message: C{str}

    @return: a string read from keyboard as input.
    @rtype: C{str}
    """

    if "\n" in message:
        message += "\n> "

    if conf.batch and default:
        infoMsg = "%s%s" % (message, str(default))
        logger.info(infoMsg)

        debugMsg = "used the default behaviour, running in batch mode"
        logger.debug(debugMsg)

        data = default
    else:
        data = raw_input(message)

        if not data:
            data = default

    return data

def randomRange(start=0, stop=1000):
    """
    @param start: starting number.
    @type start: C{int}

    @param stop: last number.
    @type stop: C{int}

    @return: a random number within the range.
    @rtype: C{int}
    """

    return int(random.randint(start, stop))

def randomInt(length=4):
    """
    @param length: number of digits of the random integer.
    @type length: C{int}

    @return: a random integer of the given number of digits.
    @rtype: C{int}
    """

    return int("".join([random.choice(string.digits) for _ in xrange(0, length)]))

def randomStr(length=5, lowercase=False):
    """
    @param length: length of the random string.
    @type length: C{int}

    @return: a random string of characters.
    @rtype: C{str}
    """

    if lowercase:
        rndStr = "".join([random.choice(string.lowercase) for _ in xrange(0, length)])
    else:
        rndStr = "".join([random.choice(string.letters) for _ in xrange(0, length)])

    return rndStr
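
# Usage sketch (illustrative, output varies per call):
#   randomStr()                  -> e.g. "KbTGa"
#   randomStr(8, lowercase=True) -> e.g. "qwdnxzor"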

def sanitizeStr(inpStr):
    """
    @param inpStr: inpStr to sanitize: cast to str datatype and replace
    newlines with one space and strip carriage returns.
    @type inpStr: C{str}

    @return: sanitized inpStr
    @rtype: C{str}
    """

    cleanString = str(inpStr)
    cleanString = cleanString.replace("\n", " ").replace("\r", "")

    return cleanString

def checkFile(filename):
    """
    @param filename: filename to check if it exists.
    @type filename: C{str}
    """

    if not os.path.exists(filename):
        raise sqlmapFilePathException, "unable to read file '%s'" % filename

def replaceNewlineTabs(inpStr):
    replacedString = inpStr.replace("\n", "__NEWLINE__").replace("\t", "__TAB__")
    replacedString = replacedString.replace(temp.delimiter, "__DEL__")

    return replacedString

def banner():
    """
    This function prints the sqlmap banner with its version
    """

    print """
    %s
    by Bernardo Damele A. G. <bernardo.damele@gmail.com>
    """ % VERSION_STRING

def parsePasswordHash(password):
    blank = " " * 8

    if not password or password == " ":
        password = "NULL"

    if kb.dbms == "Microsoft SQL Server" and password != "NULL":
        hexPassword = password
        password = "%s\n" % hexPassword
        password += "%sheader: %s\n" % (blank, hexPassword[:6])
        password += "%ssalt: %s\n" % (blank, hexPassword[6:14])
        password += "%smixedcase: %s\n" % (blank, hexPassword[14:54])

        if kb.dbmsVersion[0] not in ( "2005", "2008" ):
            password += "%suppercase: %s" % (blank, hexPassword[54:])

    return password
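
# Layout sketch (illustrative): for Microsoft SQL Server the hash value is
# sliced positionally, so a pre-2005 hash is reported as
#   <full hash>
#           header:    <chars 0-5>
#           salt:      <chars 6-13>
#           mixedcase: <chars 14-53>
#           uppercase: <chars 54-end>
# while 2005/2008 hashes omit the uppercase part.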

def cleanQuery(query):
    upperQuery = query

    for sqlStatements in SQL_STATEMENTS.values():
        for sqlStatement in sqlStatements:
            sqlStatementEsc = sqlStatement.replace("(", "\\(")
            queryMatch = re.search("(%s)" % sqlStatementEsc, query, re.I)

            if queryMatch:
                upperQuery = upperQuery.replace(queryMatch.group(1), sqlStatement.upper())

    return upperQuery
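
# Usage sketch (illustrative, assuming "select " is among the SQL_STATEMENTS
# values): cleanQuery("select id from users") -> "SELECT id from users",
# i.e. only the recognised statement keyword(s) get uppercased.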

def setPaths():
    # sqlmap paths
    paths.SQLMAP_CONTRIB_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "lib", "contrib")
    paths.SQLMAP_SHELL_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "shell")
    paths.SQLMAP_TXT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "txt")
    paths.SQLMAP_UDF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "udf")
    paths.SQLMAP_XML_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "xml")
    paths.SQLMAP_XML_BANNER_PATH = os.path.join(paths.SQLMAP_XML_PATH, "banner")
    paths.SQLMAP_OUTPUT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "output")
    paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
    paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")

    # sqlmap files
    paths.SQLMAP_HISTORY = os.path.join(paths.SQLMAP_ROOT_PATH, ".sqlmap_history")
    paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr())
    paths.FUZZ_VECTORS = os.path.join(paths.SQLMAP_TXT_PATH, "fuzz_vectors.txt")
    paths.ERRORS_XML = os.path.join(paths.SQLMAP_XML_PATH, "errors.xml")
    paths.QUERIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "queries.xml")
    paths.GENERIC_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "generic.xml")
    paths.MSSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mssql.xml")
    paths.MYSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mysql.xml")
    paths.ORACLE_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "oracle.xml")
    paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")

def weAreFrozen():
    """
    Returns whether we are frozen via py2exe.
    This will affect how we find out where we are located.
    Reference: http://www.py2exe.org/index.cgi/WhereAmI
    """

    return hasattr(sys, "frozen")

def parseTargetUrl():
    """
    Parse target url and set some attributes into the configuration
    singleton.
    """

    if not conf.url:
        return

    if not re.search("^http[s]*://", conf.url):
        if ":443/" in conf.url:
            conf.url = "https://" + conf.url
        else:
            conf.url = "http://" + conf.url

    __urlSplit = urlparse.urlsplit(conf.url)
    __hostnamePort = __urlSplit[1].split(":")

    conf.scheme = __urlSplit[0]
    conf.path = __urlSplit[2]
    conf.hostname = __hostnamePort[0]

    if len(__hostnamePort) == 2:
        conf.port = int(__hostnamePort[1])
    elif conf.scheme == "https":
        conf.port = 443
    else:
        conf.port = 80

    if __urlSplit[3]:
        conf.parameters["GET"] = __urlSplit[3]

    conf.url = "%s://%s:%d%s" % (conf.scheme, conf.hostname, conf.port, conf.path)

def expandAsteriskForColumns(expression):
    # If the user provided an asterisk rather than the column name(s),
    # sqlmap will retrieve the columns itself and reprocess the SQL
    # query string (expression)
    asterisk = re.search("^SELECT\s+\*\s+FROM\s+([\w\.\_]+)\s*", expression, re.I)

    if asterisk:
        infoMsg = "you did not provide the fields in your query. "
        infoMsg += "sqlmap will retrieve the column names itself"
        logger.info(infoMsg)

        dbTbl = asterisk.group(1)

        if dbTbl and "." in dbTbl:
            conf.db, conf.tbl = dbTbl.split(".")
        else:
            conf.tbl = dbTbl

        columnsDict = conf.dbmsHandler.getColumns(onlyColNames=True)

        if columnsDict and conf.db in columnsDict and conf.tbl in columnsDict[conf.db]:
            columns = columnsDict[conf.db][conf.tbl].keys()
            columns.sort()
            columnsStr = ", ".join([column for column in columns])
            expression = expression.replace("*", columnsStr, 1)

            infoMsg = "the query with column names is: "
            infoMsg += "%s" % expression
            logger.info(infoMsg)

    return expression

def getRange(count, dump=False, plusOne=False):
    count = int(count)
    indexRange = None
    limitStart = 1
    limitStop = count

    if dump:
        if isinstance(conf.limitStop, int) and conf.limitStop > 0 and conf.limitStop < limitStop:
            limitStop = conf.limitStop

        if isinstance(conf.limitStart, int) and conf.limitStart > 0 and conf.limitStart <= limitStop:
            limitStart = conf.limitStart

    if plusOne:
        indexRange = range(limitStart, limitStop + 1)
    else:
        indexRange = range(limitStart - 1, limitStop)

    return indexRange
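
# Usage sketch (illustrative): getRange(10) -> [0..9] (zero-based offsets),
# getRange(10, plusOne=True) -> [1..10]; with dump=True and, say,
# conf.limitStart = 3, conf.limitStop = 7, it yields [2..6]
# (or [3..7] with plusOne=True).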

def parseUnionPage(output, expression, partial=False, condition=None, sort=True):
    data = []

    outCond1 = ( output.startswith(temp.start) and output.endswith(temp.stop) )
    outCond2 = ( output.startswith("__START__") and output.endswith("__STOP__") )

    if outCond1 or outCond2:
        if outCond1:
            regExpr = '%s(.*?)%s' % (temp.start, temp.stop)
        elif outCond2:
            regExpr = '__START__(.*?)__STOP__'

        output = re.findall(regExpr, output, re.S)

        if condition is None:
            condition = (
                kb.resumedQueries and conf.url in kb.resumedQueries.keys()
                and expression in kb.resumedQueries[conf.url].keys()
            )

        if partial or not condition:
            logOutput = "".join(["__START__%s__STOP__" % replaceNewlineTabs(value) for value in output])
            dataToSessionFile("[%s][%s][%s][%s][%s]\n" % (conf.url, kb.injPlace, conf.parameters[kb.injPlace], expression, logOutput))

        if sort:
            output = set(output)

        for entry in output:
            info = []

            if "__DEL__" in entry:
                entry = entry.split("__DEL__")
            else:
                entry = entry.split(temp.delimiter)

            if len(entry) == 1:
                data.append(entry[0])
            else:
                for value in entry:
                    info.append(value)

                data.append(info)
    else:
        data = output

    if len(data) == 1 and isinstance(data[0], str):
        data = data[0]

    return data

def getDelayQuery(andCond=False):
    query = None

    if kb.dbms in ("MySQL", "PostgreSQL"):
        if not kb.data.banner:
            conf.dbmsHandler.getVersionFromBanner()

        banVer = kb.bannerFp["dbmsVersion"]

        if (kb.dbms == "MySQL" and banVer >= "5.0.12") or (kb.dbms == "PostgreSQL" and banVer >= "8.2"):
            query = queries[kb.dbms].timedelay % conf.timeSec

            if kb.dbms == "MySQL" and andCond:
                query = query.replace("SELECT ", "")
        else:
            query = queries[kb.dbms].timedelay2 % conf.timeSec
    else:
        query = queries[kb.dbms].timedelay % conf.timeSec

    return query

def getLocalIP():
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((conf.hostname, conf.port))
    ip, _ = s.getsockname()
    s.close()

    return ip

def getRemoteIP():
    return socket.gethostbyname(conf.hostname)

def getFileType(filePath):
    try:
        magicFileType = magic.from_file(filePath)
    except:
        return "unknown"

    if "ASCII" in magicFileType or "text" in magicFileType:
        return "text"
    else:
        return "binary"

def pollProcess(process):
    while True:
        dataToStdout(".")
        time.sleep(1)

        returncode = process.poll()

        if returncode is not None:
            if returncode == 0:
                dataToStdout(" done\n")
            elif returncode < 0:
                dataToStdout(" process terminated by signal %d\n" % returncode)
            elif returncode > 0:
                dataToStdout(" quit unexpectedly with return code %d\n" % returncode)

            break

def getCharset(charsetType=None):
    asciiTbl = []

    if charsetType is None:
        asciiTbl = range(0, 128)

    # 0 or 1
    elif charsetType == 1:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(47, 50))

    # Digits
    elif charsetType == 2:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(47, 58))

    # Hexadecimal
    elif charsetType == 3:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(47, 58))
        asciiTbl.extend(range(64, 71))
        asciiTbl.extend(range(96, 103))

    # Characters
    elif charsetType == 4:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(64, 91))
        asciiTbl.extend(range(96, 123))

    # Characters and digits
    elif charsetType == 5:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(47, 58))
        asciiTbl.extend(range(64, 91))
        asciiTbl.extend(range(96, 123))

    return asciiTbl
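
# Usage sketch (illustrative): getCharset(2) -> [0, 1, 47, 48, ..., 57],
# i.e. the 0/1 markers plus the ASCII codes for "/" and the digits 0-9,
# useful when the value being inferred is known to be numeric.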

def searchEnvPath(fileName):
    envPaths = os.environ["PATH"]
    result = None

    if IS_WIN:
        envPaths = envPaths.split(";")
    else:
        envPaths = envPaths.split(":")

    for envPath in envPaths:
        envPath = envPath.replace(";", "")
        result = os.path.exists(os.path.normpath(os.path.join(envPath, fileName)))

        if result:
            break

    return result

def sanitizeCookie(cookieStr, warn=False):
    if cookieStr:
        result = ""
        changed = False

        for part in cookieStr.split(';'):
            index = part.find('=') + 1

            if index > 0:
                name = part[:index - 1].strip()
                value = part[index:].replace(",", "%2C").replace(";", "%3B").replace(" ", "%20")

                if value != part[index:]:
                    changed = True

                result += ";%s=%s" % (name, value)
            elif part.strip().lower() != "secure":
                result += "%s%s" % ("%3B", part.replace(",", "%2C").replace(";", "%3B").replace(" ", "%20"))
            else:
                result += ";secure"

        if result.startswith(';'):
            result = result[1:]
        elif result.startswith('%3B'):
            result = result[3:]

        if changed and warn:
            warnMsg = "the provided cookie contains one of the problematic "
            warnMsg += "characters ' ,;' and has been temporarily sanitized"
            logger.warn(warnMsg)

        return result
    else:
        return None
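
# Usage sketch (illustrative):
#   sanitizeCookie("id=1,2; path=/") -> "id=1%2C2;path=/"
# commas, semicolons and spaces inside values are percent-encoded so the
# cookie can travel safely in an HTTP header.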

def directoryPath(path):
    retVal = None

    if path.find('/') != -1:
        retVal = posixpath.dirname(path)
    else:
        retVal = ntpath.dirname(path)

    return retVal

def normalizePath(path):
    retVal = None

    if path.find('/') != -1:
        retVal = posixpath.normpath(path)
    else:
        retVal = ntpath.normpath(path)

    return retVal
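
# Usage sketch (illustrative): both helpers pick the POSIX or NT flavour
# based on the presence of "/" in the path:
#   directoryPath("/var/www/page.php")           -> "/var/www"
#   directoryPath(r"C:\Inetpub\wwwroot\page.asp") -> r"C:\Inetpub\wwwroot"
#   normalizePath("/var/www/../tmp/./x")          -> "/var/tmp/x"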