Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2024-11-25 11:03:47 +03:00)
minor optimization
This commit is contained in:
parent 7c1af97852
commit 440b7efe55
@@ -348,7 +348,7 @@ class Agent:
             nulledCastedFields.append(self.nullAndCastField(field))

         delimiterStr = "%s'%s'%s" % (dbmsDelimiter, kb.chars.delimiter, dbmsDelimiter)
-        nulledCastedConcatFields = delimiterStr.join([field for field in nulledCastedFields])
+        nulledCastedConcatFields = delimiterStr.join(field for field in nulledCastedFields)

         return nulledCastedConcatFields
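A minimal sketch (not sqlmap code) of the pattern this commit applies throughout: a generator expression is passed directly where an iterable is expected, instead of first materializing a list with a comprehension. Both forms produce identical output here.

    fields = ["id", "name", "surname"]
    delimiter = "','"
    print(delimiter.join([field for field in fields]))  # list comprehension: builds a temporary list first
    print(delimiter.join(field for field in fields))    # generator expression: no explicit list literal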
@@ -62,10 +62,10 @@ def md5hash(value):

 def orddecode(value):
     packedString = struct.pack("!"+"I" * len(value), *value)
-    return "".join([chr(char) for char in struct.unpack("!"+"I"*(len(packedString)/4), packedString)])
+    return "".join(chr(char) for char in struct.unpack("!"+"I"*(len(packedString)/4), packedString))

 def ordencode(value):
-    return tuple([ord(char) for char in value])
+    return tuple(ord(char) for char in value)

 def sha1hash(value):
     if sys.modules.has_key('hashlib'):
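For context, a hedged sketch of how the two helpers above round-trip a string through its code points. This assumes the Python 2 semantics of the surrounding code (true integer division in len(packedString)/4, byte-oriented chr/str); it is an illustration, not the exact module.

    import struct

    def ordencode(value):
        # string -> tuple of code points
        return tuple(ord(char) for char in value)

    def orddecode(value):
        # tuple of code points -> string, via 4-byte big-endian unsigned ints (Python 2 division assumed)
        packed = struct.pack("!" + "I" * len(value), *value)
        return "".join(chr(char) for char in struct.unpack("!" + "I" * (len(packed) / 4), packed))

    print(ordencode("abc"))             # (97, 98, 99)
    print(orddecode(ordencode("abc")))  # abc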
@@ -152,30 +152,30 @@ SYBASE_SYSTEM_DBS = ( "master", "model", "sybsystemdb", "sybsystemprocs" )
 DB2_SYSTEM_DBS = ( "NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS",\
                    "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS" )

-MSSQL_ALIASES = [ "microsoft sql server", "mssqlserver", "mssql", "ms" ]
-MYSQL_ALIASES = [ "mysql", "my" ]
-PGSQL_ALIASES = [ "postgresql", "postgres", "pgsql", "psql", "pg" ]
-ORACLE_ALIASES = [ "oracle", "orcl", "ora", "or" ]
-SQLITE_ALIASES = [ "sqlite", "sqlite3" ]
-ACCESS_ALIASES = [ "msaccess", "access", "jet", "microsoft access" ]
-FIREBIRD_ALIASES = [ "firebird", "mozilla firebird", "interbase", "ibase", "fb" ]
-MAXDB_ALIASES = [ "maxdb", "sap maxdb", "sap db" ]
-SYBASE_ALIASES = [ "sybase", "sybase sql server" ]
-DB2_ALIASES = [ "db2", "ibm db2", "ibmdb2" ]
+MSSQL_ALIASES = ( "microsoft sql server", "mssqlserver", "mssql", "ms" )
+MYSQL_ALIASES = ( "mysql", "my" )
+PGSQL_ALIASES = ( "postgresql", "postgres", "pgsql", "psql", "pg" )
+ORACLE_ALIASES = ( "oracle", "orcl", "ora", "or" )
+SQLITE_ALIASES = ( "sqlite", "sqlite3" )
+ACCESS_ALIASES = ( "msaccess", "access", "jet", "microsoft access" )
+FIREBIRD_ALIASES = ( "firebird", "mozilla firebird", "interbase", "ibase", "fb" )
+MAXDB_ALIASES = ( "maxdb", "sap maxdb", "sap db" )
+SYBASE_ALIASES = ( "sybase", "sybase sql server" )
+DB2_ALIASES = ( "db2", "ibm db2", "ibmdb2" )

 SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES
 SUPPORTED_OS = ( "linux", "windows" )

-DBMS_DICT = { DBMS.MSSQL: [MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"],
-              DBMS.MYSQL: [MYSQL_ALIASES, "python pymysql", "http://code.google.com/p/pymysql/"],
-              DBMS.PGSQL: [PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/"],
-              DBMS.ORACLE: [ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/"],
-              DBMS.SQLITE: [SQLITE_ALIASES, "python-pysqlite2", "http://pysqlite.googlecode.com/"],
-              DBMS.ACCESS: [ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/"],
-              DBMS.FIREBIRD: [FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/"],
-              DBMS.MAXDB: [MAXDB_ALIASES, None, None],
-              DBMS.SYBASE: [SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"],
-              DBMS.DB2: [DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/"]
+DBMS_DICT = { DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"),
+              DBMS.MYSQL: (MYSQL_ALIASES, "python pymysql", "http://code.google.com/p/pymysql/"),
+              DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/"),
+              DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/"),
+              DBMS.SQLITE: (SQLITE_ALIASES, "python-pysqlite2", "http://pysqlite.googlecode.com/"),
+              DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/"),
+              DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/"),
+              DBMS.MAXDB: (MAXDB_ALIASES, None, None),
+              DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"),
+              DBMS.DB2: (DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/")
             }

 REFERER_ALIASES = ( "ref", "referer", "referrer" )
@@ -258,10 +258,10 @@ GENERAL_IP_ADDRESS_REGEX = r'\A\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\Z'
 SOAP_REGEX = r"\A(<\?xml[^>]+>)?\s*<soap.+</soap"

 # Reference: http://www.cs.ru.nl/bachelorscripties/2010/Martin_Devillers___0437999___Analyzing_password_strength.pdf
-COMMON_PASSWORD_SUFFIXES = ["1", "123", "2", "12", "3", "13", "7", "11", "5", "22", "23", "01", "4", "07", "21", "14", "10", "06", "08", "8", "15", "69", "16", "6", "18"]
+COMMON_PASSWORD_SUFFIXES = ("1", "123", "2", "12", "3", "13", "7", "11", "5", "22", "23", "01", "4", "07", "21", "14", "10", "06", "08", "8", "15", "69", "16", "6", "18")

 # Reference: http://www.the-interweb.com/serendipity/index.php?/archives/94-A-brief-analysis-of-40,000-leaked-MySpace-passwords.html
-COMMON_PASSWORD_SUFFIXES += ["!", ".", "*", "!!", "?", ";", "..", "!!!", ",", "@"]
+COMMON_PASSWORD_SUFFIXES += ("!", ".", "*", "!!", "?", ";", "..", "!!!", ",", "@")

 # Splitter used between requests in WebScarab log files
 WEBSCARAB_SPLITTER = "### Conversation"
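Augmented assignment on the now-tuple constant also still works: += rebinds the name to a new, longer tuple rather than extending in place, which is fine for a module-level constant. A short sketch with abbreviated values:

    COMMON_PASSWORD_SUFFIXES = ("1", "123", "2")
    COMMON_PASSWORD_SUFFIXES += ("!", ".")
    print(COMMON_PASSWORD_SUFFIXES)  # ('1', '123', '2', '!', '.')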
@@ -147,7 +147,7 @@ def __setRequestParams():
             # No need for url encoding/decoding the user agent
             conf.parameters[PLACE.UA] = urldecode(headerValue)

-            condition = any([not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES)])
+            condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES)))

             if condition:
                 conf.paramDict[PLACE.UA] = { PLACE.UA: headerValue }
@@ -157,7 +157,7 @@ def __setRequestParams():
             # No need for url encoding/decoding the referer
             conf.parameters[PLACE.REFERER] = urldecode(headerValue)

-            condition = any([not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES)])
+            condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES)))

             if condition:
                 conf.paramDict[PLACE.REFERER] = { PLACE.REFERER: headerValue }
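A note on the any([...]) to any((...)) changes here and in cmdLineParser below: any() accepts any iterable, and with a literal (list or tuple) every element is still evaluated before the call; only a generator expression would let any() stop at the first truthy element. A small, standalone sketch of that difference:

    def check(x):
        print("evaluating %s" % x)
        return x

    print(any((check(True), check(False))))       # tuple literal: both calls run before any() sees them
    print(any(check(v) for v in (True, False)))   # generator: stops after the first truthy result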
@@ -630,8 +630,8 @@ def cmdLineParser():
                 expandMnemonics(sys.argv[i+1], parser, args)
                 break

-    if not any([args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, \
-                args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.realTest, args.wizard, args.dependencies]):
+    if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, \
+                args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.realTest, args.wizard, args.dependencies)):
         errMsg = "missing a mandatory parameter (-d, -u, -l, -m, -r, -g, -c, --wizard, --update or --dependencies), "
         errMsg += "-h for help"
         parser.error(errMsg)
@@ -266,7 +266,7 @@ class Connect:
             if not req.has_header(HTTPHEADER.ACCEPT_ENCODING):
                 requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING

-            requestHeaders += "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items()])
+            requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

             if not req.has_header(HTTPHEADER.COOKIE) and cookieStr:
                 requestHeaders += "\n%s" % cookieStr[:-2]
@@ -396,7 +396,7 @@ class Connect:
             responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

             if responseHeaders:
-                logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()])
+                logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items())

             logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

@@ -486,7 +486,7 @@ class Connect:

             responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
             if responseHeaders:
-                logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()])
+                logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items())

             logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

@@ -319,7 +319,7 @@ def __goInferenceProxy(expression, fromUser=False, expected=None, batch=False, r
             expression += FROM_TABLE[Backend.getIdentifiedDbms()]

         outputs = __goInferenceFields(expression, expressionFields, expressionFieldsList, payload, expected, resumeValue=resumeValue, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump)
-        returnValue = ", ".join([output for output in outputs])
+        returnValue = ", ".join(output for output in outputs)

         return returnValue

@@ -49,7 +49,7 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
             responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, getUnicode(msg))

             if headers:
-                logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in headers.items()])
+                logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in headers.items())
             else:
                 logHeaders = ""

@@ -394,7 +394,7 @@ def hashRecognition(value):
             elif isMySQL and regex == HASH.ORACLE_OLD:
                 continue
             elif regex == HASH.CRYPT_GENERIC:
-                if any([getCompiledRegex(GENERAL_IP_ADDRESS_REGEX).match(value), value.lower() == value, value.upper() == value, value.isdigit()]):
+                if any((getCompiledRegex(GENERAL_IP_ADDRESS_REGEX).match(value), value.lower() == value, value.upper() == value, value.isdigit())):
                     continue
             elif getCompiledRegex(regex).match(value):
                 retVal = regex
@@ -124,7 +124,7 @@ def resume(expression, payload):

         if logValue:
             if kb.technique == PAYLOAD.TECHNIQUE.UNION:
-                logValue = ", ".join([value.replace(DUMP_DEL_MARKER, ", ") for value in logValue])
+                logValue = ", ".join(value.replace(DUMP_DEL_MARKER, ", ") for value in logValue)
         else:
             return None
     else:
@@ -67,7 +67,7 @@ class Syntax(GenericSyntax):
                 oldUpper = oldUpper.lstrip("CHR(").rstrip(")")
                 oldUpper = oldUpper.split("&")

-                escaped = "'%s'" % "".join([chr(int(char)) for char in oldUpper])
+                escaped = "'%s'" % "".join(chr(int(char)) for char in oldUpper)
                 expression = expression.replace(old, escaped).replace("'&'", "")

         return expression
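The Syntax.unescape hunks in this and the following files repeat the same list-to-generator change while decoding CHR()/CHAR()-style payload fragments back into quoted string literals. A hedged sketch of that decoding idea, using a made-up fragment and a simplified replace-based cleanup rather than the exact per-DBMS routine:

    # Not the exact sqlmap routine; illustrative only.
    old = "CHR(115)&CHR(113)&CHR(108)"
    digits = old.upper().replace("CHR(", "").replace(")", "").split("&")
    escaped = "'%s'" % "".join(chr(int(ch)) for ch in digits)
    print(escaped)  # 'sql'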
@@ -66,7 +66,7 @@ class Syntax(GenericSyntax):
                 oldUpper = oldUpper.lstrip("CHR(").rstrip(")")
                 oldUpper = oldUpper.split("||")

-                escaped = "'%s'" % "".join([chr(int(char)) for char in oldUpper])
+                escaped = "'%s'" % "".join(chr(int(char)) for char in oldUpper)
                 expression = expression.replace(old, escaped)

         return expression
@@ -69,7 +69,7 @@ class Syntax(GenericSyntax):
                 oldUpper = oldUpper.lstrip("ASCII_CHAR(").rstrip(")")
                 oldUpper = oldUpper.split("||")

-                escaped = "'%s'" % "".join([chr(int(char)) for char in oldUpper])
+                escaped = "'%s'" % "".join(chr(int(char)) for char in oldUpper)
                 expression = expression.replace(old, escaped).replace("'||'", "")

         return expression
@@ -47,7 +47,7 @@ class Syntax(GenericSyntax):
                 oldUpper = oldUpper.lstrip("CHAR(").rstrip(")")
                 oldUpper = oldUpper.split(",")

-                escaped = "'%s'" % "".join([chr(int(char)) for char in oldUpper])
+                escaped = "'%s'" % "".join(chr(int(char)) for char in oldUpper)
                 expression = expression.replace(old, escaped)

         original = expression
@@ -64,7 +64,7 @@ class Syntax(GenericSyntax):
                 oldUpper = oldUpper.replace("CHR(", "").replace(")", "")
                 oldUpper = oldUpper.split("||")

-                escaped = "'%s'" % "".join([chr(int(char)) for char in oldUpper])
+                escaped = "'%s'" % "".join(chr(int(char)) for char in oldUpper)
                 expression = expression.replace(old, escaped)

         return expression
@@ -65,7 +65,7 @@ class Syntax(GenericSyntax):
                 oldUpper = oldUpper.replace("CHR(", "").replace(")", "")
                 oldUpper = oldUpper.split("||")

-                escaped = "'%s'" % "".join([chr(int(char)) for char in oldUpper])
+                escaped = "'%s'" % "".join(chr(int(char)) for char in oldUpper)
                 expression = expression.replace(old, escaped)

         return expression
@@ -65,7 +65,7 @@ class Syntax(GenericSyntax):
                 oldUpper = oldUpper.replace("CHAR(", "").replace(")", "")
                 oldUpper = oldUpper.split("+")

-                escaped = "'%s'" % "".join([chr(int(char)) for char in oldUpper])
+                escaped = "'%s'" % "".join(chr(int(char)) for char in oldUpper)
                 expression = expression.replace(old, escaped)

         return expression
@@ -897,7 +897,7 @@ class Enumeration:
             value = filter(None, arrayizeValue(value))

             if len(value) > 0 and not isinstance(value[0], (list, tuple)):
-                value = zip([conf.db for i in xrange(len(value))], value)
+                value = zip((conf.db for i in xrange(len(value))), value)

             for db, table in filterPairValues(value):
                 db = safeSQLIdentificatorNaming(db)
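The Enumeration hunk pairs each retrieved table with the current database name; under Python 2 (matching the xrange and list-returning zip of the surrounding code), zip() consumes a generator the same way it consumes a list. A small sketch with made-up values standing in for conf.db and the retrieved tables:

    tables = ["users", "orders", "logs"]
    current_db = "testdb"  # hypothetical stand-in for conf.db
    pairs = zip((current_db for _ in xrange(len(tables))), tables)
    print(pairs)  # [('testdb', 'users'), ('testdb', 'orders'), ('testdb', 'logs')]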