#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2012 sqlmap developers (http://www.sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import logging
import os
import subprocess
import sys

from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import DBMS
from lib.core.enums import DBMS_DIRECTORY_NAME
from lib.core.revision import getRevisionNumber

# sqlmap version and site
VERSION = "1.0-dev"
REVISION = getRevisionNumber()
VERSION_STRING = "sqlmap/%s%s" % (VERSION, " (r%s)" % REVISION if REVISION else "")
DESCRIPTION = "automatic SQL injection and database takeover tool"
SITE = "http://www.sqlmap.org"
ML = "sqlmap-users@lists.sourceforge.net"

# minimum distance of ratio from kb.matchRatio to result in True
DIFF_TOLERANCE = 0.05
CONSTANT_RATIO = 0.9

# lower and upper values for match ratio in case of stable page
LOWER_RATIO_BOUND = 0.02
UPPER_RATIO_BOUND = 0.98

# sqlmap logger
logging.addLevelName(CUSTOM_LOGGING.PAYLOAD, "PAYLOAD")
logging.addLevelName(CUSTOM_LOGGING.TRAFFIC_OUT, "TRAFFIC OUT")
logging.addLevelName(CUSTOM_LOGGING.TRAFFIC_IN, "TRAFFIC IN")

LOGGER = logging.getLogger("sqlmapLog")
LOGGER_HANDLER = logging.StreamHandler(sys.stdout)
FORMATTER = logging.Formatter("\r[%(asctime)s] [%(levelname)s] %(message)s", "%H:%M:%S")

LOGGER_HANDLER.setFormatter(FORMATTER)
LOGGER.addHandler(LOGGER_HANDLER)
LOGGER.setLevel(logging.WARN)

# dump markers
DUMP_NEWLINE_MARKER = "__NEWLINE__"
DUMP_CR_MARKER = "__CARRIAGE_RETURN__"
DUMP_TAB_MARKER = "__TAB__"
DUMP_DEL_MARKER = "__DEL__"

# markers for special cases when parameter values contain html encoded characters
PARAMETER_AMP_MARKER = "__AMP__"
PARAMETER_SEMICOLON_MARKER = "__SEMICOLON__"

PARTIAL_VALUE_MARKER = "__PARTIAL__"

URI_QUESTION_MARKER = "__QUESTION_MARK__"

PAYLOAD_DELIMITER = "\x00"
CHAR_INFERENCE_MARK = "%c"
PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7e-\xff]"

# regular expression used for extracting results from google search
GOOGLE_REGEX = r"url\?q=(http[^>]+)&sa=U&"

# regular expression used for extracting content from "textual" tags
TEXT_TAG_REGEX = r"(?si)<(abbr|acronym|b|blockquote|br|center|cite|code|dt|em|font|h\d|i|li|p|pre|q|strong|sub|sup|td|th|title|tt|u)(?!\w).*?>(?P<result>[^<]+)"

# dumping characters used in GROUP_CONCAT MySQL technique
CONCAT_ROW_DELIMITER = ','
CONCAT_VALUE_DELIMITER = '|'

# coefficient used for a time-based query delay checking (must be >= 7)
TIME_STDEV_COEFF = 7

# standard deviation after which a warning message should be displayed about connection lags
WARN_TIME_STDEV = 0.5

# minimum length of usable union injected response (quick defense against substr fields)
UNION_MIN_RESPONSE_CHARS = 10

# coefficient used for a union-based number of columns checking (must be >= 7)
UNION_STDEV_COEFF = 7

# length of queue for candidates for time delay adjustment
TIME_DELAY_CANDIDATES = 3

# standard value for HTTP Accept header
HTTP_ACCEPT_HEADER_VALUE = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"

# HTTP timeout in silent mode
HTTP_SILENT_TIMEOUT = 3

# maximum number of techniques used in inject.py/getValue() per one value
MAX_TECHNIQUES_PER_VALUE = 2

# suffix used for naming meta databases in DBMS(es) without explicit database name
METADB_SUFFIX = "_masterdb"

# minimum time response set needed for time-comparison based on standard deviation
MIN_TIME_RESPONSES = 10

# minimum comparison ratio set needed for searching valid union column number based on standard deviation
MIN_UNION_RESPONSES = 5
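
# Illustrative sketch only (not part of the original module): one plausible way the
# comparison constants above could be combined when deciding whether a response page
# "matches" a reference page. The helper name and the difflib-based ratio are
# assumptions made for the sake of the example, not sqlmap's actual comparison routine.
def _example_page_matches(page, reference_page, match_ratio=None):
    import difflib

    # similarity ratio between the candidate page and the reference page
    ratio = difflib.SequenceMatcher(None, page, reference_page).quick_ratio()

    if match_ratio is None:
        # without an established kb.matchRatio, trust only ratios inside the stable-page bounds
        return LOWER_RATIO_BOUND <= ratio <= UPPER_RATIO_BOUND

    # otherwise a page matches when the ratio is either very high on its own
    # or within DIFF_TOLERANCE of the established match ratio
    return ratio > CONSTANT_RATIO or abs(ratio - match_ratio) <= DIFF_TOLERANCE
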
# after these number of blanks at the end inference should stop (just in case)
INFERENCE_BLANK_BREAK = 10

# use this replacement character for cases when inference is not able to retrieve the proper character value
INFERENCE_UNKNOWN_CHAR = '?'

# character used for operation "greater" in inference
INFERENCE_GREATER_CHAR = ">"

# character used for operation "equals" in inference
INFERENCE_EQUALS_CHAR = "="

# character used for operation "not-equals" in inference
INFERENCE_NOT_EQUALS_CHAR = "!="

# string used for representation of unknown dbms version
UNKNOWN_DBMS_VERSION = "Unknown"

# dynamicity mark length used in dynamicity removal engine
DYNAMICITY_MARK_LENGTH = 32

# length of FIFO buffer for removing possible duplicates in union/inband data retrieval
UNION_UNIQUE_FIFO_LENGTH = 10

# dummy user prefix used in dictionary attack
DUMMY_USER_PREFIX = "__dummy__"

# Reference: http://en.wikipedia.org/wiki/ISO/IEC_8859-1
DEFAULT_PAGE_ENCODING = "iso-8859-1"

# System variables
IS_WIN = subprocess.mswindows

# The name of the operating system dependent module imported. The following
# names have currently been registered: 'posix', 'nt', 'mac', 'os2', 'ce',
# 'java', 'riscos'
PLATFORM = os.name
PYVERSION = sys.version.split()[0]

# Database management system specific variables
MSSQL_SYSTEM_DBS = ( "Northwind", "master", "model", "msdb", "pubs", "tempdb" )
MYSQL_SYSTEM_DBS = ( "information_schema", "mysql" )  # Before MySQL 5.0 only "mysql"
PGSQL_SYSTEM_DBS = ( "information_schema", "pg_catalog", "pg_toast" )
ORACLE_SYSTEM_DBS = ( "SYSTEM", "SYSAUX", "SYS" )  # These are TABLESPACE_NAME
SQLITE_SYSTEM_DBS = ( "sqlite_master", "sqlite_temp_master" )
ACCESS_SYSTEM_DBS = ( "MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage",\
                      "MSysAccessXML", "MSysModules", "MSysModules2" )
FIREBIRD_SYSTEM_DBS = ( "RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE",\
                        "RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS",\
                        "RDB$FORMATS", "RDB$FUNCTIONS", "RDB$FUNCTION_ARGUMENTS", "RDB$GENERATORS", "RDB$INDEX_SEGMENTS", "RDB$INDICES",\
                        "RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS",\
                        "RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS",\
                        "RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS" )
MAXDB_SYSTEM_DBS = ( "SYSINFO", "DOMAIN" )
SYBASE_SYSTEM_DBS = ( "master", "model", "sybsystemdb", "sybsystemprocs" )
DB2_SYSTEM_DBS = ( "NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS",\
                   "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS" )

MSSQL_ALIASES = ( "microsoft sql server", "mssqlserver", "mssql", "ms" )
MYSQL_ALIASES = ( "mysql", "my" )
PGSQL_ALIASES = ( "postgresql", "postgres", "pgsql", "psql", "pg" )
ORACLE_ALIASES = ( "oracle", "orcl", "ora", "or" )
SQLITE_ALIASES = ( "sqlite", "sqlite3" )
ACCESS_ALIASES = ( "msaccess", "access", "jet", "microsoft access" )
FIREBIRD_ALIASES = ( "firebird", "mozilla firebird", "interbase", "ibase", "fb" )
MAXDB_ALIASES = ( "maxdb", "sap maxdb", "sap db" )
SYBASE_ALIASES = ( "sybase", "sybase sql server" )
DB2_ALIASES = ( "db2", "ibm db2", "ibmdb2" )

DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_"))
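
# Illustrative sketch only (not part of the original module): the *_ALIASES tuples above
# exist so that loose user input (e.g. --dbms=postgres) can be mapped back to a canonical
# DBMS name. The helper below is a hypothetical example of such a lookup.
def _example_normalize_dbms(value):
    value = value.strip().lower()

    for dbms, aliases in ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES),
                          (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES),
                          (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES),
                          (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES),
                          (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES)):
        if value in aliases:
            # e.g. both "postgres" and "psql" resolve to DBMS.PGSQL
            return dbms

    return None
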
SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES
SUPPORTED_OS = ( "linux", "windows" )

DBMS_DICT = {
    DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"),
    DBMS.MYSQL: (MYSQL_ALIASES, "python pymysql", "http://code.google.com/p/pymysql/"),
    DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/"),
    DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/"),
    DBMS.SQLITE: (SQLITE_ALIASES, "python-pysqlite2", "http://pysqlite.googlecode.com/"),
    DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/"),
    DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/"),
    DBMS.MAXDB: (MAXDB_ALIASES, None, None),
    DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"),
    DBMS.DB2: (DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/")
}

USER_AGENT_ALIASES = ( "ua", "useragent", "user-agent" )
REFERER_ALIASES = ( "ref", "referer", "referrer" )
HOST_ALIASES = ( "host", )

FROM_DUMMY_TABLE = {
    DBMS.ORACLE: " FROM DUAL",
    DBMS.ACCESS: " FROM MSysAccessObjects",
    DBMS.FIREBIRD: " FROM RDB$DATABASE",
    DBMS.MAXDB: " FROM VERSIONS",
    DBMS.DB2: " FROM SYSIBM.SYSDUMMY1"
}

SQL_STATEMENTS = {
    "SQL SELECT statement": ( "select ", "show ", " top ", " distinct ", " from ", " from dual", " where ", " group by ", " order by ", " having ", " limit ", " offset ", " union all ", " rownum as ", "(case ", ),
    "SQL data definition": ( "create ", "declare ", "drop ", "truncate ", "alter ", ),
    "SQL data manipulation": ( "bulk ", "insert ", "update ", "delete ", "merge ", "load ", ),
    "SQL data control": ( "grant ", "revoke ", ),
    "SQL data execution": ( "exec ", "execute ", ),
    "SQL transaction": ( "start transaction ", "begin work ", "begin transaction ", "commit ", "rollback ", ),
}

# string representation for NULL value
NULL = "NULL"

# string representation for blank ('') value
BLANK = "<blank>"

# string representation for current database
CURRENT_DB = "CD"

# Regular expressions used for parsing error messages (--parse-errors)
ERROR_PARSING_REGEXES = (
    r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>",
    r"<li>Error Type:<br>(?P<result>.+?)</li>",
    r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P<result>[^<>]+)"
)

# Regular expression used for parsing charset info from meta html headers
META_CHARSET_REGEX = r'<meta http-equiv="?content-type"?[^>]+charset=(?P<result>[^">]+)'

# Regular expression used for parsing refresh info from meta html headers
META_REFRESH_REGEX = r'<meta http-equiv="?refresh"?[^>]+content="?[^">]+url=(?P<result>[^">]+)'

# Regular expression used for parsing empty fields in tested form data
EMPTY_FORM_FIELDS_REGEX = r'(?P<result>[^=]+=(&|\Z))'

# Regular expression for soap message recognition
SOAP_REGEX = r"\A(<\?xml[^>]+>)?\s*<]"

# Extensions skipped by crawler
CRAWL_EXCLUDE_EXTENSIONS = ("gif","jpg","jar","tif","bmp","war","ear","mpg","wmv","mpeg","scm","iso","dmp","dll","cab","so","avi","bin","exe","iso","tar","png","pdf","ps","mp3","zip","rar","gz")

# Template used for common table existence check
BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"

# Template used for common column existence check
BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)"

# Payload used for checking of existence of IDS/WAF (the dumber the better)
IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables"

# Used for status representation in dictionary attack phase
ROTATING_CHARS = ('\\', '|', '|', '/', '-')

# Chunk length (in items) used by BigArray objects (only last chunk and cached one are held in memory)
BIGARRAY_CHUNK_LENGTH = 4096

# Only the last n table rows are displayed on console
TRIM_STDOUT_DUMP_SIZE = 256

# Parse response headers only for the first couple of requests
PARSE_HEADERS_LIMIT = 3

# Step used in ORDER BY technique used for finding the right number of columns in UNION query injections
ORDER_BY_STEP = 10

# Maximum number of times for revalidation of a character in time-based injections
MAX_TIME_REVALIDATION_STEPS = 5

# Characters that can be used to split parameter values in provided command line (e.g. in --tamper)
PARAMETER_SPLITTING_REGEX = r'[,|;]'

# Regular expression describing possible union char value (e.g. used in --union-char)
UNION_CHAR_REGEX = r'\A\w+\Z'

# Attribute used for storing original parameter value in special cases (e.g. POST)
UNENCODED_ORIGINAL_VALUE = 'original'

# Common column names containing usernames (used for hash cracking in some cases)
COMMON_USER_COLUMNS = ('user', 'username', 'user_name', 'benutzername', 'benutzer', 'utilisateur', 'usager', 'consommateur', 'utente', 'utilizzatore', 'usufrutuario', 'korisnik', 'usuario', 'consumidor')

# Default delimiter in GET/POST values
DEFAULT_GET_POST_DELIMITER = '&'

# Default delimiter in cookie values
DEFAULT_COOKIE_DELIMITER = ';'

# Skip unforced HashDB flush requests below the threshold number of cached items
HASHDB_FLUSH_THRESHOLD = 32
# Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
HASHDB_MILESTONE_VALUE = "EfjamfhMVw"  # r4856

# Warn user of possible delay due to large page dump in full UNION query injections
LARGE_OUTPUT_THRESHOLD = 1024**2

# On huge tables there is a considerable slowdown if every row retrieval requires ORDER BY (most noticeable in table dumping using ERROR injections)
SLOW_ORDER_COUNT_THRESHOLD = 10000

# Give up on hash recognition if nothing was found in first given number of rows
HASH_RECOGNITION_QUIT_THRESHOLD = 10000

# Size of a buffer used for log file output
BUFFERED_LOG_SIZE = 10000

# Maximum number of redirections to any single URL - this is needed because of the state that cookies introduce
MAX_SINGLE_URL_REDIRECTIONS = 4

# Maximum total number of redirections (regardless of URL) - before assuming we're in a loop
MAX_TOTAL_REDIRECTIONS = 10

# Reference: http://www.tcpipguide.com/free/t_DNSLabelsNamesandSyntaxRules.htm
MAX_DNS_LABEL = 63

# Connection chunk size (processing large responses in chunks to avoid MemoryError crashes - e.g. large table dump in full UNION/inband injections)
MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024

# Mark used for trimming unnecessary content in large chunks
LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__"

# Generic SQL comment formation
GENERIC_SQL_COMMENT = "-- "
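
# Illustrative sketch only (not part of the original module): MAX_CONNECTION_CHUNK_SIZE and
# LARGE_CHUNK_TRIM_MARKER are meant for processing very large responses piecewise instead of
# reading them in one go. The reader below is a hypothetical (Python 2 style) example of that
# idea: anything past the chunk limit is replaced by the trim marker rather than being read.
def _example_read_limited(response, limit=MAX_CONNECTION_CHUNK_SIZE):
    # read at most one chunk of the response body
    retval = response.read(limit)

    if len(retval) == limit and response.read(1):
        # content continues past the limit; mark the trim point instead of reading further
        retval += LARGE_CHUNK_TRIM_MARKER

    return retval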