Mirror of https://github.com/sqlmapproject/sqlmap.git
Fixes #4012

parent 95c5c20de4
commit 0d140b60f1
@@ -336,7 +336,11 @@ def _setCrawler():
         return
 
     if not conf.bulkFile:
-        crawl(conf.url)
+        if conf.url:
+            crawl(conf.url)
+        elif conf.requestFile and kb.targets:
+            target = list(kb.targets)[0]
+            crawl(target[0], target[2], target[3])
 
 def _doSearch():
     """
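A minimal standalone sketch (not sqlmap code) of the branch added above, assuming, as the indices in the diff suggest, that each entry in kb.targets is a tuple laid out as (url, method, data, cookie, ...); function name and sample values below are illustrative only:

    # Standalone sketch of the seed selection added to _setCrawler() above.
    # Assumed per-target tuple layout: (url, method, data, cookie, ...).

    def pick_crawl_seed(url, bulk_file, request_file, targets):
        if bulk_file:
            return None                            # bulk mode is handled elsewhere
        if url:
            return (url, None, None)               # plain -u target: no POST data, no cookie
        if request_file and targets:
            first = list(targets)[0]               # first target parsed from the -r file
            return (first[0], first[2], first[3])  # (url, POST data, cookie) for crawl()
        return None

    # Example: a target as it could look after parsing a POST request file
    targets = [("http://example.test/login.php", "POST", "user=a&pass=b", "PHPSESSID=abc123")]
    print(pick_crawl_seed(None, None, "request.txt", targets))
    # ('http://example.test/login.php', 'user=a&pass=b', 'PHPSESSID=abc123')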
@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.11.52"
+VERSION = "1.3.11.53"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
@@ -42,7 +42,7 @@ from thirdparty.beautifulsoup.beautifulsoup import BeautifulSoup
 from thirdparty.six.moves import http_client as _http_client
 from thirdparty.six.moves import urllib as _urllib
 
-def crawl(target):
+def crawl(target, post=None, cookie=None):
     if not target:
         return
 
@@ -73,7 +73,7 @@ def crawl(target):
                 content = None
                 try:
                     if current:
-                        content = Request.getPage(url=current, crawling=True, raise404=False)[0]
+                        content = Request.getPage(url=current, post=post, cookie=None, crawling=True, raise404=False)[0]
                 except SqlmapConnectionException as ex:
                     errMsg = "connection exception detected ('%s'). skipping " % getSafeExString(ex)
                     errMsg += "URL '%s'" % current
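In practice this presumably enables crawling straight from a saved request, e.g. an invocation along the lines of python sqlmap.py -r request.txt --crawl=2 (file name and depth are illustrative): with --crawl set and no -u URL given, the crawler is now seeded with the URL, POST data and cookie of the first target parsed from the request file, and the POST body is replayed on each crawled page via Request.getPage().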