Miroslav Stampar 2017-07-04 12:14:17 +02:00
parent 1678b606a2
commit 614f290217
6 changed files with 69 additions and 46 deletions

lib/core/common.py

@@ -2597,13 +2597,13 @@ def runningAsAdmin():
     return isAdmin

-def logHTTPTraffic(requestLogMsg, responseLogMsg):
+def logHTTPTraffic(requestLogMsg, responseLogMsg, startTime=None, endTime=None):
     """
     Logs HTTP traffic to the output file
     """

     if conf.harFile:
-        conf.httpCollector.collectRequest(requestLogMsg, responseLogMsg)
+        conf.httpCollector.collectRequest(requestLogMsg, responseLogMsg, startTime, endTime)

     if not conf.trafficFile:
         with kb.locks.log:
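The two extra parameters are optional, so untimed call sites keep working unchanged, and passing the raw endpoints (rather than a precomputed delta) lets the HAR layer derive both a duration and a start timestamp from the same values. A minimal standalone sketch of the call pattern (the lower-case log_http_traffic helper is illustrative, not sqlmap's implementation):

    # Standalone sketch (not sqlmap code) of the timed logging call pattern.
    import time

    def log_http_traffic(request_msg, response_msg, start_time=None, end_time=None):
        duration_ms = int(1000 * ((end_time or 0) - (start_time or 0)))
        print("%d ms: %s" % (duration_ms, request_msg.splitlines()[0]))

    start = time.time()
    time.sleep(0.01)  # stand-in for the actual HTTP round trip
    log_http_traffic("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n", "HTTP/1.1 200 OK\r\n\r\n", start, time.time())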

lib/core/settings.py

@@ -19,7 +19,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS

 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.1.7.3"
+VERSION = "1.1.7.4"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

lib/request/connect.py

@@ -223,6 +223,8 @@ class Connect(object):
         the target URL page content
         """

+        start = time.time()
+
         if isinstance(conf.delay, (int, float)) and conf.delay > 0:
             time.sleep(conf.delay)
@@ -288,7 +290,7 @@ class Connect(object):
         status = None
         _ = urlparse.urlsplit(url)

-        requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
+        requestMsg = u"HTTP request [#%d]:\r\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
         requestMsg += getUnicode(("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url)
         responseMsg = u"HTTP response "
         requestHeaders = u""
@@ -413,13 +415,13 @@ class Connect(object):
                 responseHeaders = _(ws.getheaders())
                 responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]

-                requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
+                requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
-                requestMsg += "\n%s" % requestHeaders
+                requestMsg += "\r\n%s" % requestHeaders

                 if post is not None:
-                    requestMsg += "\n\n%s" % getUnicode(post)
+                    requestMsg += "\r\n\r\n%s" % getUnicode(post)

-                requestMsg += "\n"
+                requestMsg += "\r\n"

                 threadData.lastRequestMsg = requestMsg
@@ -432,26 +434,26 @@ class Connect(object):
             else:
                 req = urllib2.Request(url, post, headers)

-            requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])
+            requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])

             if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                 conf.cj._policy._now = conf.cj._now = int(time.time())
                 cookies = conf.cj._cookies_for_request(req)
-                requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))
+                requestHeaders += "\r\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

             if post is not None:
                 if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
-                    requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
+                    requestHeaders += "\r\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

             if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
-                requestHeaders += "\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")
+                requestHeaders += "\r\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")

-            requestMsg += "\n%s" % requestHeaders
+            requestMsg += "\r\n%s" % requestHeaders

             if post is not None:
-                requestMsg += "\n\n%s" % getUnicode(post)
+                requestMsg += "\r\n\r\n%s" % getUnicode(post)

-            requestMsg += "\n"
+            requestMsg += "\r\n"

             if not multipart:
                 threadData.lastRequestMsg = requestMsg
@@ -576,19 +578,19 @@ class Connect(object):
             threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
             kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

-            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
+            responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, status)

             if responseHeaders:
-                logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
+                logHeaders = "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])

-            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
+            logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]), start, time.time())

             skipLogTraffic = True

             if conf.verbose <= 5:
                 responseMsg += getUnicode(logHeaders)
             elif conf.verbose > 5:
-                responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
+                responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

             if not multipart:
                 logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
@@ -736,20 +738,20 @@ class Connect(object):
             requestMsg = re.sub("(?i)Content-length: \d+\n", "", requestMsg)
             requestMsg = re.sub("(?s)\n\n.+", "\n", requestMsg)

-            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, conn.code, status)
+            responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, conn.code, status)
         else:
-            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
+            responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, status)

         if responseHeaders:
-            logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
+            logHeaders = "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])

         if not skipLogTraffic:
-            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
+            logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]), start, time.time())

         if conf.verbose <= 5:
             responseMsg += getUnicode(logHeaders)
         elif conf.verbose > 5:
-            responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
+            responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

         if not multipart:
             logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
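The switch from "\n" to "\r\n" keeps the logged request/response text framed the way HTTP is framed on the wire, which is what the HAR code in lib/utils/har.py (below) now assumes when it re-parses the captured response. A standalone sketch of that re-parsing approach, using a wrapper similar in spirit to har.py's FakeSocket (the _FakeSocket class here is illustrative, not sqlmap's exact implementation):

    # Standalone sketch (Python 2, matching this codebase): re-parse a captured,
    # CRLF-framed response text with httplib.
    import httplib
    import StringIO

    class _FakeSocket(StringIO.StringIO):
        # httplib only needs makefile() to obtain a file-like object to read from
        def makefile(self, *args, **kwargs):
            return self

    raw = "HTTP/1.0 200 OK\r\nContent-Type: text/html\r\n\r\n<html>hello</html>"
    response = httplib.HTTPResponse(_FakeSocket(raw))
    response.begin()                              # parses the CRLF-terminated status line and headers
    print(response.status)                        # 200
    print(response.getheader("Content-Type"))     # text/html
    print(response.read())                        # <html>hello</html>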

lib/request/redirecthandler.py

@@ -6,6 +6,7 @@ See the file 'doc/COPYING' for copying permission
 """

 import re
+import time
 import types
 import urllib2
 import urlparse
@@ -69,6 +70,7 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
         return urllib2.Request(newurl, data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host())

     def http_error_302(self, req, fp, code, msg, headers):
+        start = time.time()
         content = None
         redurl = self._get_header_redirect(headers) if not conf.ignoreRedirects else None
@@ -92,18 +94,18 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
         threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

         redirectMsg = "HTTP redirect "
-        redirectMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, getUnicode(msg))
+        redirectMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, getUnicode(msg))

         if headers:
-            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in headers.items())
+            logHeaders = "\r\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in headers.items())
         else:
             logHeaders = ""

         redirectMsg += logHeaders

         if content:
-            redirectMsg += "\n\n%s" % getUnicode(content[:MAX_CONNECTION_CHUNK_SIZE])
+            redirectMsg += "\r\n\r\n%s" % getUnicode(content[:MAX_CONNECTION_CHUNK_SIZE])

-        logHTTPTraffic(threadData.lastRequestMsg, redirectMsg)
+        logHTTPTraffic(threadData.lastRequestMsg, redirectMsg, start, time.time())

         logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg)

         if redurl:
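Because start is taken when http_error_302() is entered and the end timestamp when the redirect is logged, each redirect response becomes its own timed pair in the traffic log. A standalone sketch of that accumulation (the URLs and messages are hypothetical, not sqlmap code):

    # Standalone sketch: one timed (request, response, start, end) pair per redirect hop.
    import time

    pairs = []
    for url in ("http://example.com/", "http://example.com/login"):   # hypothetical redirect chain
        start = time.time()
        time.sleep(0.01)                                              # stand-in for the hop's round trip
        pairs.append(("GET %s" % url, "HTTP/1.1 302 Found\r\n\r\n", start, time.time()))

    print(len(pairs))   # 2 timed pairs, one per hop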

lib/utils/har.py

@@ -7,28 +7,31 @@ See the file 'doc/COPYING' for copying permission

 import base64
 import BaseHTTPServer
+import datetime
 import httplib
 import re
 import StringIO
+import time

 from lib.core.data import logger
 from lib.core.settings import VERSION

+# Reference: https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/HAR/Overview.html
+# http://www.softwareishard.com/har/viewer/
+
 class HTTPCollectorFactory:
     def __init__(self, harFile=False):
         self.harFile = harFile

     def create(self):
-        collector = HTTPCollector()
-
-        return collector
+        return HTTPCollector()

 class HTTPCollector:
     def __init__(self):
         self.messages = []

-    def collectRequest(self, requestMessage, responseMessage):
-        self.messages.append(RawPair(requestMessage, responseMessage))
+    def collectRequest(self, requestMessage, responseMessage, startTime=None, endTime=None):
+        self.messages.append(RawPair(requestMessage, responseMessage, startTime, endTime))

     def obtain(self):
         return {"log": {
@@ -38,23 +41,30 @@ class HTTPCollector:
         }}

 class RawPair:
-    def __init__(self, request, response):
+    def __init__(self, request, response, startTime=None, endTime=None):
         self.request = request
         self.response = response
+        self.startTime = startTime
+        self.endTime = endTime

     def toEntry(self):
-        return Entry(request=Request.parse(self.request),
-                     response=Response.parse(self.response))
+        return Entry(request=Request.parse(self.request), response=Response.parse(self.response), startTime=self.startTime, endTime=self.endTime)

 class Entry:
-    def __init__(self, request, response):
+    def __init__(self, request, response, startTime, endTime):
         self.request = request
         self.response = response
+        self.startTime = startTime or 0
+        self.endTime = endTime or 0

     def toDict(self):
         return {
             "request": self.request.toDict(),
             "response": self.response.toDict(),
+            "cache": {},
+            "timings": [],
+            "time": int(1000 * (self.endTime - self.startTime)),
+            "startedDateTime": "%s%s" % (datetime.datetime.fromtimestamp(self.startTime).isoformat(), time.strftime("%z")) if self.startTime else None
         }

 class Request:
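The new Entry fields are where the collected timestamps become HAR data: "time" is the round-trip duration in milliseconds and "startedDateTime" is a local ISO-8601 timestamp with the UTC offset appended. A standalone sketch of that derivation (illustrative values only):

    # Standalone sketch of the timing math used in Entry.toDict() above.
    import datetime
    import time

    startTime = time.time()
    time.sleep(0.05)                                  # stand-in for the HTTP round trip
    endTime = time.time()

    print(int(1000 * (endTime - startTime)))          # HAR "time": elapsed milliseconds, e.g. 50
    print("%s%s" % (datetime.datetime.fromtimestamp(startTime).isoformat(),
                    time.strftime("%z")))             # HAR "startedDateTime", e.g. 2017-07-04T12:14:17.123456+0200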
@@ -64,7 +74,7 @@ class Request:
         self.httpVersion = httpVersion
         self.headers = headers or {}
         self.postBody = postBody
-        self.comment = comment
+        self.comment = comment.strip() if comment else comment
         self.raw = raw

     @classmethod
@@ -89,6 +99,10 @@ class Request:
             "method": self.method,
             "url": self.url,
             "headers": [dict(name=key.capitalize(), value=value) for key, value in self.headers.items()],
+            "cookies": [],
+            "queryString": [],
+            "headersSize": -1,
+            "bodySize": -1,
             "comment": self.comment,
         }
@@ -111,7 +125,7 @@ class Response:
         self.statusText = statusText
         self.headers = headers
         self.content = content
-        self.comment = comment
+        self.comment = comment.strip() if comment else comment

     @classmethod
     def parse(cls, raw):
@@ -124,7 +138,7 @@ class Response:
             parts = cls.extract_status.search(first_line)
             status_line = "HTTP/1.0 %s %s" % (parts.group(1), parts.group(2))
             remain = io.read()
-            altered = status_line + "\n" + remain
+            altered = status_line + "\r\n" + remain
             comment = first_line

         response = httplib.HTTPResponse(FakeSocket(altered))
@@ -133,7 +147,7 @@ class Response:
         try:
             content = response.read(-1)
         except httplib.IncompleteRead:
-            content = raw[raw.find("\n\n") + 2:].rstrip("\r\n")
+            content = raw[raw.find("\r\n\r\n") + 4:].rstrip("\r\n")

         return cls(httpVersion="HTTP/1.1" if response.version == 11 else "HTTP/1.0",
                    status=response.status,
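The IncompleteRead fallback stays consistent with the CRLF-framed messages: the body begins right after the first blank line, i.e. the first "\r\n\r\n". A quick standalone check of that slicing:

    # Standalone check of the fallback body extraction shown above.
    raw = "HTTP/1.0 200 OK\r\nContent-Length: 18\r\n\r\n<html>hello</html>\r\n"
    body = raw[raw.find("\r\n\r\n") + 4:].rstrip("\r\n")
    assert body == "<html>hello</html>"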
@@ -147,6 +161,7 @@ class Response:
         content = {
             "mimeType": self.headers.get("Content-Type"),
             "text": self.content,
+            "size": len(self.content or "")
         }

         binary = set(['\0', '\1'])
@@ -159,7 +174,11 @@ class Response:
             "status": self.status,
             "statusText": self.statusText,
             "headers": [dict(name=key.capitalize(), value=value) for key, value in self.headers.items() if key.lower() != "uri"],
+            "cookies": [],
             "content": content,
+            "headersSize": -1,
+            "bodySize": -1,
+            "redirectURL": "",
             "comment": self.comment,
         }
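Taken together, each collected request/response pair ends up as one entry in the HAR log that sqlmap writes when --har is used. A standalone sketch of how such an entry serializes (a hand-built dict with the fields added in this commit, not the collector's full output, which also carries log-level metadata):

    # Standalone sketch (not sqlmap's exact output): serializing one HAR-style entry.
    import datetime
    import json
    import time

    start, end = time.time(), time.time() + 0.042     # illustrative timestamps

    entry = {
        "request": {"method": "GET", "url": "http://example.com/", "headers": [],
                    "cookies": [], "queryString": [], "headersSize": -1, "bodySize": -1, "comment": ""},
        "response": {"status": 200, "statusText": "OK", "headers": [], "cookies": [],
                     "content": {"mimeType": "text/html", "text": "<html></html>", "size": 14},
                     "headersSize": -1, "bodySize": -1, "redirectURL": "", "comment": ""},
        "cache": {},
        "timings": [],
        "time": int(1000 * (end - start)),
        "startedDateTime": "%s%s" % (datetime.datetime.fromtimestamp(start).isoformat(), time.strftime("%z")),
    }

    print(json.dumps({"log": {"entries": [entry]}}, indent=4))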

txt/checksum.md5

@@ -27,7 +27,7 @@ a97df93b552ee4e4ba3692eae870de7c lib/controller/handler.py
 310efc965c862cfbd7b0da5150a5ad36 lib/controller/__init__.py
 d58e85ffeac2471ef3af729076b3b5f7 lib/core/agent.py
 6cc95a117fbd34ef31b9aa25520f0e31 lib/core/bigarray.py
-d40de0812b667b7be9d1755f82e18f17 lib/core/common.py
+ebf31aa9c5af54188e999719593e8ba4 lib/core/common.py
 5065a4242a8cccf72f91e22e1007ae63 lib/core/convert.py
 a8143dab9d3a27490f7d49b6b29ea530 lib/core/data.py
 7936d78b1a7f1f008ff92bf2f88574ba lib/core/datatype.py
@@ -46,7 +46,7 @@ b9ff4e622c416116bee6024c0f050349 lib/core/enums.py
 d8e9250f3775119df07e9070eddccd16 lib/core/replication.py
 785f86e3f963fa3798f84286a4e83ff2 lib/core/revision.py
 40c80b28b3a5819b737a5a17d4565ae9 lib/core/session.py
-191a4cb7eea0a46315c894abd9491bcf lib/core/settings.py
+4538abe3e7f78f73fe3cd85dc4715e7f lib/core/settings.py
 d91291997d2bd2f6028aaf371bf1d3b6 lib/core/shell.py
 2ad85c130cc5f2b3701ea85c2f6bbf20 lib/core/subprocessng.py
 baa3f47efa6701076d026e43a6874a51 lib/core/target.py
@@ -68,7 +68,7 @@ ad74fc58fc7214802fd27067bce18dd2 lib/core/unescaper.py
 403d873f1d2fd0c7f73d83f104e41850 lib/request/basicauthhandler.py
 3ba1c71e68953d34fc526a9d79d5a457 lib/request/basic.py
 ef48de622b0a6b4a71df64b0d2785ef8 lib/request/comparison.py
-4b056460279e65eef5f4f4fe293e657b lib/request/connect.py
+bfd08465f7bc259cc9af008da0ffb4c3 lib/request/connect.py
 fb6b788d0016ab4ec5e5f661f0f702ad lib/request/direct.py
 cc1163d38e9b7ee5db2adac6784c02bb lib/request/dns.py
 5dcdb37823a0b5eff65cd1018bcf09e4 lib/request/httpshandler.py
@@ -77,7 +77,7 @@ cc1163d38e9b7ee5db2adac6784c02bb lib/request/dns.py
 dc1e0af84ee8eb421797d61c8cb8f172 lib/request/methodrequest.py
 bb9c165b050f7696b089b96b5947fac3 lib/request/pkihandler.py
 602d4338a9fceaaee40c601410d8ac0b lib/request/rangehandler.py
-111b3ee936f23167b5654a5f72e9731b lib/request/redirecthandler.py
+3ba71e1571d105386d4d30746f5d6ab2 lib/request/redirecthandler.py
 b373770137dc885889e495de95169b93 lib/request/templates.py
 992a02767d12254784f15501a7ab8dd8 lib/takeover/abstraction.py
 c6bc7961a186baabe0a9f5b7e0d8974b lib/takeover/icmpsh.py
@@ -103,7 +103,7 @@ a73c3ddd0de359507a8ad59b363aa963 lib/utils/api.py
 ed70f1ca9113664043ec9e6778e48078 lib/utils/crawler.py
 ba12c69a90061aa14d848b8396e79191 lib/utils/deps.py
 3b9fd519164e0bf275d5fd361c3f11ff lib/utils/getch.py
-3b93150eea78ea84fa0461a55e3e48ec lib/utils/har.py
+40e987b76120f0327f891d1cc7866f4e lib/utils/har.py
 ccfdad414ce2ec0c394c3deaa39a82bf lib/utils/hashdb.py
 12e0e0ab70c6fe5786bc561c35dc067f lib/utils/hash.py
 e76a08237ee6a4cd6855af79610ea8a5 lib/utils/htmlentities.py