This PR introduces the --update-sums parameter, which recomputes and updates the SHA-256 hashes of modified files in the DIGEST_FILE. The parameter must be provided together with --smoke-test.

tanaydin 2025-02-20 20:31:19 +01:00
parent 327f98aaa3
commit c2311395c8
4 changed files with 51 additions and 18 deletions

View File

@@ -166,7 +166,7 @@ de2b0220db1c79d8720b636d267b11e117151f5f99740567096e9b4cbb7cc9d5 lib/controller
1d6e741e19e467650dce2ca84aa824d6df68ff74aedbe4afa8dbdb0193d94918 lib/controller/__init__.py
41c7fb7e486c4383a114c851f0c32c81c53c2b4f1d2a0fd99f70885072646387 lib/core/agent.py
f848dcfdacb5143f803f4e9474cf3eef939039c26c522ca09777c425661300f0 lib/core/bigarray.py
-eaf9d2d47305764213ada74b7a83721fc5f49578f2d8afa78799855068acb416 lib/core/common.py
+541d4877dcd0e9881893a7fb17f725a5ffe60fe4c8246be59373e1dfdb0b2941 lib/core/common.py
88fbbe7c41511b17d7ef449d675a84eaa80cac6ebf457a18577eadd62f6f1330 lib/core/compat.py
5ce8f2292f99d17d69bfc40ded206bfdfd06e2e3660ff9d1b3c56163793f8d1c lib/core/convert.py
f561310b3cea570cc13d9f0aff16cce8b097d51275f8b947e7fff4876ac65c32 lib/core/data.py
@@ -199,7 +199,7 @@ b1071f449a66b4ceacd4b84b33a73d9e0a3197d271d72daaa406ba473a8bb625 lib/core/testi
12cbead4e9e563b970fafb891127927445bd53bada1fac323b9cd27da551ba30 lib/core/wordlist.py
1d6e741e19e467650dce2ca84aa824d6df68ff74aedbe4afa8dbdb0193d94918 lib/__init__.py
a027f4c44811cb74aa367525f353706de3d3fc719e6c6162f7a61dc838acf0c2 lib/parse/banner.py
-f8d1701df33a31920e2ebf9a23fa7b6f4ccd2aff22b4ae1e14b495e51e5939fe lib/parse/cmdline.py
+23e641e25764f4a3bac8c62d1d6c8eb072f4e794b7fe8511f0f223310eb89478 lib/parse/cmdline.py
3907765df08c31f8d59350a287e826bd315a7714dc0e87496f67c8a0879c86ac lib/parse/configfile.py
ced03337edd5a16b56a379c9ac47775895e1053003c25f6ba5bec721b6e3aa64 lib/parse/handler.py
3704a02dcf00b0988b101e30b2e0d48acdd20227e46d8b552e46c55d7e9bf28c lib/parse/headers.py
@@ -477,7 +477,7 @@ b3d9d0644197ecb864e899c04ee9c7cd63891ecf2a0d3c333aad563eef735294 plugins/generi
8c4fd81d84598535643cf0ef1b2d350cd92977cb55287e23993b76eaa2215c30 sqlmapapi.py
168309215af7dd5b0b71070e1770e72f1cbb29a3d8025143fb8aa0b88cd56b62 sqlmapapi.yaml
4037f1c78180550c1896543581c0c2423e970086bae46f175397f2b4c54b7323 sqlmap.conf
-3795c6d03bc341a0e3aef3d7990ea8c272d91a4c307e1498e850594375af39f7 sqlmap.py
+4b1905d382dfb21184abc18736e7734f42d7f1e812da93e20bb73c8831b41d85 sqlmap.py
9d408612a6780f7f50a7f7887f923ff3f40be5bfa09a951c6dc273ded05b56c0 tamper/0eunion.py
c1c2eaa7df016cc7786ccee0ae4f4f363b1dce139c61fb3e658937cb0d18fc54 tamper/apostrophemask.py
19023093ab22aec3bce9523f28e8111e8f6125973e6d9c82adb60da056bdf617 tamper/apostrophenullencode.py
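
Each entry in the digest file is simply a SHA-256 hex digest followed by a repository-relative path, much like sha256sum output. As a rough standalone illustration (not code from this PR; the helper name and the root argument are invented for the example), one such line can be checked like this:

import hashlib
import os

def verify_digest_line(entry, root):
    # Split into "digest path"; a SHA-256 hex digest is always 64 characters
    parts = entry.split()
    if len(parts) != 2 or len(parts[0]) != 64:
        return False
    digest, name = parts
    path = os.path.join(root, name.replace('/', os.path.sep))
    with open(path, "rb") as f:
        return hashlib.sha256(f.read()).hexdigest() == digest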

lib/core/common.py View File

@@ -2554,7 +2554,7 @@ def initCommonOutputs():
                if line not in kb.commonOutputs[key]:
                    kb.commonOutputs[key].add(line)

-def getFileItems(filename, commentPrefix='#', unicoded=True, lowercase=False, unique=False):
+def getFileItems(filename, commentPrefix='#', unicoded=True, lowercase=False, unique=False, raiseOnError=True):
    """
    Returns newline delimited items contained inside file
@@ -2567,7 +2567,7 @@ def getFileItems(filename, commentPrefix='#', unicoded=True, lowercase=False, un
    if filename:
        filename = filename.strip('"\'')

-    checkFile(filename)
+    checkFile(filename, raiseOnError=raiseOnError)

    try:
        with openFile(filename, 'r', errors="ignore") if unicoded else open(filename, 'r') as f:
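
The new raiseOnError keyword is only threaded through to checkFile(), apparently so that a missing or unreadable list file can be treated as a soft failure instead of raising. A minimal sketch of that pattern under assumed semantics (these helpers are illustrative stand-ins, not sqlmap's actual implementations):

import os

def check_file(filename, raise_on_error=True):
    # Soft or hard failure depending on the caller's choice
    ok = bool(filename) and os.path.isfile(filename) and os.access(filename, os.R_OK)
    if not ok and raise_on_error:
        raise IOError("unable to read file '%s'" % filename)
    return ok

def get_file_items(filename, raise_on_error=True):
    # Yield stripped, non-empty lines; yield nothing if the file is unusable
    if not check_file(filename, raise_on_error=raise_on_error):
        return
    with open(filename, "r", errors="ignore") as f:
        for line in f:
            line = line.strip()
            if line:
                yield line
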
@@ -5599,18 +5599,45 @@ def checkSums():
    retVal = True

    if paths.get("DIGEST_FILE"):
-        for entry in getFileItems(paths.DIGEST_FILE):
-            match = re.search(r"([0-9a-f]+)\s+([^\s]+)", entry)
-            if match:
-                expected, filename = match.groups()
-                filepath = os.path.join(paths.SQLMAP_ROOT_PATH, filename).replace('/', os.path.sep)
-                if not checkFile(filepath, False):
-                    continue
-                with open(filepath, "rb") as f:
-                    content = f.read()
-                if not hashlib.sha256(content).hexdigest() == expected:
-                    retVal &= False
-                    break
+        for entry in getFileItems(paths.DIGEST_FILE, raiseOnError=False):
+            try:
+                (file_hash, file_name) = entry.split()
+            except ValueError:
+                retVal &= False
+                break
+
+            if len(file_hash) == 64:
+                if not hashlib.sha256(
+                        openFile(
+                            os.path.join(
+                                paths.SQLMAP_ROOT_PATH, file_name.encode('utf-8').decode('utf-8')
+                            ).replace('/', os.path.sep),
+                            'rb', None).read()).hexdigest() == file_hash:
+                    retVal &= False
+                    break

    return retVal
+def updateSums():
+    # Read existing entries to maintain file order
+    entries = ""
+
+    for entry in getFileItems(paths.DIGEST_FILE, raiseOnError=False):
+        try:
+            (file_hash, file_name) = entry.split()
+        except ValueError:
+            break
+
+        if len(file_hash) == 64:
+            entries += "%s %s\n" % (
+                hashlib.sha256(
+                    openFile(
+                        os.path.join(
+                            paths.SQLMAP_ROOT_PATH, file_name.encode('utf-8').decode('utf-8')
+                        ).replace('/', os.path.sep), 'rb', None).read()
+                ).hexdigest(),
+                file_name.encode('utf-8').decode('utf-8'),
+            )
+
+            with open(paths.DIGEST_FILE, "w") as f:
+                f.write(entries)
+        else:
+            pass
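
Refreshing the sums follows the same walk as the check: re-read the existing entries so their order is preserved, recompute each listed file's SHA-256, and write the result back. A standalone sketch of that idea (paths and names are illustrative, not the PR's exact code; malformed lines are skipped, mirroring the len(file_hash) == 64 guard in the diff):

import hashlib
import os

def refresh_sums(sums_path, root):
    # Recompute the digest of every file listed in sums_path, keeping line order
    lines = []
    with open(sums_path, "r", errors="ignore") as f:
        for entry in f:
            parts = entry.split()
            if len(parts) != 2 or len(parts[0]) != 64:
                continue
            name = parts[1]
            target = os.path.join(root, name.replace('/', os.path.sep))
            with open(target, "rb") as g:
                digest = hashlib.sha256(g.read()).hexdigest()
            lines.append("%s %s" % (digest, name))
    # Single write at the end, so an error mid-loop leaves the original file intact
    with open(sums_path, "w") as f:
        f.write("\n".join(lines) + "\n")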

lib/parse/cmdline.py View File

@@ -857,6 +857,9 @@ def cmdLineParser(argv=None):
        parser.add_argument("--smoke-test", dest="smokeTest", action="store_true",
            help=SUPPRESS)

+        parser.add_argument("--update-sums", dest="updateSums", action="store_true",
+            help=SUPPRESS)
+
        parser.add_argument("--vuln-test", dest="vulnTest", action="store_true",
            help=SUPPRESS)
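
Like the neighbouring test switches, --update-sums is registered with help=SUPPRESS, so it is parsed normally (ending up as conf.updateSums via dest) but never appears in the --help output. A self-contained illustration of that argparse pattern, outside sqlmap's parser:

from argparse import ArgumentParser, SUPPRESS

parser = ArgumentParser(prog="example")
parser.add_argument("--verbose", action="store_true", help="visible in --help")
parser.add_argument("--update-sums", dest="updateSums", action="store_true",
    help=SUPPRESS)  # hidden: accepted on the command line, omitted from help text

args = parser.parse_args(["--update-sums"])
print(args.updateSums)  # True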

sqlmap.py View File

@@ -179,6 +179,9 @@ def main():
        if not conf.updateAll:
            # Postponed imports (faster start)
            if conf.smokeTest:
+                if conf.updateSums:
+                    from lib.core.common import updateSums
+                    updateSums()
                from lib.core.testing import smokeTest
                os._exitcode = 1 - (smokeTest() or 0)
            elif conf.vulnTest:
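
With this wiring, --update-sums is only consulted inside the conf.smokeTest branch, so on its own it is a no-op; an invocation along the lines of python sqlmap.py --smoke-test --update-sums first rewrites the digest entries and then runs the regular smoke test.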