Skip to content

Commit

Permalink
fix: set usedforsecurity=False for md5 operations
Browse files Browse the repository at this point in the history
To work on machines with FIPS enforced, md5 can't be used for security,
and Python enforces this by default, but md5 can still be used if an extra
function argument is passed.

Re-use md5_hexdigest, as it wasn't being used, so we can determine
whether this function argument is available, as it's only available on
Python 3.9 and newer.

Bug: #4479
  • Loading branch information
jasonish committed Jul 8, 2024
1 parent a81f805 commit 933521b
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 11 deletions.
11 changes: 5 additions & 6 deletions suricata/update/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,8 +114,7 @@ def check_checksum(self, tmp_filename, url, checksum_url=None):
if not isinstance(checksum_url, str):
checksum_url = url[0] + ".md5"
net_arg=(checksum_url,url[1])
local_checksum = hashlib.md5(
open(tmp_filename, "rb").read()).hexdigest().strip()
local_checksum = util.md5_hexdigest(open(tmp_filename, "rb").read())
remote_checksum_buf = io.BytesIO()
logger.info("Checking %s." % (checksum_url))
net.get(net_arg, remote_checksum_buf)
Expand Down Expand Up @@ -154,7 +153,7 @@ def url_basename(self, url):
return filename

def get_tmp_filename(self, url):
url_hash = hashlib.md5(url.encode("utf-8")).hexdigest()
url_hash = util.md5_hexdigest(url.encode("utf-8"))
return os.path.join(
config.get_cache_dir(),
"%s-%s" % (url_hash, self.url_basename(url)))
Expand Down Expand Up @@ -470,7 +469,7 @@ def handle_dataset_files(rule, dep_files):
return
dataset_contents = dep_files[source_filename]

source_filename_hash = hashlib.md5(source_filename.encode()).hexdigest()
source_filename_hash = util.md5_hexdigest(source_filename.encode())
new_rule = re.sub(r"(dataset.*?load\s+){}".format(dataset_filename), r"\g<1>datasets/{}".format(source_filename_hash), rule.format())
dest_filename = os.path.join(config.get_output_dir(), "datasets", source_filename_hash)
dest_dir = os.path.dirname(dest_filename)
Expand Down Expand Up @@ -783,7 +782,7 @@ def md5(self, filename):
if not os.path.exists(filename):
return ""
else:
return hashlib.md5(open(filename, "rb").read()).hexdigest()
return util.md5_hexdigest(open(filename, "rb").read())

def any_modified(self):
for filename in self.hashes:
Expand Down Expand Up @@ -1000,7 +999,7 @@ def load_sources(suricata_version):
for url in urls:

# To de-duplicate filenames, add a prefix that is a hash of the URL.
prefix = hashlib.md5(url[0].encode()).hexdigest()
prefix = util.md5_hexdigest(url[0].encode())
source_files = Fetch().run(url)
for key in source_files:
content = source_files[key]
Expand Down
11 changes: 7 additions & 4 deletions suricata/update/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,15 +22,18 @@
import atexit
import shutil
import zipfile
import sys

def md5_hexdigest(filename):
""" Compute the MD5 checksum for the contents of the provided filename.

:param filename: Filename to computer MD5 checksum of.
def md5_hexdigest(buf):
    """ Compute the MD5 checksum for the provided buffer.

    :param buf: A bytes-like object to hash.

    :returns: A string representing the hex value of the computed MD5.
    """
    # The usedforsecurity argument was added in Python 3.9; it is needed
    # on FIPS-enabled systems where MD5 is otherwise forbidden. Use the
    # idiomatic tuple comparison rather than checking major/minor fields
    # separately, and drop the redundant .strip() — hexdigest() already
    # returns a bare hex string with no surrounding whitespace.
    if sys.version_info < (3, 9):
        return hashlib.md5(buf).hexdigest()
    return hashlib.md5(buf, usedforsecurity=False).hexdigest()

def mktempdir(delete_on_exit=True):
""" Create a temporary directory that is removed on exit. """
Expand Down
2 changes: 1 addition & 1 deletion tests/test_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,4 +30,4 @@ def test_hexdigest(self):
test_file.flush()
self.assertEqual(
"120ea8a25e5d487bf68b5f7096440019",
util.md5_hexdigest(test_file.name))
util.md5_hexdigest(open(test_file.name).read().encode()))

0 comments on commit 933521b

Please sign in to comment.