Adds hashing and build changes for FIPS support. Fixes anchore#882
* Adds usedforsecurity=False to all hashlib invocations for FIPS support (a short sketch of the pattern follows the change summary below)
* Switches from hashlib.md5 to hashlib.new("md5", ...) so the usedforsecurity=False option is supported on both macOS and Linux
* Switches from psycopg2-binary to psycopg2 for FIPS compatibility

macOS users will need to install postgres and openssl (brew install postgres openssl) and
set LIBRARY_PATH and DYLD_LIBRARY_PATH to point to the openssl lib dir
so that psycopg2 can be installed from source instead of
using psycopg2-binary.

Signed-off-by: Zach Hill <[email protected]>
zhill committed Apr 23, 2021
1 parent ccc22ed commit c92b555
Showing 13 changed files with 63 additions and 38 deletions.
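
The recurring change across these files replaces direct hashlib.md5(...) calls with hashlib.new("md5", ..., usedforsecurity=False), which tells a FIPS-enabled OpenSSL/Python build that MD5 is being used only as a non-cryptographic checksum. A minimal sketch of the pattern, assuming a Python 3.8+ interpreter; the compat_md5 wrapper name and the TypeError fallback are illustrative additions, not part of this commit:

import hashlib


def compat_md5(data: bytes) -> str:
    """Hex MD5 digest for non-security uses (checksums, object keys).

    usedforsecurity=False lets the call succeed on FIPS-enabled builds
    where plain MD5 is blocked; interpreters that predate the keyword
    raise TypeError, so fall back to the unqualified call there.
    """
    try:
        return hashlib.new("md5", data, usedforsecurity=False).hexdigest()
    except TypeError:
        # Older interpreters without the usedforsecurity keyword
        return hashlib.new("md5", data).hexdigest()


print(compat_md5(b"example payload"))  # prints a 32-character hex string

Per the commit message, the generic hashlib.new constructor is used rather than hashlib.md5 with the keyword because it accepts usedforsecurity consistently on the macOS and Linux builds in use.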
4 changes: 2 additions & 2 deletions Dockerfile
@@ -18,7 +18,7 @@ RUN set -ex && \
RUN set -ex && \
echo "installing OS dependencies" && \
yum update -y && \
-yum install -y gcc make python38 git python38-wheel python38-devel go
+yum install -y gcc make python38 git python38-wheel python38-devel go libpq libpq-devel

# create anchore binaries
RUN set -ex && \
@@ -141,7 +141,7 @@ EXPOSE ${ANCHORE_SERVICE_PORT}

RUN set -ex && \
yum update -y && \
-yum install -y python38 python38-wheel procps psmisc
+yum install -y python38 python38-wheel procps psmisc libpq

# Setup container default configs and directories

19 changes: 6 additions & 13 deletions anchore_engine/analyzers/utils.py
@@ -48,7 +48,9 @@ def init_analyzer_cmdline(argv, name):
ret["name"] = name

with open(argv[0], "r") as FH:
ret["selfcsum"] = hashlib.md5(FH.read().encode("utf-8")).hexdigest()
ret["selfcsum"] = hashlib.new(
"md5", FH.read().encode("utf-8"), usedforsecurity=False
).hexdigest()

ret["imgid"] = argv[2]

@@ -479,11 +481,6 @@ def _get_extractable_member(
def _checksum_member_function(tfl, member, csums=["sha256", "md5"], memberhash={}):
ret = {}

-funcmap = {
-    "sha256": hashlib.sha256,
-    "sha1": hashlib.sha1,
-    "md5": hashlib.md5,
-}
if member.isreg():
extractable_member = member
elif member.islnk():
@@ -496,7 +493,9 @@ def _checksum_member_function(tfl, member, csums=["sha256", "md5"], memberhash={
for ctype in csums:
if extractable_member:
with tfl.extractfile(extractable_member) as mfd:
-ret[ctype] = funcmap[ctype](mfd.read()).hexdigest()
+ret[ctype] = hashlib.new(
+    ctype, mfd.read(), usedforsecurity=False
+).hexdigest()
else:
ret[ctype] = "DIRECTORY_OR_OTHER"

@@ -506,12 +505,6 @@ def _checksum_member_function(tfl, member, csums=["sha256", "md5"], memberhash={
def get_checksums_from_squashtar(squashtar, csums=["sha256", "md5"]):
allfiles = {}

-funcmap = {
-    "sha256": hashlib.sha256,
-    "sha1": hashlib.sha1,
-    "md5": hashlib.md5,
-}

try:
results = anchore_engine.analyzers.utils.run_tarfile_member_function(
squashtar, func=_checksum_member_function, csums=csums
18 changes: 12 additions & 6 deletions anchore_engine/clients/services/catalog.py
@@ -306,10 +306,12 @@ def delete_subscription(
self, subscription_key=None, subscription_type=None, subscription_id=None
):
if subscription_key and subscription_type:
-subscription_id = hashlib.md5(
+subscription_id = hashlib.new(
+    "md5",
"+".join(
[self.request_namespace, subscription_key, subscription_type]
).encode("utf8")
).encode("utf8"),
usedforsecurity=False,
).hexdigest()

return self.call_api(
@@ -326,22 +328,26 @@ def update_subscription(
if subscription_id:
pass
elif subscription_key and subscription_type:
-subscription_id = hashlib.md5(
+subscription_id = hashlib.new(
+    "md5",
"+".join(
[self.request_namespace, subscription_key, subscription_type]
).encode("utf8")
).encode("utf8"),
usedforsecurity=False,
).hexdigest()
elif subscriptiondata.get("subscription_key", None) and subscriptiondata.get(
"subscription_type", None
):
-subscription_id = hashlib.md5(
+subscription_id = hashlib.new(
+    "md5",
"+".join(
[
self.request_namespace,
subscriptiondata.get("subscription_key"),
subscriptiondata.get("subscription_type"),
]
).encode("utf8")
).encode("utf8"),
usedforsecurity=False,
).hexdigest()
else:
raise Exception(
4 changes: 3 additions & 1 deletion anchore_engine/db/db_queue.py
@@ -56,7 +56,9 @@ def create(

def generate_dataId(inobj):
datajson = json.dumps(inobj)
-dataId = hashlib.md5(datajson.encode("utf-8")).hexdigest()
+dataId = hashlib.new(
+    "md5", datajson.encode("utf-8"), usedforsecurity=False
+).hexdigest()
return dataId, datajson


6 changes: 4 additions & 2 deletions anchore_engine/db/db_subscriptions.py
@@ -6,8 +6,10 @@


def _compute_subscription_id(userId, subscription_key, subscription_type):
-return hashlib.md5(
-    "+".join([userId, subscription_key, subscription_type]).encode("utf-8")
+return hashlib.new(
+    "md5",
+    "+".join([userId, subscription_key, subscription_type]).encode("utf-8"),
+    usedforsecurity=False,
).hexdigest()


6 changes: 4 additions & 2 deletions anchore_engine/services/apiext/api/controllers/policies.py
@@ -164,8 +164,10 @@ def add_policy(bundle):
if "id" in jsondata and jsondata["id"]:
policyId = jsondata["id"]
else:
-policyId = hashlib.md5(
-    str(userId + ":" + jsondata["name"]).encode("utf8")
+policyId = hashlib.new(
+    "md5",
+    str(userId + ":" + jsondata["name"]).encode("utf8"),
+    usedforsecurity=False,
).hexdigest()
jsondata["id"] = policyId

8 changes: 5 additions & 3 deletions anchore_engine/services/catalog/catalog_impl.py
@@ -1720,10 +1720,12 @@ def perform_policy_evaluation(
curr_evaluation_result["status"] = "fail"

# set up the newest evaluation
-evalId = hashlib.md5(
+evalId = hashlib.new(
+    "md5",
":".join(
-    [policyId, userId, imageDigest, fulltag, str(curr_final_action)]
-).encode("utf8")
+    [policyId, userId, imageDigest, fulltag, str(curr_final_action)],
+).encode("utf8"),
+usedforsecurity=False,
).hexdigest()
curr_evaluation_record = anchore_engine.common.helpers.make_eval_record(
userId,
6 changes: 4 additions & 2 deletions anchore_engine/services/policy_engine/engine/policy/gate.py
@@ -103,7 +103,8 @@ def __init__(self, trigger, match_instance_id=None, msg=None):
# Compute a hash-based trigger_id for matching purposes (this is legacy from Anchore CLI)
if not self.id:
gate_id = self.trigger.gate_cls.__gate_name__
-self.id = hashlib.md5(
+self.id = hashlib.new(
+    "md5",
ensure_bytes(
"".join(
[
@@ -112,7 +113,8 @@ def __init__(self, trigger, match_instance_id=None, msg=None):
self.msg if self.msg else "",
]
)
-)
+),
+usedforsecurity=False,
).hexdigest()

def json(self):
@@ -20,7 +20,13 @@ def _trigger_id(scanner, file, signature):
:return:
"""
return "{}+{}+{}".format(
-scanner, signature, ensure_str(hashlib.md5(ensure_bytes(file)).hexdigest())
+scanner,
+signature,
+ensure_str(
+    hashlib.new(
+        "md5", ensure_bytes(file), usedforsecurity=False
+    ).hexdigest()
+),
)

def evaluate(self, image_obj, context):
8 changes: 6 additions & 2 deletions anchore_engine/subsys/object_store/drivers/filesystem.py
@@ -179,11 +179,15 @@ def delete(self, userId, bucket, key):
raise err

def _get_archive_filepath(self, userId, bucket, key):
-filehash = hashlib.md5(key.encode("utf8")).hexdigest()
+filehash = hashlib.new(
+    "md5", key.encode("utf8"), usedforsecurity=False
+).hexdigest()
fkey = filehash[0:2]
archive_path = os.path.join(
self.data_volume,
-hashlib.md5(userId.encode("utf8")).hexdigest(),
+hashlib.new(
+    "md5", userId.encode("utf8"), usedforsecurity=False
+).hexdigest(),
bucket,
fkey,
)
8 changes: 6 additions & 2 deletions anchore_engine/subsys/object_store/manager.py
@@ -183,7 +183,9 @@ def put(self, userId, bucket, archiveid, data):
final_payload, is_compressed = self._do_compress(data)

size = len(final_payload)
-digest = hashlib.md5(final_payload).hexdigest()
+digest = hashlib.new(
+    "md5", final_payload, usedforsecurity=False
+).hexdigest()

url = self.primary_client.put(userId, bucket, archiveid, final_payload)
with session_scope() as dbsession:
@@ -233,7 +235,9 @@ def get(self, userId, bucket, archiveid):
found_size = len(content)

if expected:
-found = hashlib.md5(content).hexdigest()
+found = hashlib.new(
+    "md5", content, usedforsecurity=False
+).hexdigest()
else:
found = None

2 changes: 1 addition & 1 deletion requirements.txt
@@ -10,7 +10,7 @@ prettytable==0.7.2
prometheus-client==0.7.1
prometheus-flask-exporter==0.12.1
psutil==5.6.7
-psycopg2-binary==2.8.4
+psycopg2==2.8.4
python-dateutil==2.8.1
python-swiftclient==3.8.1
pytz==2019.3
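
Because psycopg2, unlike psycopg2-binary, compiles against the local libpq and OpenSSL at install time (the reason the Dockerfile now installs libpq/libpq-devel and macOS users need brew-installed postgres and openssl on their library paths), a quick check that the from-source build linked correctly could look like the following; this snippet is illustrative and not part of the commit:

import psycopg2

# Compile-time libpq version the C extension was linked against,
# e.g. 120004 for PostgreSQL 12.4 -- confirms a from-source build found libpq.
print("libpq version:", psycopg2.__libpq_version__)
print("psycopg2 build:", psycopg2.__version__)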
@@ -101,7 +101,9 @@ def test_malware_gate_single_finding(
assert scan_trigger.did_fire
assert len(scan_trigger.fired) == 1
assert scan_trigger.fired[0].id == "clamav+" + finding.get("signature") + "+" + str(
-hashlib.md5(bytes(finding.get("path"), "utf-8")).hexdigest()
+hashlib.new(
+    "md5", bytes(finding.get("path"), "utf-8"), usedforsecurity=False
+).hexdigest()
)


