sepinf-inc#1849: Hash each digest algo in separate threads
aberenguel committed Aug 30, 2023
1 parent e3fb3ec commit c51d21a
Showing 1 changed file with 56 additions and 15 deletions:

iped-engine/src/main/java/iped/engine/task/HashTask.java
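In short: process() previously updated all digests sequentially in the reader thread. With this change, each enabled algorithm is updated by its own task in a fixed thread pool sized to the number of algorithms. Two 1 MiB buffers alternate so the next chunk can be read while workers hash the current one, and a CountDownLatch per read cycle keeps a buffer from being overwritten while it is still being hashed. A minimal standalone sketch of the pattern follows the diff.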
@@ -26,6 +26,10 @@
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicReference;
 
 import org.bouncycastle.jce.provider.BouncyCastleProvider;
 import org.slf4j.Logger;
@@ -36,7 +40,6 @@
 import iped.engine.config.ConfigurationManager;
 import iped.engine.config.HashTaskConfig;
 import iped.parsers.whatsapp.WhatsAppParser;
-import iped.utils.IOUtil;
 
 /**
  * Classe para calcular e manipular hashes.
@@ -45,6 +48,8 @@ public class HashTask extends AbstractTask {
 
     private static Logger LOGGER = LoggerFactory.getLogger(HashTask.class);
 
+    private static final int HASH_BUFFER_LEN = 1024 * 1024;
+
     public enum HASH {
         MD5("md5"), //$NON-NLS-1$
         SHA1("sha-1"), //$NON-NLS-1$
@@ -65,6 +70,8 @@ public String toString() {
     }
 
     private HashMap<String, MessageDigest> digestMap = new LinkedHashMap<String, MessageDigest>();
+
+    private ExecutorService executorService;
 
     private HashTaskConfig hashConfig;
 
@@ -95,12 +102,13 @@ public void init(ConfigurationManager configurationManager) throws Exception {
             }
         }
 
+        executorService = Executors.newFixedThreadPool(digestMap.size());
+
     }
 
     @Override
     public void finish() throws Exception {
-        // TODO Auto-generated method stub
-
+        executorService.shutdown();
     }
 
     public void process(IItem evidence) {
@@ -119,19 +127,54 @@ public void process(IItem evidence) {
             return;
         }
 
-        InputStream in = null;
-        try {
-            in = evidence.getBufferedInputStream();
-            byte[] buf = new byte[1024 * 1024];
+        try (InputStream in = evidence.getBufferedInputStream()) {
+
+            byte[] readBuf = new byte[HASH_BUFFER_LEN];
+            byte[] hashBuf = new byte[HASH_BUFFER_LEN];
+            byte[] tempBuf = null;
             int len;
-            while ((len = in.read(buf)) >= 0 && !Thread.currentThread().isInterrupted()) {
+
+            AtomicReference<CountDownLatch> countDown = new AtomicReference<>(null);
+            AtomicReference<Exception> ex = new AtomicReference<Exception>(null);
+
+            while ((len = in.read(readBuf)) >= 0 && !Thread.currentThread().isInterrupted()) {
+
+                if (countDown.get() != null) {
+                    countDown.get().await();
+                }
+
+                countDown.set(new CountDownLatch(digestMap.size()));
+
+                // swap hashBuf <-> readBuf
+                tempBuf = hashBuf;
+                hashBuf = readBuf;
+                readBuf = tempBuf;
+
+                final int currLen = len;
+                final byte[] currHashBuf = hashBuf;
                 for (String algo : digestMap.keySet()) {
-                    if (!algo.equals(HASH.EDONKEY.toString())) {
-                        digestMap.get(algo).update(buf, 0, len);
-                    } else {
-                        updateEd2k(buf, len);
-                    }
+                    executorService.execute(() -> {
+                        try {
+                            if (!algo.equals(HASH.EDONKEY.toString())) {
+                                digestMap.get(algo).update(currHashBuf, 0, currLen);
+                            } else {
+                                updateEd2k(currHashBuf, currLen);
+                            }
+                        } catch (Exception e) {
+                            ex.set(e);
+                        } finally {
+                            countDown.get().countDown();
+                        }
+                    });
                 }
+
+                if (ex.get() != null) {
+                    throw ex.get();
+                }
             }
 
+            if (countDown.get() != null) {
+                countDown.get().await();
+            }
+
             boolean defaultHash = true;
@@ -161,8 +204,6 @@ public void process(IItem evidence) {
                     e.toString());
             // e.printStackTrace();
 
-        } finally {
-            IOUtil.closeQuietly(in);
         }
 
     }
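For reference, below is a minimal, self-contained sketch of the pattern this commit introduces: one pool thread per digest algorithm, two alternating buffers so the next chunk is read while workers hash the current one, and a CountDownLatch per read cycle so a buffer is never overwritten while still being hashed. The class and method names (ParallelHasher, hashAll) are hypothetical and the ed2k special case is omitted; this is an illustration of the technique, not IPED code.

// Hypothetical illustration of the commit's pattern (not IPED code).
import java.io.InputStream;
import java.security.MessageDigest;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicReference;

public class ParallelHasher {

    private static final int BUF_LEN = 1024 * 1024;

    public static Map<String, byte[]> hashAll(InputStream in, String... algos) throws Exception {
        Map<String, MessageDigest> digests = new LinkedHashMap<>();
        for (String algo : algos) {
            digests.put(algo, MessageDigest.getInstance(algo));
        }
        // One worker per algorithm, as in the commit.
        ExecutorService pool = Executors.newFixedThreadPool(digests.size());
        try {
            byte[] readBuf = new byte[BUF_LEN];
            byte[] hashBuf = new byte[BUF_LEN];
            AtomicReference<CountDownLatch> latch = new AtomicReference<>();
            AtomicReference<Exception> error = new AtomicReference<>();

            int len;
            while ((len = in.read(readBuf)) >= 0) {
                // Wait until every worker is done with the previous chunk
                // before its buffer is reused for the next read.
                if (latch.get() != null) {
                    latch.get().await();
                }
                if (error.get() != null) {
                    throw error.get();
                }
                latch.set(new CountDownLatch(digests.size()));

                // Swap buffers: workers hash the chunk just read while the
                // reader refills the other buffer on the next iteration.
                byte[] tmp = hashBuf;
                hashBuf = readBuf;
                readBuf = tmp;

                final int chunkLen = len;
                final byte[] chunk = hashBuf;
                for (MessageDigest md : digests.values()) {
                    pool.execute(() -> {
                        try {
                            md.update(chunk, 0, chunkLen);
                        } catch (Exception e) {
                            error.set(e);
                        } finally {
                            latch.get().countDown();
                        }
                    });
                }
            }
            // Drain the last cycle before collecting the digest values.
            if (latch.get() != null) {
                latch.get().await();
            }
            if (error.get() != null) {
                throw error.get();
            }
            Map<String, byte[]> result = new LinkedHashMap<>();
            digests.forEach((algo, md) -> result.put(algo, md.digest()));
            return result;
        } finally {
            pool.shutdown();
        }
    }
}

A caller might use it as ParallelHasher.hashAll(Files.newInputStream(path), "MD5", "SHA-1", "SHA-256"). Each MessageDigest is updated by exactly one task per cycle and cycles are separated by await(), so no digest is ever touched concurrently; the parallelism is across algorithms. Note the sketch also rechecks the error reference after the final await(), whereas the commit checks it only inside the read loop.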