From 9d78293437104fe7c6a839567c4b83fd8ec58e39 Mon Sep 17 00:00:00 2001
From: Jeremy Long
Date: Mon, 1 Sep 2014 07:30:04 -0400
Subject: [PATCH] fix for issue #128 - the application will no longer throw an
 exception on large files when generating the hash digest

Former-commit-id: aae811e5d10ca0ee5ac7316fa992b5c45e43d2be
---
 .../owasp/dependencycheck/utils/Checksum.java | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/dependency-check-utils/src/main/java/org/owasp/dependencycheck/utils/Checksum.java b/dependency-check-utils/src/main/java/org/owasp/dependencycheck/utils/Checksum.java
index bc497b8d3..0224943a2 100644
--- a/dependency-check-utils/src/main/java/org/owasp/dependencycheck/utils/Checksum.java
+++ b/dependency-check-utils/src/main/java/org/owasp/dependencycheck/utils/Checksum.java
@@ -63,8 +63,22 @@ public final class Checksum {
         try {
             fis = new FileInputStream(file);
             FileChannel ch = fis.getChannel();
-            MappedByteBuffer byteBuffer = ch.map(FileChannel.MapMode.READ_ONLY, 0, file.length());
-            digest.update(byteBuffer);
+            long remainingToRead = file.length();
+            long start = 0;
+            while (remainingToRead > 0) {
+                long amountToRead;
+                if (remainingToRead > Integer.MAX_VALUE) {
+                    remainingToRead -= Integer.MAX_VALUE;
+                    amountToRead = Integer.MAX_VALUE;
+                } else {
+                    amountToRead = remainingToRead;
+                    remainingToRead = 0;
+                }
+                MappedByteBuffer byteBuffer = ch.map(FileChannel.MapMode.READ_ONLY, start, amountToRead);
+                digest.update(byteBuffer);
+                start += amountToRead;
+            }
+
             // BufferedInputStream bis = new BufferedInputStream(fis);
             // DigestInputStream dis = new DigestInputStream(bis, digest);
             // //yes, we are reading in a buffer for performance reasons - 1 byte at a time is SLOW
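
Note on the technique: FileChannel.map rejects any single mapping larger than
Integer.MAX_VALUE bytes, which is why the old one-shot map of file.length()
failed on very large files. The hunk above fixes that by mapping and digesting
the file in successive chunks no larger than Integer.MAX_VALUE. What follows is
a minimal standalone sketch of the same chunked-mapping idea, written against
the java.nio.file API; the class name ChunkedDigest and the method digestFile
are illustrative only and are not part of this patch or of dependency-check.

import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public final class ChunkedDigest {

    /**
     * Hashes a file of arbitrary size by memory-mapping it in chunks of at
     * most Integer.MAX_VALUE bytes, the upper bound FileChannel.map accepts
     * for a single mapping. (Illustrative sketch, not the patched method.)
     */
    public static byte[] digestFile(Path path, String algorithm)
            throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance(algorithm);
        try (FileChannel ch = FileChannel.open(path, StandardOpenOption.READ)) {
            long remaining = ch.size();
            long start = 0;
            while (remaining > 0) {
                // Never request more than Integer.MAX_VALUE bytes per mapping.
                long amount = Math.min(remaining, Integer.MAX_VALUE);
                MappedByteBuffer buffer = ch.map(FileChannel.MapMode.READ_ONLY, start, amount);
                // update(ByteBuffer) consumes the whole mapped region.
                digest.update(buffer);
                start += amount;
                remaining -= amount;
            }
        }
        return digest.digest();
    }

    public static void main(String[] args) throws Exception {
        byte[] hash = digestFile(Paths.get(args[0]), "SHA-1");
        StringBuilder hex = new StringBuilder();
        for (byte b : hash) {
            hex.append(String.format("%02x", b));
        }
        System.out.println(hex);
    }
}

Unlike the patched method, the sketch uses try-with-resources so the channel is
closed deterministically; the Math.min call replaces the if/else bookkeeping in
the patch but computes the same chunk sizes.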