mirror of
https://github.com/ysoftdevs/DependencyCheck.git
synced 2026-01-15 08:13:43 +01:00
Fix for issue #128: the application will no longer throw an exception on large files when generating the hash digest.
Former-commit-id: aae811e5d10ca0ee5ac7316fa992b5c45e43d2be
This commit is contained in:
@@ -63,8 +63,22 @@ public final class Checksum {
|
||||
try {
|
||||
fis = new FileInputStream(file);
|
||||
FileChannel ch = fis.getChannel();
|
||||
MappedByteBuffer byteBuffer = ch.map(FileChannel.MapMode.READ_ONLY, 0, file.length());
|
||||
digest.update(byteBuffer);
|
||||
long remainingToRead = file.length();
|
||||
long start = 0;
|
||||
while (remainingToRead > 0) {
|
||||
long amountToRead;
|
||||
if (remainingToRead > Integer.MAX_VALUE) {
|
||||
remainingToRead -= Integer.MAX_VALUE;
|
||||
amountToRead = Integer.MAX_VALUE;
|
||||
} else {
|
||||
amountToRead = remainingToRead;
|
||||
remainingToRead = 0;
|
||||
}
|
||||
MappedByteBuffer byteBuffer = ch.map(FileChannel.MapMode.READ_ONLY, start, amountToRead);
|
||||
digest.update(byteBuffer);
|
||||
start += amountToRead;
|
||||
}
|
||||
|
||||
// BufferedInputStream bis = new BufferedInputStream(fis);
|
||||
// DigestInputStream dis = new DigestInputStream(bis, digest);
|
||||
// //yes, we are reading in a buffer for performance reasons - 1 byte at a time is SLOW
|
||||
|
||||
Reference in New Issue
Block a user