You are viewing a plain text version of this content. The canonical version is available at the mailing-list archive (the original hyperlink was lost in this plain-text export).
Posted to commits@commons.apache.org by ah...@apache.org on 2019/12/30 20:52:26 UTC
[commons-codec] branch master updated: Test incremental hash with
huge length array added to unprocessed bytes.
This is an automated email from the ASF dual-hosted git repository.
aherbert pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-codec.git
The following commit(s) were added to refs/heads/master by this push:
new a387ac8 Test incremental hash with huge length array added to unprocessed bytes.
a387ac8 is described below
commit a387ac85030cf161b41472212dafc41e85fc393f
Author: Alex Herbert <ah...@apache.org>
AuthorDate: Mon Dec 30 20:52:22 2019 +0000
Test incremental hash with huge length array added to unprocessed bytes.
---
.../apache/commons/codec/digest/MurmurHash3.java | 1 +
.../commons/codec/digest/MurmurHash3Test.java | 34 ++++++++++++++++++++++
2 files changed, 35 insertions(+)
diff --git a/src/main/java/org/apache/commons/codec/digest/MurmurHash3.java b/src/main/java/org/apache/commons/codec/digest/MurmurHash3.java
index 4509a8f..eeab953 100644
--- a/src/main/java/org/apache/commons/codec/digest/MurmurHash3.java
+++ b/src/main/java/org/apache/commons/codec/digest/MurmurHash3.java
@@ -1068,6 +1068,7 @@ public final class MurmurHash3 {
// Check if the unprocessed bytes and new bytes can fill a block of 4.
// Make this overflow safe in the event that length is Integer.MAX_VALUE.
+ // Equivalent to: (unprocessedLength + length < BLOCK_SIZE)
if (unprocessedLength + length - BLOCK_SIZE < 0) {
// Not enough so add to the unprocessed bytes
System.arraycopy(data, offset, unprocessed, unprocessedLength, length);
diff --git a/src/test/java/org/apache/commons/codec/digest/MurmurHash3Test.java b/src/test/java/org/apache/commons/codec/digest/MurmurHash3Test.java
index 61df7f2..1d6ccf3 100644
--- a/src/test/java/org/apache/commons/codec/digest/MurmurHash3Test.java
+++ b/src/test/java/org/apache/commons/codec/digest/MurmurHash3Test.java
@@ -18,6 +18,7 @@
package org.apache.commons.codec.digest;
import org.junit.Assert;
+import org.junit.Assume;
import java.nio.ByteBuffer;
import java.util.Arrays;
@@ -890,4 +891,37 @@ public class MurmurHash3Test {
}
return Arrays.copyOf(blocks, count);
}
+
+ /**
+ * This test hits an edge case where a very large number of bytes is added to the incremental
+ * hash. The data is constructed so that an integer counter of unprocessed bytes will
+ * overflow. If this is not handled correctly then the code throws an exception when it
+ * copies more data into the unprocessed bytes array.
+ */
+ @Test
+ public void testIncrementalHashWithUnprocessedBytesAndHugeLengthArray() {
+ // Assert the test precondition that a large array added to unprocessed bytes
+ // will overflow an integer counter. We use the smallest hugeLength possible
+ // as some VMs cannot allocate maximum length arrays.
+ final int unprocessedSize = 3;
+ final int hugeLength = Integer.MAX_VALUE - 2;
+ Assert.assertTrue("This should overflow to negative", unprocessedSize + hugeLength < 4);
+
+ // Check the test can be run
+ byte[] bytes = null;
+ try {
+ bytes = new byte[hugeLength];
+ } catch (OutOfMemoryError ignore) {
+ // Some VMs cannot allocate an array this large.
+ // Some test environments may not have enough available memory for this.
+ }
+ Assume.assumeTrue("Cannot allocate array of length " + hugeLength, bytes != null);
+
+ final IncrementalHash32x86 inc = new IncrementalHash32x86();
+ inc.start(0);
+ // Add bytes that should be unprocessed
+ inc.add(bytes, 0, unprocessedSize);
+ // Add a huge number of bytes to overflow an integer counter of unprocessed bytes.
+ inc.add(bytes, 0, hugeLength);
+ }
}