/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.codec.digest;

import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

import org.junit.jupiter.api.Test;

class XXHash32OverflowTest {

    /**
     * This test hits an edge case where a very large number of bytes is added to the incremental
     * hash. The data is constructed so that an integer counter of unprocessed bytes will
     * overflow. If this is not handled correctly then the code throws an exception when it
     * copies more data into the unprocessed bytes array.
     */
    @Test
    void testIncrementalHashWithUnprocessedBytesAndHugeLengthArray() {
        // Assert the test precondition that a large array added to unprocessed bytes
        // will overflow an integer counter. We use the smallest hugeLength possible
        // as some VMs cannot allocate maximum length arrays.
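        // Note: xxHash32 consumes input in 16-byte blocks, so at most bufferSize - 1 bytes are
        // expected to remain buffered as unprocessed between calls to update().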
        final int bufferSize = 16;
        final int unprocessedSize = bufferSize - 1;
        final int hugeLength = Integer.MAX_VALUE - (unprocessedSize - 1);
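        // unprocessedSize + hugeLength == 15 + (Integer.MAX_VALUE - 14), which wraps around to
        // Integer.MIN_VALUE in 32-bit int arithmetic.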
        assertTrue(unprocessedSize + hugeLength < bufferSize, "This should overflow to negative");

        // Check the test can be run
        byte[] bytes = null;
        try {
            bytes = new byte[hugeLength];
        } catch (final OutOfMemoryError ignore) {
            // Some VMs cannot allocate an array this large.
            // Some test environments may not have enough available memory for this.
        }
        assumeTrue(bytes != null, "Cannot allocate array of length " + hugeLength);

        final XXHash32 inc = new XXHash32();
        // Add bytes that should be unprocessed
        inc.update(bytes, 0, unprocessedSize);
        // Add a huge number of bytes to overflow an integer counter of unprocessed bytes.
        inc.update(bytes, 0, hugeLength);
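        // If the overflow is not handled correctly, the update above throws when copying data
        // into the unprocessed bytes array; completing without an exception is the success
        // condition of this test.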
    }
}