
Commit 138553b

Handled ArrayIndexOutOfBoundsException in TestCoderBase
1 parent 03d600f · commit 138553b
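The change guards erased-index lookups so that an index beyond the data or parity chunk array no longer surfaces as a raw ArrayIndexOutOfBoundsException but as a HadoopIllegalArgumentException with a descriptive message. A minimal standalone sketch of that guard is below; the helper, array, and index values are hypothetical, and the actual patched code is in the TestCoderBase diff that follows.

// Minimal sketch of the bounds guard (hypothetical helper, not part of the patch).
import org.apache.hadoop.HadoopIllegalArgumentException;

public class ErasedIndexGuardSketch {
  static Object eraseChunk(Object[] chunks, int erasedIndex) {
    if (erasedIndex < chunks.length) {
      Object erased = chunks[erasedIndex];   // previously this lookup could throw
      chunks[erasedIndex] = null;            // ArrayIndexOutOfBoundsException
      return erased;
    }
    throw new HadoopIllegalArgumentException(
        "The erased index is out of bound: erasedIndex=" + erasedIndex);
  }

  public static void main(String[] args) {
    Object[] parityChunks = new Object[1];   // e.g. numParityUnits == 1
    eraseChunk(parityChunks, 1);             // out of bound -> HadoopIllegalArgumentException
  }
}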

File tree

3 files changed: 32 additions & 7 deletions

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingState.java

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ <T> void checkParameters(T[] inputs, int[] erasedIndexes,
       T[] outputs) {
     if (inputs.length != decoder.getNumParityUnits() +
         decoder.getNumDataUnits()) {
-      throw new IllegalArgumentException("Invalid inputs length");
+      throw new HadoopIllegalArgumentException("Invalid inputs length");
     }

     if (erasedIndexes.length != outputs.length) {
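
HadoopIllegalArgumentException extends the JDK IllegalArgumentException, so callers that catch the JDK type are unaffected by this substitution; only callers that want to match the Hadoop-specific type see a difference. A small illustrative sketch (not part of the patch):

import org.apache.hadoop.HadoopIllegalArgumentException;

public class CatchCompatibilitySketch {
  public static void main(String[] args) {
    try {
      throw new HadoopIllegalArgumentException("Invalid inputs length");
    } catch (IllegalArgumentException e) {
      // Still caught: HadoopIllegalArgumentException is a subclass of IllegalArgumentException.
      System.out.println("caught: " + e.getMessage());
    }
  }
}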

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java

Lines changed: 17 additions & 4 deletions
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode;

+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.erasurecode.BufferAllocator.SimpleBufferAllocator;
 import org.apache.hadoop.io.erasurecode.BufferAllocator.SlicedBufferAllocator;

@@ -224,13 +225,25 @@ protected ECChunk[] backupAndEraseChunks(ECChunk[] dataChunks,
     int idx = 0;

     for (int i = 0; i < erasedDataIndexes.length; i++) {
-      toEraseChunks[idx ++] = dataChunks[erasedDataIndexes[i]];
-      dataChunks[erasedDataIndexes[i]] = null;
+      if (erasedDataIndexes[i] < dataChunks.length) {
+        toEraseChunks[idx ++] = dataChunks[erasedDataIndexes[i]];
+        dataChunks[erasedDataIndexes[i]] = null;
+      } else {
+        throw new HadoopIllegalArgumentException(
+            "The erased index is out of bound: erasedDataIndex="
+                + erasedDataIndexes[i]);
+      }
     }

     for (int i = 0; i < erasedParityIndexes.length; i++) {
-      toEraseChunks[idx ++] = parityChunks[erasedParityIndexes[i]];
-      parityChunks[erasedParityIndexes[i]] = null;
+      if (erasedParityIndexes[i] < parityChunks.length) {
+        toEraseChunks[idx ++] = parityChunks[erasedParityIndexes[i]];
+        parityChunks[erasedParityIndexes[i]] = null;
+      } else {
+        throw new HadoopIllegalArgumentException(
+            "The erased index is out of bound: erasedParityIndex="
+                + erasedParityIndexes[i]);
+      }
     }

     return toEraseChunks;

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDecodingValidator.java

Lines changed: 14 additions & 2 deletions
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;

+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
 import org.apache.hadoop.test.GenericTestUtils;

@@ -50,6 +51,7 @@ public static Collection<Object[]> data() {
         {RSRawErasureCoderFactory.class, 6, 3, new int[]{1}, new int[]{}},
         {RSRawErasureCoderFactory.class, 6, 3, new int[]{3}, new int[]{0}},
         {RSRawErasureCoderFactory.class, 6, 3, new int[]{2, 4}, new int[]{1}},
+        {RSRawErasureCoderFactory.class, 6, 1, new int[]{0}, new int[]{1}},
         {NativeRSRawErasureCoderFactory.class, 6, 3, new int[]{0}, new int[]{}},
         {XORRawErasureCoderFactory.class, 10, 1, new int[]{0}, new int[]{}},
         {NativeXORRawErasureCoderFactory.class, 10, 1, new int[]{0},

@@ -123,7 +125,12 @@ protected void performTestValidate(int chunkSize) {
     }

     // decode
-    backupAndEraseChunks(clonedDataChunks, parityChunks);
+    try {
+      backupAndEraseChunks(clonedDataChunks, parityChunks);
+    } catch (HadoopIllegalArgumentException e) {
+      String expected = "The erased index is out of bound";
+      Assume.assumeTrue(expected, !e.toString().contains(expected));
+    }
     ECChunk[] inputChunks =
         prepareInputChunksForDecoding(clonedDataChunks, parityChunks);
     markChunks(inputChunks);

@@ -210,7 +217,12 @@ public void testValidateWithBadDecoding() throws IOException {
     }

     // decode
-    backupAndEraseChunks(clonedDataChunks, parityChunks);
+    try {
+      backupAndEraseChunks(clonedDataChunks, parityChunks);
+    } catch (HadoopIllegalArgumentException e) {
+      String expected = "The erased index is out of bound";
+      Assume.assumeTrue(expected, !e.toString().contains(expected));
+    }
     ECChunk[] inputChunks =
         prepareInputChunksForDecoding(clonedDataChunks, parityChunks);
     markChunks(inputChunks);
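
The new parameter set {RSRawErasureCoderFactory.class, 6, 1, new int[]{0}, new int[]{1}} erases parity index 1 while only one parity unit exists, which exercises the new out-of-bound guard. The catch blocks then call Assume.assumeTrue with a false condition whenever the message contains the out-of-bound marker, so JUnit skips the remainder of that test case instead of failing it. A standalone sketch of that skip behavior (hypothetical values, not the Hadoop test itself):

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.junit.Assume;

public class AssumeSkipSketch {
  public static void main(String[] args) {
    int numParityUnits = 1;        // from the new parameter set {..., 6, 1, ...}
    int erasedParityIndex = 1;     // new int[]{1}: out of bound for a single parity unit
    try {
      if (erasedParityIndex >= numParityUnits) {
        throw new HadoopIllegalArgumentException(
            "The erased index is out of bound: erasedParityIndex=" + erasedParityIndex);
      }
    } catch (HadoopIllegalArgumentException e) {
      String expected = "The erased index is out of bound";
      // Condition is false here, so JUnit raises AssumptionViolatedException
      // and the surrounding test would be skipped rather than failed.
      Assume.assumeTrue(expected, !e.toString().contains(expected));
    }
  }
}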
