/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.io.hfile.bucket;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

import java.nio.ByteBuffer;
import java.util.Arrays;

import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Basic test for {@link ByteBufferIOEngine}
 */
@Category(SmallTests.class)
public class TestByteBufferIOEngine {

  @Test
  public void testByteBufferIOEngine() throws Exception {
    int capacity = 32 * 1024 * 1024; // 32 MB
    int testNum = 100;
    int maxBlockSize = 64 * 1024;
    // The second argument selects direct (off-heap) buffers; false keeps
    // the engine backed by on-heap ByteBuffers.
    ByteBufferIOEngine ioEngine = new ByteBufferIOEngine(capacity, false);
    // Spend the first tenth of the iterations writing at offset 0 and the
    // next tenth writing blocks that end exactly at capacity, so both
    // boundaries of the engine's address space are exercised.
    int testOffsetAtStartNum = testNum / 10;
    int testOffsetAtEndNum = testNum / 10;
    for (int i = 0; i < testNum; i++) {
      byte val = (byte) (Math.random() * 255);
      // Pick a random block size in [1, maxBlockSize).
      int blockSize = (int) (Math.random() * maxBlockSize);
      if (blockSize == 0) {
        blockSize = 1;
      }
      byte[] byteArray = new byte[blockSize];
      Arrays.fill(byteArray, val);
      ByteBuffer srcBuffer = ByteBuffer.wrap(byteArray);
      int offset;
      if (testOffsetAtStartNum > 0) {
        // Edge case: block at the very start of the engine.
        testOffsetAtStartNum--;
        offset = 0;
      } else if (testOffsetAtEndNum > 0) {
        // Edge case: block ending exactly at capacity.
        testOffsetAtEndNum--;
        offset = capacity - blockSize;
      } else {
        // Common case: a random offset that leaves room for a max-sized block.
        offset = (int) (Math.random() * (capacity - maxBlockSize));
      }
      ioEngine.write(srcBuffer, offset);
      ByteBuffer dstBuffer = ByteBuffer.allocate(blockSize);
      ioEngine.read(dstBuffer, offset);
      // Whatever was written must be read back byte-for-byte.
      assertArrayEquals(byteArray, dstBuffer.array());
    }
    // Use JUnit assertions here: the assert keyword is a silent no-op unless
    // the JVM runs with -ea, so it would never fail the test.
    assertEquals(0, testOffsetAtStartNum);
    assertEquals(0, testOffsetAtEndNum);
  }
}