/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.io.hfile.bucket;

import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

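/**
 * Basic test for {@link FileIOEngine}: writes random bytes at random offsets into the
 * file-backed IO engine and verifies that the same bytes are read back.
 */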
@Category(SmallTests.class)
public class TestFileIOEngine {
  @Test
  public void testFileIOEngine() throws IOException {
    int size = 2 * 1024 * 1024; // 2 MB backing file
    String filePath = "testFileIOEngine";
    try {
      FileIOEngine fileIOEngine = new FileIOEngine(filePath, size);
      for (int i = 0; i < 50; i++) {
        // Pick a random length and a random offset that keeps the access inside the file.
        int len = (int) Math.floor(Math.random() * 100);
        long offset = (long) Math.floor(Math.random() * size % (size - len));
        byte[] data1 = new byte[len];
        for (int j = 0; j < data1.length; ++j) {
          data1[j] = (byte) (Math.random() * 255);
        }
        // Write the random bytes, read them back, and verify the round trip byte by byte.
        byte[] data2 = new byte[len];
        fileIOEngine.write(ByteBuffer.wrap(data1), offset);
        fileIOEngine.read(ByteBuffer.wrap(data2), offset);
        for (int j = 0; j < data1.length; ++j) {
          assertTrue(data1[j] == data2[j]);
        }
      }
    } finally {
      // Clean up the backing file created by the engine.
      File file = new File(filePath);
      if (file.exists()) {
        file.delete();
      }
    }
  }
}