/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

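/**
 * Tests encoded seekers by writing data with every {@link DataBlockEncoding}, with and without
 * cell tags, reading it back before and after compaction, and checking that only blocks with the
 * expected encoding end up in the block cache.
 */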
@Category(MediumTests.class)
@RunWith(Parameterized.class)
public class TestEncodedSeekers {

  private static final String TABLE_NAME = "encodedSeekersTable";
  private static final String CF_NAME = "encodedSeekersCF";
  private static final byte[] CF_BYTES = Bytes.toBytes(CF_NAME);
  private static final int MAX_VERSIONS = 5;

  private static final int BLOCK_SIZE = 64 * 1024;
  private static final int MIN_VALUE_SIZE = 30;
  private static final int MAX_VALUE_SIZE = 60;
  private static final int NUM_ROWS = 1003;
  private static final int NUM_COLS_PER_ROW = 20;
  private static final int NUM_HFILES = 4;
  private static final int NUM_ROWS_PER_FLUSH = NUM_ROWS / NUM_HFILES;

  private final HBaseTestingUtility testUtil = HBaseTestingUtility.createLocalHTU();
  private final DataBlockEncoding encoding;
  private final boolean includeTags;
  private final boolean compressTags;

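  /** Set to true to dump every put and get to stderr while debugging. */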
  private static final boolean VERBOSE = false;

  @Parameters
  public static Collection<Object[]> parameters() {
    List<Object[]> paramList = new ArrayList<Object[]>();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      for (boolean includeTags : new boolean[] { false, true }) {
        for (boolean compressTags : new boolean[] { false, true }) {
          paramList.add(new Object[] { encoding, includeTags, compressTags });
        }
      }
    }
    return paramList;
  }

  public TestEncodedSeekers(DataBlockEncoding encoding, boolean includeTags,
      boolean compressTags) {
    this.encoding = encoding;
    this.includeTags = includeTags;
    this.compressTags = compressTags;
  }

  @Test
  public void testEncodedSeeker() throws IOException {
    System.err.println("Testing encoded seekers for encoding : " + encoding + ", includeTags : "
        + includeTags + ", compressTags : " + compressTags);
    if (includeTags) {
      testUtil.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, 3);
    }
    LruBlockCache cache =
        (LruBlockCache) new CacheConfig(testUtil.getConfiguration()).getBlockCache();
    cache.clearCache();

    HColumnDescriptor hcd = (new HColumnDescriptor(CF_NAME))
        .setMaxVersions(MAX_VERSIONS)
        .setDataBlockEncoding(encoding)
        .setBlocksize(BLOCK_SIZE)
        .setBloomFilterType(BloomType.NONE)
        .setCompressTags(compressTags);
    HRegion region = testUtil.createTestRegion(TABLE_NAME, hcd);

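    // Write the data, flushing every NUM_ROWS_PER_FLUSH rows so it is spread across several
    // store files while the most recently written rows remain in the memstore.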
    doPuts(region);

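    // Verify correctness while part of the data still sits in the memstore.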
    doGets(region);

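    // Verify correctness again after compacting the region.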
    region.compactStores();
    doGets(region);

    Map<DataBlockEncoding, Integer> encodingCounts = cache.getEncodingCountsForTest();

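    // Only blocks carrying the encoding under test should have been cached, and at least one
    // such block must be present.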
    System.err.println("encodingCounts=" + encodingCounts);
    assertEquals(1, encodingCounts.size());
    DataBlockEncoding encodingInCache = encodingCounts.keySet().iterator().next();
    assertEquals(encoding, encodingInCache);
    assertTrue(encodingCounts.get(encodingInCache) > 0);
  }

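  /**
   * Writes NUM_ROWS rows with NUM_COLS_PER_ROW columns each, adding a tag to every cell when
   * tags are enabled, and flushes the region every NUM_ROWS_PER_FLUSH rows.
   */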
  private void doPuts(HRegion region) throws IOException {
    LoadTestKVGenerator dataGenerator = new LoadTestKVGenerator(MIN_VALUE_SIZE, MAX_VALUE_SIZE);
    for (int i = 0; i < NUM_ROWS; ++i) {
      byte[] key = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
        Put put = new Put(key);
        byte[] col = Bytes.toBytes(String.valueOf(j));
        byte[] value = dataGenerator.generateRandomSizeValue(key, col);
        if (includeTags) {
          Tag[] tag = new Tag[1];
          tag[0] = new Tag((byte) 1, "Visibility");
          KeyValue kv = new KeyValue(key, CF_BYTES, col, HConstants.LATEST_TIMESTAMP, value, tag);
          put.add(kv);
        } else {
          put.add(CF_BYTES, col, value);
        }
        if (VERBOSE) {
          KeyValue kvPut = new KeyValue(key, CF_BYTES, col, value);
          System.err.println(Strings.padFront(i + "", ' ', 4) + " " + kvPut);
        }
        region.put(put);
      }
      if (i % NUM_ROWS_PER_FLUSH == 0) {
        region.flushcache();
      }
    }
  }

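  /**
   * Reads every cell back with a single-column Get and verifies the value against the
   * deterministic LoadTestKVGenerator output.
   */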
  private void doGets(HRegion region) throws IOException {
    for (int i = 0; i < NUM_ROWS; ++i) {
      final byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
        final String qualStr = String.valueOf(j);
        if (VERBOSE) {
          System.err.println("Reading row " + i + ", column " + j + " "
              + Bytes.toString(rowKey) + "/" + qualStr);
        }
        final byte[] qualBytes = Bytes.toBytes(qualStr);
        Get get = new Get(rowKey);
        get.addColumn(CF_BYTES, qualBytes);
        Result result = region.get(get);
        assertEquals(1, result.size());
        byte[] value = result.getValue(CF_BYTES, qualBytes);
        assertTrue(LoadTestKVGenerator.verify(value, rowKey, qualBytes));
      }
    }
  }

}