/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
52  /**
53   * Tests encoded seekers by loading and reading values.
54   */
55  @Category(MediumTests.class)
56  @RunWith(Parameterized.class)
57  public class TestEncodedSeekers {
58  
59    private static final String TABLE_NAME = "encodedSeekersTable";
60    private static final String CF_NAME = "encodedSeekersCF";
61    private static final byte[] CF_BYTES = Bytes.toBytes(CF_NAME);
62    private static final int MAX_VERSIONS = 5;
63  
64    private static final int BLOCK_SIZE = 64 * 1024;
65    private static final int MIN_VALUE_SIZE = 30;
66    private static final int MAX_VALUE_SIZE = 60;
67    private static final int NUM_ROWS = 1003;
68    private static final int NUM_COLS_PER_ROW = 20;
69    private static final int NUM_HFILES = 4;
70    private static final int NUM_ROWS_PER_FLUSH = NUM_ROWS / NUM_HFILES;
71  
72    private final HBaseTestingUtility testUtil = HBaseTestingUtility.createLocalHTU();
73    private final DataBlockEncoding encoding;
74    private final boolean includeTags;
75    private final boolean compressTags;
76  
77    /** Enable when debugging */
78    private static final boolean VERBOSE = false;
79  
80    @Parameters
81    public static Collection<Object[]> parameters() {
82      List<Object[]> paramList = new ArrayList<Object[]>();
83      for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
84        for (boolean includeTags : new boolean[] { false, true }) {
85          for (boolean compressTags : new boolean[] { false, true }) {
86            paramList.add(new Object[] { encoding, includeTags, compressTags });
87          }
88        }
89      }
90      return paramList;
91    }
92  
93    public TestEncodedSeekers(DataBlockEncoding encoding, boolean includeTags, boolean compressTags) {
94      this.encoding = encoding;
95      this.includeTags = includeTags;
96      this.compressTags = compressTags;
97    }
98  
99    @Test
100   public void testEncodedSeeker() throws IOException {
101     System.err.println("Testing encoded seekers for encoding : " + encoding + ", includeTags : "
102         + includeTags + ", compressTags : " + compressTags);
103     if(includeTags) {
104       testUtil.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, 3);
105     }
106     LruBlockCache cache =
107       (LruBlockCache)new CacheConfig(testUtil.getConfiguration()).getBlockCache();
108     cache.clearCache();
109     // Need to disable default row bloom filter for this test to pass.
110     HColumnDescriptor hcd = (new HColumnDescriptor(CF_NAME)).setMaxVersions(MAX_VERSIONS).
111         setDataBlockEncoding(encoding).
112         setBlocksize(BLOCK_SIZE).
113         setBloomFilterType(BloomType.NONE).
114         setCompressTags(compressTags);
115     Region region = testUtil.createTestRegion(TABLE_NAME, hcd);
116 
117     //write the data, but leave some in the memstore
118     doPuts(region);
119 
120     //verify correctness when memstore contains data
121     doGets(region);
122 
123     //verify correctness again after compacting
124     region.compact(false);
125     doGets(region);
126 
127     Map<DataBlockEncoding, Integer> encodingCounts = cache.getEncodingCountsForTest();
128 
129     // Ensure that compactions don't pollute the cache with unencoded blocks
130     // in case of in-cache-only encoding.
131     System.err.println("encodingCounts=" + encodingCounts);
132     assertEquals(1, encodingCounts.size());
133     DataBlockEncoding encodingInCache = encodingCounts.keySet().iterator().next();
134     assertEquals(encoding, encodingInCache);
135     assertTrue(encodingCounts.get(encodingInCache) > 0);
136   }
137 
138 
139   private void doPuts(Region region) throws IOException{
140     LoadTestKVGenerator dataGenerator = new LoadTestKVGenerator(MIN_VALUE_SIZE, MAX_VALUE_SIZE);
141      for (int i = 0; i < NUM_ROWS; ++i) {
142       byte[] key = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
143       for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
144         Put put = new Put(key);
145         put.setDurability(Durability.ASYNC_WAL);
146         byte[] col = Bytes.toBytes(String.valueOf(j));
147         byte[] value = dataGenerator.generateRandomSizeValue(key, col);
148         if (includeTags) {
149           Tag[] tag = new Tag[1];
150           tag[0] = new Tag((byte) 1, "Visibility");
151           KeyValue kv = new KeyValue(key, CF_BYTES, col, HConstants.LATEST_TIMESTAMP, value, tag);
152           put.add(kv);
153         } else {
154           put.add(CF_BYTES, col, value);
155         }
156         if(VERBOSE){
157           KeyValue kvPut = new KeyValue(key, CF_BYTES, col, value);
158           System.err.println(Strings.padFront(i+"", ' ', 4)+" "+kvPut);
159         }
160         region.put(put);
161       }
162       if (i % NUM_ROWS_PER_FLUSH == 0) {
163         region.flush(true);
164       }
165     }
166   }
167 
168 
169   private void doGets(Region region) throws IOException{
170     for (int i = 0; i < NUM_ROWS; ++i) {
171       final byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
172       for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
173         final String qualStr = String.valueOf(j);
174         if (VERBOSE) {
175           System.err.println("Reading row " + i + ", column " + j + " " + Bytes.toString(rowKey)+"/"
176               +qualStr);
177         }
178         final byte[] qualBytes = Bytes.toBytes(qualStr);
179         Get get = new Get(rowKey);
180         get.addColumn(CF_BYTES, qualBytes);
181         Result result = region.get(get);
182         assertEquals(1, result.size());
183         byte[] value = result.getValue(CF_BYTES, qualBytes);
184         assertTrue(LoadTestKVGenerator.verify(value, rowKey, qualBytes));
185       }
186     }
187   }
188 
189 }