/*
 * Copyright The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * This test verifies that the scenarios illustrated by HBASE-10850 work
 * w.r.t. the essential column family optimization.
 */
@Category(MediumTests.class)
public class TestSCVFWithMiniCluster {
  private static final String HBASE_TABLE_NAME = "TestSCVFWithMiniCluster";

  private static final byte[] FAMILY_A = Bytes.toBytes("a");
  private static final byte[] FAMILY_B = Bytes.toBytes("b");

  private static final byte[] QUALIFIER_FOO = Bytes.toBytes("foo");
  private static final byte[] QUALIFIER_BAR = Bytes.toBytes("bar");

  private static HBaseTestingUtility util;

  private static HTable htable;

  private static Filter scanFilter;

  /** Every scan in this test is expected to return exactly one row (row '1'). */
  private int expected = 1;

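  /**
   * Starts a single-node mini cluster, creates the test table with families 'a' and 'b',
   * loads the three test rows, and builds the SingleColumnValueFilter (with
   * setFilterIfMissing(true)) that every test scan uses.
   */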
  @BeforeClass
  public static void setUp() throws Exception {
    util = new HBaseTestingUtility();

    util.startMiniCluster(1);

    HBaseAdmin admin = util.getHBaseAdmin();
    destroy(admin, HBASE_TABLE_NAME);
    create(admin, HBASE_TABLE_NAME, FAMILY_A, FAMILY_B);
    admin.close();
    htable = new HTable(util.getConfiguration(), HBASE_TABLE_NAME);

    /* Add some values */
    List<Put> puts = new ArrayList<Put>();

    /* Add a row with 'a:foo' = false */
    Put put = new Put(Bytes.toBytes("1"));
    put.setDurability(Durability.SKIP_WAL);
    put.add(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("false"));
    put.add(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.add(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.add(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    /* Add a row with 'a:foo' = true */
    put = new Put(Bytes.toBytes("2"));
    put.setDurability(Durability.SKIP_WAL);
    put.add(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("true"));
    put.add(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.add(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.add(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    /* Add a row with the 'a:foo' qualifier not set */
    put = new Put(Bytes.toBytes("3"));
    put.setDurability(Durability.SKIP_WAL);
    put.add(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.add(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.add(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    htable.put(puts);
    /*
     * We want to filter out from the scan all rows that do not have the column 'a:foo' with value
     * 'false'. Only the row with key '1' should be returned in the scan.
     */
    scanFilter = new SingleColumnValueFilter(FAMILY_A, QUALIFIER_FOO, CompareOp.EQUAL,
      new BinaryComparator(Bytes.toBytes("false")));
    ((SingleColumnValueFilter) scanFilter).setFilterIfMissing(true);
  }

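  /**
   * Closes the test table and shuts down the mini cluster started in {@link #setUp()}.
   */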
  @AfterClass
  public static void tearDown() throws Exception {
    htable.close();
    util.shutdownMiniCluster();
  }

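  /**
   * Runs the given scan against the test table and asserts that the number of rows returned
   * matches {@code expected} (a single row, '1', for every scenario below).
   */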
  private void verify(Scan scan) throws IOException {
    ResultScanner scanner = htable.getScanner(scan);
    Iterator<Result> it = scanner.iterator();

    /* Count the rows returned by the scan */
    int count = 0;
    try {
      while (it.hasNext()) {
        it.next();
        count++;
      }
    } finally {
      scanner.close();
    }
    assertEquals(expected, count);
  }

  /**
   * Test the filter by adding all columns of family A in the scan. (OK)
   */
  @Test
  public void scanWithAllQualifiersOfFamilyA() throws IOException {
    /* Given */
    Scan scan = new Scan();
    scan.addFamily(FAMILY_A);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding all columns of families A and B in the scan. (KO: row '3' without
   * the 'a:foo' qualifier is returned)
   */
  @Test
  public void scanWithAllQualifiersOfBothFamilies() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 2 columns of family A and 2 columns of family B in the scan. (KO:
   * row '3' without the 'a:foo' qualifier is returned)
   */
  @Test
  public void scanWithSpecificQualifiers1() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_A, QUALIFIER_BAR);
    scan.addColumn(FAMILY_B, QUALIFIER_BAR);
    scan.addColumn(FAMILY_B, QUALIFIER_FOO);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 1 column of family A (the one used in the filter) and 1 column of
   * family B in the scan. (OK)
   */
  @Test
  public void scanWithSpecificQualifiers2() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_B, QUALIFIER_BAR);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 2 columns of family A in the scan. (OK)
   */
  @Test
  public void scanWithSpecificQualifiers3() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_A, QUALIFIER_BAR);
    scan.setFilter(scanFilter);

    verify(scan);
  }

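  /**
   * Creates the test table with the given column families (max one version, GZ compression).
   * If the table already exists it is left untouched.
   */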
  private static void create(HBaseAdmin admin, String tableName, byte[]... families)
      throws IOException {
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
    for (byte[] family : families) {
      HColumnDescriptor colDesc = new HColumnDescriptor(family);
      colDesc.setMaxVersions(1);
      colDesc.setCompressionType(Algorithm.GZ);
      desc.addFamily(colDesc);
    }
    try {
      admin.createTable(desc);
    } catch (TableExistsException tee) {
      /* Ignore */
    }
  }

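  /**
   * Disables and deletes the table if it exists; a missing table is silently ignored.
   */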
  private static void destroy(HBaseAdmin admin, String tableName) throws IOException {
    try {
      admin.disableTable(tableName);
      admin.deleteTable(tableName);
    } catch (TableNotFoundException tnfe) {
      /* Ignore */
    }
  }
}