/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputFilesFilter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

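/**
 * Test suite for {@link ImportTsv}: runs small ImportTsv jobs against the
 * mini HBase and MapReduce clusters, covering direct writes to a live table,
 * bulk-load HFile output, timestamps, and custom mapper configurations, and
 * validates the results.
 */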
@Category(LargeTests.class)
public class TestImportTsv implements Configurable {

  protected static final Log LOG = LogFactory.getLog(TestImportTsv.class);
  protected static final String NAME = TestImportTsv.class.getSimpleName();
  protected static HBaseTestingUtility util = new HBaseTestingUtility();
  /**
   * Delete the tmp directory after running doMROnTableTest. Boolean. Default
   * is true.
   */
  protected static final String DELETE_AFTER_LOAD_CONF = NAME + ".deleteAfterLoad";

  /**
   * Force use of combiner in doMROnTableTest. Boolean. Default is true.
   */
  protected static final String FORCE_COMBINER_CONF = NAME + ".forceCombiner";

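  // Both knobs above are read from the job Configuration inside
  // doMROnTableTest, so callers can flip them before running, e.g.:
  //   util.getConfiguration().setBoolean(DELETE_AFTER_LOAD_CONF, false);
  //   util.getConfiguration().setBoolean(FORCE_COMBINER_CONF, false);
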
  private final String FAMILY = "FAM";

  public Configuration getConf() {
    return util.getConfiguration();
  }

  public void setConf(Configuration conf) {
    throw new IllegalArgumentException("setConf not supported");
  }

  @BeforeClass
  public static void provisionCluster() throws Exception {
    util.startMiniCluster();
    util.startMiniMapReduceCluster();
  }

  @AfterClass
  public static void releaseCluster() throws Exception {
    util.shutdownMiniMapReduceCluster();
    util.shutdownMiniCluster();
  }

  @Test
  public void testMROnTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 1);
    util.deleteTable(table);
  }
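
  // Note: with a null data argument, doMROnTableTest writes the default
  // ESC-separated input line (KEY, VALUE1, VALUE2), so the column spec above
  // should yield a single row "KEY" with FAM:A=VALUE1 and FAM:B=VALUE2.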

  @Test
  public void testMROnTableWithTimestamp() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY
            + "=HBASE_ROW_KEY,HBASE_TS_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        table
    };
    String data = "KEY,1234,VALUE1,VALUE2\n";

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1);
    util.deleteTable(table);
  }
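
  // HBASE_TS_KEY above consumes the second input column ("1234") as the cell
  // timestamp rather than as data, so both FAM:A and FAM:B cells should be
  // written with timestamp 1234.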

  @Test
  public void testMROnTableWithCustomMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapper",
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }
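
  // valueMultiplier is 3 here: TsvImporterCustomTestMapper is expected to
  // rewrite the default input values so that validateTable finds VALUE3 and
  // VALUE6 (it checks for VALUE{m} and VALUE{2m}).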

  @Test
  public void testBulkOutputWithoutAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        table
    };

    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }
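
  // No createTable call here: with a bulk output directory configured,
  // ImportTsv creates the target table itself when it is missing, which is
  // why the deleteTable call still succeeds afterwards.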

  @Test
  public void testBulkOutputWithAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

  @Test
  public void testJobConfigurationsWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String INPUT_FILE = "InputFile1.csv";
    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table,
        INPUT_FILE
    };
    GenericOptionsParser opts = new GenericOptionsParser(util.getConfiguration(), args);
    args = opts.getRemainingArgs();
    Job job = ImportTsv.createSubmittableJob(util.getConfiguration(), args);
    assertEquals(TsvImporterTextMapper.class, job.getMapperClass());
    assertEquals(TextSortReducer.class, job.getReducerClass());
    assertEquals(Text.class, job.getMapOutputValueClass());
  }

  @Test
  public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table
    };
    String data = "KEY\u001bVALUE4\u001bVALUE8\n";
    doMROnTableTest(util, FAMILY, data, args, 4);
  }
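
  // Because BULK_OUTPUT_CONF_KEY is set, doMROnTableTest validates the HFiles
  // under bulkOutputPath rather than scanning a live table, so no table is
  // created or deleted in this test.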

  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family,
      String data, String[] args) throws Exception {
    return doMROnTableTest(util, family, data, args, 1);
  }

  /**
   * Run an ImportTsv job and perform basic validation on the results. Returns
   * the ImportTsv <code>Tool</code> instance so that other tests can inspect
   * it for further validation as necessary. This method is static to ensure
   * non-reliance on the instance's util/conf facilities.
   * @param util the testing utility hosting the mini clusters
   * @param family the column family the test data is written to
   * @param data the input to import, or null to use a default single line
   * @param args any arguments to pass BEFORE the inputFile path is appended
   * @param valueMultiplier factor encoded in the expected cell values; see validateTable
   * @return The Tool instance used to run the test.
   */
  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family,
      String data, String[] args, int valueMultiplier) throws Exception {
    String table = args[args.length - 1];
    Configuration conf = new Configuration(util.getConfiguration());

    // populate input file
    FileSystem fs = FileSystem.get(conf);
    Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
    FSDataOutputStream op = fs.create(inputPath, true);
    if (data == null) {
      data = "KEY\u001bVALUE1\u001bVALUE2\n";
    }
    op.write(Bytes.toBytes(data));
    op.close();
    LOG.debug(String.format("Wrote test data to file: %s", inputPath));

    if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
      LOG.debug("Forcing combiner.");
      conf.setInt("min.num.spills.for.combine", 1);
    }

    // run the import
    List<String> argv = new ArrayList<String>(Arrays.asList(args));
    argv.add(inputPath.toString());
    Tool tool = new ImportTsv();
    LOG.debug("Running ImportTsv with arguments: " + argv);
    assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(new String[argv.size()])));

    // Perform basic validation. If the input args did not include
    // ImportTsv.BULK_OUTPUT_CONF_KEY then validate data in the table.
    // Otherwise, validate presence of hfiles.
    boolean createdHFiles = false;
    String outputPath = null;
    for (String arg : argv) {
      if (arg.contains(ImportTsv.BULK_OUTPUT_CONF_KEY)) {
        createdHFiles = true;
        // split '-Dfoo=bar' on '=' and keep 'bar'
        outputPath = arg.split("=")[1];
        break;
      }
    }

    if (createdHFiles) {
      validateHFiles(fs, outputPath, family);
    } else {
      validateTable(conf, table, family, valueMultiplier);
    }

    if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
      LOG.debug("Deleting test subdirectory");
      util.cleanupDataTestDirOnTestFS(table);
    }
    return tool;
  }

  /**
   * Confirm the ImportTsv results by scanning the data in the online table.
   */
  private static void validateTable(Configuration conf, String tableName,
      String family, int valueMultiplier) throws IOException {

    LOG.debug("Validating table.");
    HTable table = new HTable(conf, tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
      try {
        Scan scan = new Scan();
        // Scan entire family.
        scan.addFamily(Bytes.toBytes(family));
        ResultScanner resScanner = table.getScanner(scan);
        for (Result res : resScanner) {
          assertEquals(2, res.size());
          List<Cell> kvs = res.listCells();
          assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
          assertTrue(CellUtil.matchingValue(kvs.get(1), Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
          // Only one result set is expected, so let it loop.
        }
        verified = true;
        break;
      } catch (NullPointerException e) {
        // If here, a cell was empty. Presume it's because updates came in
        // after the scanner had been opened. Wait a while and retry.
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // continue
      }
    }
    table.close();
    assertTrue(verified);
  }

  /**
   * Confirm the ImportTsv results by validating the HFiles it wrote to the
   * filesystem.
   */
  private static void validateHFiles(FileSystem fs, String outputPath, String family)
      throws IOException {

    // validate number and content of output columns
    LOG.debug("Validating HFiles.");
    Set<String> configFamilies = new HashSet<String>();
    configFamilies.add(family);
    Set<String> foundFamilies = new HashSet<String>();
    for (FileStatus cfStatus : fs.listStatus(new Path(outputPath), new OutputFilesFilter())) {
      String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);
      String cf = elements[elements.length - 1];
      foundFamilies.add(cf);
      assertTrue(
        String.format(
          "HFile output contains a column family (%s) not present in input families (%s)",
          cf, configFamilies),
        configFamilies.contains(cf));
      for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) {
        assertTrue(
          String.format("HFile %s appears to contain no data.", hfile.getPath()),
          hfile.getLen() > 0);
      }
    }
  }
}