/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;

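/**
 * Test ImportTsv with operation attributes: each input row carries an HBASE_ATTRIBUTES_KEY
 * column that the custom mapper (TsvImporterCustomTestMapperForOprAttr) applies to the Put as
 * an operation attribute, and {@link OperationAttributesTestController} bypasses any Put on a
 * user table that does not carry the expected attribute.
 */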
@Category(LargeTests.class)
public class TestImportTSVWithOperationAttributes implements Configurable {

  @Rule
  public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass())
      .withLookingForStuckThread(true).build();

  private static final Log LOG = LogFactory.getLog(TestImportTSVWithOperationAttributes.class);
  protected static final String NAME = TestImportTsv.class.getSimpleName();
  protected static HBaseTestingUtility util = new HBaseTestingUtility();

  /**
   * Delete the tmp directory after running doMROnTableTest. Boolean. Default is true.
   */
  protected static final String DELETE_AFTER_LOAD_CONF = NAME + ".deleteAfterLoad";

  /**
   * Force use of the combiner in doMROnTableTest. Boolean. Default is true.
   */
  protected static final String FORCE_COMBINER_CONF = NAME + ".forceCombiner";

  private static Configuration conf;

  private static final String TEST_ATR_KEY = "test";

  private final String FAMILY = "FAM";

  @Override
  public Configuration getConf() {
    return util.getConfiguration();
  }

  @Override
  public void setConf(Configuration conf) {
    throw new IllegalArgumentException("setConf not supported");
  }

  @BeforeClass
  public static void provisionCluster() throws Exception {
    conf = util.getConfiguration();
    // Register the test coprocessor so Puts that lack the expected attribute are bypassed.
    conf.set("hbase.coprocessor.master.classes", OperationAttributesTestController.class.getName());
    conf.set("hbase.coprocessor.region.classes", OperationAttributesTestController.class.getName());
    util.setJobWithoutMRCluster();
    util.startMiniCluster();
  }

  @AfterClass
  public static void releaseCluster() throws Exception {
    util.shutdownMiniCluster();
  }

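  /**
   * The row carries the attribute key "test", which matches TEST_ATR_KEY, so the region
   * observer lets the Put through and the loaded data must be readable afterwards.
   */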
  @Test
  public void testMROnTable() throws Exception {
    String tableName = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapperForOprAttr",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
    String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest=>myvalue\n";
    util.createTable(TableName.valueOf(tableName), FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1, true);
    util.deleteTable(tableName);
  }

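  /**
   * The row carries the attribute key "test1", which does not match TEST_ATR_KEY, so the
   * region observer bypasses the Put and the table is expected to stay empty.
   */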
  @Test
  public void testMROnTableWithInvalidOperationAttr() throws Exception {
    String tableName = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapperForOprAttr",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
    String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest1=>myvalue\n";
    util.createTable(TableName.valueOf(tableName), FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1, false);
    util.deleteTable(tableName);
  }

  /**
   * Run an ImportTsv job and perform basic validation on the results. Returns the ImportTsv
   * <code>Tool</code> instance so that other tests can inspect it for further validation as
   * necessary.
   *
   * @param args Any arguments to pass BEFORE the input file path is appended.
   * @param dataAvailable whether the loaded rows are expected to be visible, i.e. whether the
   *          region observer let the Puts through.
   * @return The Tool instance used to run the test.
   */
  private Tool doMROnTableTest(HBaseTestingUtility util, String family, String data, String[] args,
      int valueMultiplier, boolean dataAvailable) throws Exception {
    String table = args[args.length - 1];
    Configuration conf = new Configuration(util.getConfiguration());

    // Populate the input file with the test data.
    FileSystem fs = FileSystem.get(conf);
    Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
    FSDataOutputStream op = fs.create(inputPath, true);
    op.write(Bytes.toBytes(data));
    op.close();
    LOG.debug(String.format("Wrote test data to file: %s", inputPath));

    if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
      LOG.debug("Forcing combiner.");
      conf.setInt("mapreduce.map.combine.minspills", 1);
    }

    // Run the ImportTsv job with the input file path appended to the arguments.
    List<String> argv = new ArrayList<String>(Arrays.asList(args));
    argv.add(inputPath.toString());
    Tool tool = new ImportTsv();
    LOG.debug("Running ImportTsv with arguments: " + argv);
    assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(args)));

    validateTable(conf, TableName.valueOf(table), family, valueMultiplier, dataAvailable);

    if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
      LOG.debug("Deleting test subdirectory");
      util.cleanupDataTestDirOnTestFS(table);
    }
    return tool;
  }

  /**
   * Confirm the state of the online table after the ImportTsv job: when dataAvailable is true,
   * expect one row with two cells; otherwise expect the table to be empty.
   */
  private static void validateTable(Configuration conf, TableName tableName, String family,
      int valueMultiplier, boolean dataAvailable) throws IOException {
    LOG.debug("Validating table.");
    Table table = new HTable(conf, tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
      try {
        Scan scan = new Scan();
        // Scan the entire family.
        scan.addFamily(Bytes.toBytes(family));
        if (dataAvailable) {
          ResultScanner resScanner = table.getScanner(scan);
          for (Result res : resScanner) {
            LOG.debug("Getting results " + res.size());
            assertEquals(2, res.size());
            List<Cell> kvs = res.listCells();
            assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
            assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
            assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
            assertTrue(CellUtil.matchingValue(kvs.get(1),
                Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
            // Only one row is expected, so let the loop run.
            verified = true;
          }
        } else {
          // The observer should have bypassed every Put, so the table must be empty.
          ResultScanner resScanner = table.getScanner(scan);
          Result[] next = resScanner.next(2);
          assertEquals(0, next.length);
          verified = true;
        }
        break;
      } catch (NullPointerException e) {
        // If here, a cell was empty. Presume it's because updates came in after the scanner
        // had been opened. Wait a while and retry.
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // continue
      }
    }
    table.close();
    assertTrue(verified);
  }

  /**
   * Region observer that only lets a Put on a user table through when it carries the expected
   * operation attribute; all other Puts are bypassed.
   */
  public static class OperationAttributesTestController extends BaseRegionObserver {

    @Override
    public void prePut(ObserverContext<RegionCoprocessorEnvironment> e, Put put, WALEdit edit,
        Durability durability) throws IOException {
      Region region = e.getEnvironment().getRegion();
      if (!region.getRegionInfo().isMetaTable()
          && !region.getRegionInfo().getTable().isSystemTable()) {
        if (put.getAttribute(TEST_ATR_KEY) != null) {
          LOG.debug("allow any put to happen " + region.getRegionInfo().getRegionNameAsString());
        } else {
          e.bypass();
        }
      }
    }
  }
}