/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.codec.prefixtree.builder;

import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerRowSearchResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.SimpleByteRange;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

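/**
 * Round-trip and search tests for the prefix-tree {@link Tokenizer}: sorted
 * byte[] rows are added to the trie one at a time, then read back out and
 * looked up individually. A minimal usage sketch, mirroring what the
 * constructor below does (the row contents are illustrative only):
 *
 * <pre>
 * Tokenizer trie = new Tokenizer();
 * trie.addSorted(new SimpleByteRange(Bytes.toBytes("rowA")));
 * trie.addSorted(new SimpleByteRange(Bytes.toBytes("rowB")));
 * List&lt;byte[]&gt; roundTripped = trie.getArrays();
 * </pre>
 */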
@Category(SmallTests.class)
@RunWith(Parameterized.class)
public class TestTokenizer {

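  /** Runs the whole suite once per fixture supplied by {@code TestTokenizerData.InMemory}. */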
  @Parameters
  public static Collection<Object[]> parameters() {
    return new TestTokenizerData.InMemory().getAllAsObjectArray();
  }

  private List<byte[]> inputs;        // sorted fixture rows fed to the builder
  private Tokenizer builder;          // the trie under test
  private List<byte[]> roundTripped;  // rows read back out of the trie

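  /** Builds the trie from the fixture's sorted rows, then reads the rows straight back out. */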
  public TestTokenizer(TestTokenizerData sortedByteArrays) {
    this.inputs = sortedByteArrays.getInputs();
    this.builder = new Tokenizer();
    // Rows must arrive in sorted order; addSorted grows the trie incrementally.
    for (byte[] array : inputs) {
      builder.addSorted(new SimpleByteRange(array));
    }
    this.roundTripped = builder.getArrays();
  }

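  /** The trie must hand back exactly what was put in: same count, same order, same bytes. */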
  @Test
  public void testReaderRoundTrip() {
    Assert.assertEquals(inputs.size(), roundTripped.size());
    Assert.assertTrue(Bytes.isSorted(roundTripped));
    Assert.assertTrue(Bytes.equals(inputs, roundTripped));
  }

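  /** Every input row must be findable in the trie and reconstructable byte-for-byte. */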
  @Test
  public void testSearching() {
    for (byte[] input : inputs) {
      TokenizerRowSearchResult resultHolder = new TokenizerRowSearchResult();
      // Look the row up in the trie, then rebuild its bytes from the matching node.
      builder.getNode(resultHolder, input, 0, input.length);
      TokenizerNode n = resultHolder.getMatchingNode();
      byte[] output = n.getNewByteArray();
      Assert.assertTrue(Bytes.equals(input, output));
    }
  }

}