/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import static org.junit.Assert.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
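
/**
 * Tests {@link FixedFileTrailer}: serializes a trailer, deserializes it again, and verifies
 * that the two match; checks that the version is auto-detected when the trailer is read back
 * from a file; checks that out-of-range versions are rejected; and, for version 2, exercises
 * the legacy pre-protobuf (Writable-style) trailer layout. The test is parameterized over all
 * supported HFile format versions.
 *
 * <p>The round trip exercised below is roughly the following (an illustrative sketch only;
 * {@code minorVersion} stands for whichever minor version the test uses):
 *
 * <pre>{@code
 * FixedFileTrailer out = new FixedFileTrailer(version, minorVersion);
 * // ... populate the trailer fields ...
 * out.serialize(dataOutputStream);      // writes the fixed-size trailer block
 * FixedFileTrailer in = new FixedFileTrailer(version, minorVersion);
 * in.deserialize(dataInputStream);      // must read back exactly the same fields
 * }</pre>
 */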
@RunWith(Parameterized.class)
@Category(SmallTests.class)
public class TestFixedFileTrailer {

  private static final Log LOG = LogFactory.getLog(TestFixedFileTrailer.class);
  private static final int MAX_COMPARATOR_NAME_LENGTH = 128;

  /**
   * The number of fields used by each version, indexed by version minus two
   * (the minimum supported version is 2).
   */
  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14, 15 };

  private HBaseTestingUtility util = new HBaseTestingUtility();
  private FileSystem fs;
  private ByteArrayOutputStream baos = new ByteArrayOutputStream();
  private int version;

  static {
    assert NUM_FIELDS_BY_VERSION.length == HFile.MAX_FORMAT_VERSION
        - HFile.MIN_FORMAT_VERSION + 1;
  }

  public TestFixedFileTrailer(int version) {
    this.version = version;
  }

  @Parameters
  public static Collection<Object[]> getParameters() {
    List<Object[]> versionsToTest = new ArrayList<Object[]>();
    for (int v = HFile.MIN_FORMAT_VERSION; v <= HFile.MAX_FORMAT_VERSION; ++v)
      versionsToTest.add(new Integer[] { v });
    return versionsToTest;
  }

  @Before
  public void setUp() throws IOException {
    fs = FileSystem.get(util.getConfiguration());
  }

  @Test
  public void testTrailer() throws IOException {
    FixedFileTrailer t = new FixedFileTrailer(version,
        HFileReaderV2.PBUF_TRAILER_MINOR_VERSION);
    t.setDataIndexCount(3);
    t.setEntryCount(((long) Integer.MAX_VALUE) + 1);

    t.setLastDataBlockOffset(291);
    t.setNumDataIndexLevels(3);
    t.setComparatorClass(KeyValue.COMPARATOR.getClass());
    t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
    t.setUncompressedDataIndexSize(827398717L); // Something random.

    t.setLoadOnOpenOffset(128);
    t.setMetaIndexCount(7);

    t.setTotalUncompressedBytes(129731987);

    {
      DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
      t.serialize(dos);
      dos.flush();
      assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
    }

    byte[] bytes = baos.toByteArray();
    baos.reset();

    assertEquals(FixedFileTrailer.getTrailerSize(version), bytes.length);

    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);

    // Finished writing; now read the trailer back.
    {
      DataInputStream dis = new DataInputStream(bais);
      FixedFileTrailer t2 = new FixedFileTrailer(version,
          HFileReaderV2.PBUF_TRAILER_MINOR_VERSION);
      t2.deserialize(dis);
      assertEquals(-1, bais.read()); // Ensure we have read everything.
      checkLoadedTrailer(version, t, t2);
    }

    // Now check what happens if the trailer is corrupted.
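    // The serialized trailer ends with a four-byte version field (written last; see
    // serializeAsWritable below), and its final byte ends up holding the major version,
    // so overwriting that byte with an out-of-range value is enough to make version
    // validation fail with the IllegalArgumentException expected here.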
    Path trailerPath = new Path(util.getDataTestDir(), "trailer_" + version);

    {
      for (byte invalidVersion : new byte[] { HFile.MIN_FORMAT_VERSION - 1,
          HFile.MAX_FORMAT_VERSION + 1 }) {
        bytes[bytes.length - 1] = invalidVersion;
        writeTrailer(trailerPath, null, bytes);
        try {
          readTrailer(trailerPath);
          fail("Exception expected");
        } catch (IllegalArgumentException ex) {
          // Make it easy to debug this.
          String msg = ex.getMessage();
          String cleanMsg = msg.replaceAll(
              "^(java(\\.[a-zA-Z]+)+:\\s+)?|\\s+\\(.*\\)\\s*$", "");
          assertEquals("Actual exception message is \"" + msg + "\".\n" +
              "Cleaned-up message", // will be followed by " expected: ..."
              "Invalid HFile version: " + invalidVersion, cleanMsg);
          LOG.info("Got an expected exception: " + msg);
        }
      }
    }

    // Now write the trailer into a file and auto-detect the version.
    writeTrailer(trailerPath, t, null);

    FixedFileTrailer t4 = readTrailer(trailerPath);

    checkLoadedTrailer(version, t, t4);

    String trailerStr = t.toString();
    assertEquals("Invalid number of fields in the string representation "
        + "of the trailer: " + trailerStr, NUM_FIELDS_BY_VERSION[version - 2],
        trailerStr.split(", ").length);
    assertEquals(trailerStr, t4.toString());
  }

  @Test
  public void testTrailerForV2NonPBCompatibility() throws Exception {
    if (version == 2) {
      FixedFileTrailer t = new FixedFileTrailer(version,
          HFileReaderV2.MINOR_VERSION_NO_CHECKSUM);
      t.setDataIndexCount(3);
      t.setEntryCount(((long) Integer.MAX_VALUE) + 1);
      t.setLastDataBlockOffset(291);
      t.setNumDataIndexLevels(3);
      t.setComparatorClass(KeyValue.COMPARATOR.getClass());
      t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
      t.setUncompressedDataIndexSize(827398717L); // Something random.
      t.setLoadOnOpenOffset(128);
      t.setMetaIndexCount(7);
      t.setTotalUncompressedBytes(129731987);

      {
        DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
        serializeAsWritable(dos, t);
        dos.flush();
        assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
      }

      byte[] bytes = baos.toByteArray();
      baos.reset();
      assertEquals(FixedFileTrailer.getTrailerSize(version), bytes.length);

      ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
      {
        DataInputStream dis = new DataInputStream(bais);
        FixedFileTrailer t2 = new FixedFileTrailer(version,
            HFileReaderV2.MINOR_VERSION_NO_CHECKSUM);
        t2.deserialize(dis);
        assertEquals(-1, bais.read()); // Ensure we have read everything.
        checkLoadedTrailer(version, t, t2);
      }
    }
  }

  // Copied from FixedFileTrailer so this test can write a trailer in the legacy
  // (non-protobuf, Writable-style) layout and exercise the read path for such trailers.
  private void serializeAsWritable(DataOutputStream output, FixedFileTrailer fft)
      throws IOException {
    BlockType.TRAILER.write(output);
    output.writeLong(fft.getFileInfoOffset());
    output.writeLong(fft.getLoadOnOpenDataOffset());
    output.writeInt(fft.getDataIndexCount());
    output.writeLong(fft.getUncompressedDataIndexSize());
    output.writeInt(fft.getMetaIndexCount());
    output.writeLong(fft.getTotalUncompressedBytes());
    output.writeLong(fft.getEntryCount());
    output.writeInt(fft.getCompressionCodec().ordinal());
    output.writeInt(fft.getNumDataIndexLevels());
    output.writeLong(fft.getFirstDataBlockOffset());
    output.writeLong(fft.getLastDataBlockOffset());
    Bytes.writeStringFixedSize(output, fft.getComparatorClassName(), MAX_COMPARATOR_NAME_LENGTH);
    output.writeInt(FixedFileTrailer.materializeVersion(fft.getMajorVersion(),
        fft.getMinorVersion()));
  }
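
  /** Reads a trailer back from {@code trailerPath}, auto-detecting its version from the stream. */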
  private FixedFileTrailer readTrailer(Path trailerPath) throws IOException {
    FSDataInputStream fsdis = fs.open(trailerPath);
    FixedFileTrailer trailerRead = FixedFileTrailer.readFromStream(fsdis,
        fs.getFileStatus(trailerPath).getLen());
    fsdis.close();
    return trailerRead;
  }
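
  /**
   * Writes either the given trailer or the given raw bytes to {@code trailerPath}, preceded by
   * one throwaway byte so the trailer does not start at offset zero. Exactly one of {@code t}
   * and {@code useBytesInstead} must be non-null.
   */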
  private void writeTrailer(Path trailerPath, FixedFileTrailer t,
      byte[] useBytesInstead) throws IOException {
    assert (t == null) != (useBytesInstead == null); // Expect exactly one non-null argument.

    FSDataOutputStream fsdos = fs.create(trailerPath);
    fsdos.write(135); // To make the deserializer's job less trivial.
    if (useBytesInstead != null) {
      fsdos.write(useBytesInstead);
    } else {
      t.serialize(fsdos);
    }
    fsdos.close();
  }
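
  /**
   * Asserts that the loaded trailer matches the expected one, field by field, restricting the
   * comparison to the fields that exist in the given format version.
   */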
  private void checkLoadedTrailer(int version, FixedFileTrailer expected,
      FixedFileTrailer loaded) throws IOException {
    assertEquals(version, loaded.getMajorVersion());
    assertEquals(expected.getDataIndexCount(), loaded.getDataIndexCount());

    assertEquals(Math.min(expected.getEntryCount(),
        version == 1 ? Integer.MAX_VALUE : Long.MAX_VALUE),
        loaded.getEntryCount());

    if (version == 1) {
      assertEquals(expected.getFileInfoOffset(), loaded.getFileInfoOffset());
    }

    if (version == 2) {
      assertEquals(expected.getLastDataBlockOffset(),
          loaded.getLastDataBlockOffset());
      assertEquals(expected.getNumDataIndexLevels(),
          loaded.getNumDataIndexLevels());
      assertEquals(expected.createComparator().getClass().getName(),
          loaded.createComparator().getClass().getName());
      assertEquals(expected.getFirstDataBlockOffset(),
          loaded.getFirstDataBlockOffset());
      assertTrue(
          expected.createComparator() instanceof KeyValue.KVComparator);
      assertEquals(expected.getUncompressedDataIndexSize(),
          loaded.getUncompressedDataIndexSize());
    }

    assertEquals(expected.getLoadOnOpenDataOffset(),
        loaded.getLoadOnOpenDataOffset());
    assertEquals(expected.getMetaIndexCount(), loaded.getMetaIndexCount());

    assertEquals(expected.getTotalUncompressedBytes(),
        loaded.getTotalUncompressedBytes());
  }

}