/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CompoundBloomFilter;
import org.apache.hadoop.io.IOUtils;

import com.google.common.base.Preconditions;

/**
 * Reading {@link HFile} version 1 and 2 blocks, and writing version 2 blocks.
 * <ul>
 * <li>In version 1 all blocks are always compressed or uncompressed, as
 * specified by the {@link HFile}'s compression algorithm, with a type-specific
 * magic record stored in the beginning of the compressed data (i.e. one needs
 * to uncompress the compressed block to determine the block type). There is
 * only a single compression algorithm setting for all blocks. Offset and size
 * information from the block index are required to read a block.
 * <li>In version 2 a block is structured as follows:
 * <ul>
 * <li>Magic record identifying the block type (8 bytes)
 * <li>Compressed block size, header not included (4 bytes)
 * <li>Uncompressed block size, header not included (4 bytes)
 * <li>The offset of the previous block of the same type (8 bytes). This is
 * used to navigate to the previous block without going to the block index.
 * <li>For minorVersions &gt;= 1, the ordinal describing the checksum type
 * (1 byte)
 * <li>For minorVersions &gt;= 1, the number of data bytes covered by each
 * checksum chunk (4 bytes)
 * <li>For minorVersions &gt;= 1, the size of data on disk including the
 * header but excluding the checksums (4 bytes)
 * <li>Compressed data (or uncompressed data if compression is disabled). The
 * compression algorithm is the same for all the blocks in the {@link HFile},
 * similarly to what was done in version 1.
 * <li>For minorVersions &gt;= 1, a series of 4 byte checksums, one each for
 * the number of bytes specified by bytesPerChecksum.
 * </ul>
 * </ul>
 * The version 2 block representation in the block cache is the same as above,
 * except that the data section is always uncompressed in the cache.
 */
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {

  /**
   * On a checksum failure on a Reader, these many succeeding read
   * requests switch back to using hdfs checksums before auto-reenabling
   * hbase checksum verification.
   */
  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;

  public static final boolean FILL_HEADER = true;
  public static final boolean DONT_FILL_HEADER = false;

  /**
   * The size of the block header when blockType is
   * {@link BlockType#ENCODED_DATA}. This extends the normal header by adding
   * the id of the encoder.
   */
  public static final int ENCODED_HEADER_SIZE = HConstants.HFILEBLOCK_HEADER_SIZE
      + DataBlockEncoding.ID_SIZE;

  static final byte[] DUMMY_HEADER_NO_CHECKSUM =
      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];

  public static final int BYTE_BUFFER_HEAP_SIZE = (int) ClassSize.estimateBase(
      ByteBuffer.wrap(new byte[0], 0, 0).getClass(), false);

  // usesHBaseChecksum (1 byte) + nextBlockOnDiskSizeWithHeader (4 bytes) + offset (8 bytes)
  public static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT
      + Bytes.SIZEOF_LONG;

  /**
   * Each checksum value is an integer that can be stored in 4 bytes.
   */
  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;

  private static final CacheableDeserializer<Cacheable> blockDeserializer =
      new CacheableDeserializer<Cacheable>() {
        @Override
        public HFileBlock deserialize(ByteBuffer buf, boolean reuse) throws IOException {
          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();
          ByteBuffer newByteBuffer;
          if (reuse) {
            newByteBuffer = buf.slice();
          } else {
            newByteBuffer = ByteBuffer.allocate(buf.limit());
            newByteBuffer.put(buf);
          }
          buf.position(buf.limit());
          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);
          boolean usesChecksum = buf.get() == (byte) 1;
          HFileBlock ourBuffer = new HFileBlock(newByteBuffer, usesChecksum);
          ourBuffer.offset = buf.getLong();
          ourBuffer.nextBlockOnDiskSizeWithHeader = buf.getInt();
          return ourBuffer;
        }

        @Override
        public int getDeserialiserIdentifier() {
          return deserializerIdentifier;
        }

        @Override
        public HFileBlock deserialize(ByteBuffer b) throws IOException {
          return deserialize(b, false);
        }
      };

  private static final int deserializerIdentifier;
  static {
    deserializerIdentifier = CacheableDeserializerIdManager
        .registerDeserializer(blockDeserializer);
  }
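
  // Hedged sketch of the cache round trip this deserializer supports. The
  // names "block" and "dest" are illustrative, not part of this class:
  //
  //   ByteBuffer dest = ByteBuffer.allocate(block.getSerializedLength());
  //   block.serialize(dest);  // payload followed by EXTRA_SERIALIZATION_SPACE trailer
  //   HFileBlock copy =
  //       (HFileBlock) block.getDeserializer().deserialize(dest, false);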

  /** Type of block. Header field 0. */
  private BlockType blockType;

  /** Size on disk excluding header, including checksum. Header field 1. */
  private int onDiskSizeWithoutHeader;

  /** Size of pure data. Does not include header or checksums. Header field 2. */
  private final int uncompressedSizeWithoutHeader;

  /** The offset of the previous block on disk. Header field 3. */
  private final long prevBlockOffset;

  /** Size on disk of header + data, excluding checksums. Header field 6. */
  private final int onDiskDataSizeWithHeader;

  /** The in-memory representation of the hfile block */
  private ByteBuffer buf;

  /** Meta data that holds meta information on the hfile block */
  private HFileContext fileContext;

  /**
   * The offset of this block in the file. Populated by the reader for
   * convenience of access. This offset is not part of the block header.
   */
  private long offset = -1;

  /**
   * The on-disk size of the next block, including the header, obtained by
   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of
   * the next block's header, or -1 if unknown.
   */
  private int nextBlockOnDiskSizeWithHeader = -1;

  /**
   * Creates a new {@link HFile} block from the given fields. This constructor
   * is mostly used when the block data has already been read and uncompressed,
   * and is sitting in a byte buffer.
   *
   * @param blockType the type of this block, see {@link BlockType}
   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}
   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}
   * @param prevBlockOffset see {@link #prevBlockOffset}
   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)
   *          followed by uncompressed data
   * @param fillHeader when true, write the first four header fields into the
   *          beginning of the buffer
   * @param offset the file offset the block was read from
   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}
   * @param fileContext HFile meta data
   */
  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,
      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer buf,
      boolean fillHeader, long offset,
      int onDiskDataSizeWithHeader, HFileContext fileContext) {
    this.blockType = blockType;
    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;
    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;
    this.prevBlockOffset = prevBlockOffset;
    this.buf = buf;
    if (fillHeader)
      overwriteHeader();
    this.offset = offset;
    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;
    this.fileContext = fileContext;
  }

  /**
   * Creates a block from an existing buffer starting with a header. Rewinds
   * and takes ownership of the buffer. By definition of rewind, ignores the
   * buffer position, but if you slice the buffer beforehand, it will rewind
   * to that point.
   *
   * @param b the buffer holding a complete block, starting with the header
   * @param usesHBaseChecksum whether the block header carries HBase checksum
   *          metadata (minor version &gt;= 1)
   */
  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {
    b.rewind();
    blockType = BlockType.read(b);
    onDiskSizeWithoutHeader = b.getInt();
    uncompressedSizeWithoutHeader = b.getInt();
    prevBlockOffset = b.getLong();
    HFileContextBuilder contextBuilder = new HFileContextBuilder();
    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);
    if (usesHBaseChecksum) {
      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));
      contextBuilder.withBytesPerCheckSum(b.getInt());
      this.onDiskDataSizeWithHeader = b.getInt();
    } else {
      contextBuilder.withChecksumType(ChecksumType.NULL);
      contextBuilder.withBytesPerCheckSum(0);
      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader +
          HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
    }
    this.fileContext = contextBuilder.build();
    buf = b;
    buf.rewind();
  }

  public BlockType getBlockType() {
    return blockType;
  }

  /** @return the data block encoding id that was used to encode this block */
  public short getDataBlockEncodingId() {
    if (blockType != BlockType.ENCODED_DATA) {
      throw new IllegalArgumentException("Querying encoder ID of a block " +
          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);
    }
    return buf.getShort(headerSize());
  }

  /**
   * @return the on-disk size of header + data part + checksum
   */
  public int getOnDiskSizeWithHeader() {
    return onDiskSizeWithoutHeader + headerSize();
  }

  /**
   * @return the on-disk size of the data part + checksum (header excluded)
   */
  public int getOnDiskSizeWithoutHeader() {
    return onDiskSizeWithoutHeader;
  }

  /**
   * @return the uncompressed size of the data part (header and checksum excluded)
   */
  public int getUncompressedSizeWithoutHeader() {
    return uncompressedSizeWithoutHeader;
  }

  /**
   * @return the offset of the previous block of the same type in the file, or
   *         -1 if unknown
   */
  public long getPrevBlockOffset() {
    return prevBlockOffset;
  }

  /**
   * Writes header fields into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE}
   * bytes of the buffer. Resets the buffer position to the end of the header
   * as a side effect.
   */
  private void overwriteHeader() {
    buf.rewind();
    blockType.write(buf);
    buf.putInt(onDiskSizeWithoutHeader);
    buf.putInt(uncompressedSizeWithoutHeader);
    buf.putLong(prevBlockOffset);
  }

  /**
   * Returns a buffer that does not include the header. The underlying data
   * array is not copied. Checksum data is not included in the returned
   * buffer.
   *
   * @return the buffer with header skipped and checksum omitted
   */
  public ByteBuffer getBufferWithoutHeader() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset() + headerSize(),
        buf.limit() - headerSize() - totalChecksumBytes()).slice();
  }

  /**
   * Returns the buffer this block stores internally. The clients must not
   * modify the buffer object. This method has to be public because it is
   * used in {@link CompoundBloomFilter} to avoid object creation on every
   * Bloom filter lookup, but has to be used with caution. Checksum data
   * is not included in the returned buffer but header data is.
   *
   * @return the buffer of this block for read-only operations
   */
  public ByteBuffer getBufferReadOnly() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset(),
        buf.limit() - totalChecksumBytes()).slice();
  }

  /**
   * Returns the buffer of this block, including header data. The clients must
   * not modify the buffer object. This method has to be public because it is
   * used in {@link BucketCache} to avoid a buffer copy.
   *
   * @return the buffer with header and checksum included, for read-only operations
   */
  public ByteBuffer getBufferReadOnlyWithHeader() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset(), buf.limit()).slice();
  }

  /**
   * Returns a byte buffer of this block, including header data and checksum,
   * positioned at the beginning of the header. The underlying data array is
   * not copied.
   *
   * @return the byte buffer with header and checksum included
   */
  ByteBuffer getBufferWithHeader() {
    ByteBuffer dupBuf = buf.duplicate();
    dupBuf.rewind();
    return dupBuf;
  }

  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,
      String fieldName) throws IOException {
    if (valueFromBuf != valueFromField) {
      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf
          + ") is different from that in the field (" + valueFromField + ")");
    }
  }

  /**
   * Checks if the block is internally consistent, i.e. the first
   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a
   * valid header consistent with the fields. This function is primarily for
   * testing and debugging, and is not thread-safe, because it alters the
   * internal buffer pointer.
   */
  void sanityCheck() throws IOException {
    buf.rewind();

    {
      BlockType blockTypeFromBuf = BlockType.read(buf);
      if (blockTypeFromBuf != blockType) {
        throw new IOException("Block type stored in the buffer: " +
            blockTypeFromBuf + ", block type field: " + blockType);
      }
    }

    sanityCheckAssertion(buf.getInt(), onDiskSizeWithoutHeader,
        "onDiskSizeWithoutHeader");

    sanityCheckAssertion(buf.getInt(), uncompressedSizeWithoutHeader,
        "uncompressedSizeWithoutHeader");

    sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlockOffset");
    if (this.fileContext.isUseHBaseChecksum()) {
      sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(), "checksumType");
      sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(), "bytesPerChecksum");
      sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader,
          "onDiskDataSizeWithHeader");
    }

    int cksumBytes = totalChecksumBytes();
    int hdrSize = headerSize();
    int expectedBufLimit = uncompressedSizeWithoutHeader + hdrSize +
        cksumBytes;
    if (buf.limit() != expectedBufLimit) {
      throw new AssertionError("Expected buffer limit " + expectedBufLimit
          + ", got " + buf.limit());
    }

    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read
    // the next block's header, so there are two sensible values for buffer
    // capacity.
    int size = uncompressedSizeWithoutHeader + hdrSize + cksumBytes;
    if (buf.capacity() != size &&
        buf.capacity() != size + hdrSize) {
      throw new AssertionError("Invalid buffer capacity: " + buf.capacity() +
          ", expected " + size + " or " + (size + hdrSize));
    }
  }

  @Override
  public String toString() {
    return "blockType="
        + blockType
        + ", onDiskSizeWithoutHeader="
        + onDiskSizeWithoutHeader
        + ", uncompressedSizeWithoutHeader="
        + uncompressedSizeWithoutHeader
        + ", prevBlockOffset="
        + prevBlockOffset
        + ", dataBeginsWith="
        + Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),
            Math.min(32, buf.limit() - headerSize()))
        + ", fileOffset=" + offset;
  }

  private void validateOnDiskSizeWithoutHeader(
      int expectedOnDiskSizeWithoutHeader) throws IOException {
    if (onDiskSizeWithoutHeader != expectedOnDiskSizeWithoutHeader) {
      String blockInfoMsg =
          "Block offset: " + offset + ", data starts with: "
              + Bytes.toStringBinary(buf.array(), buf.arrayOffset(),
                  Math.min(32, buf.limit()));
      throw new IOException("On-disk size without header provided is "
          + expectedOnDiskSizeWithoutHeader + ", but block "
          + "header contains " + onDiskSizeWithoutHeader + ". " +
          blockInfoMsg);
    }
  }

  /**
   * Always allocates a new buffer of the correct size. Copies header bytes
   * from the existing buffer. Does not change header fields. Reserves room
   * for checksum bytes too.
   *
   * @param extraBytes whether to reserve room in the buffer to read the next
   *          block's header
   */
  private void allocateBuffer(boolean extraBytes) {
    int cksumBytes = totalChecksumBytes();
    int capacityNeeded = headerSize() + uncompressedSizeWithoutHeader +
        cksumBytes +
        (extraBytes ? headerSize() : 0);

    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);

    // Copy header bytes into the new buffer.
    System.arraycopy(buf.array(), buf.arrayOffset(), newBuf.array(),
        newBuf.arrayOffset(), headerSize());

    buf = newBuf;
    buf.limit(headerSize() + uncompressedSizeWithoutHeader + cksumBytes);
  }

  /** An additional sanity-check in case no compression or encryption is being used. */
  public void assumeUncompressed() throws IOException {
    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader +
        totalChecksumBytes()) {
      throw new IOException("Using no compression but "
          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "
          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader
          + ", numChecksumBytes=" + totalChecksumBytes());
    }
  }

  /**
   * @param expectedType the expected type of this block
   * @throws IOException if this block's type is different than expected
   */
  public void expectType(BlockType expectedType) throws IOException {
    if (blockType != expectedType) {
      throw new IOException("Invalid block type: expected=" + expectedType
          + ", actual=" + blockType);
    }
  }

  /** @return the offset of this block in the file it was read from */
  public long getOffset() {
    if (offset < 0) {
      throw new IllegalStateException(
          "HFile block offset not initialized properly");
    }
    return offset;
  }

  /**
   * @return a byte stream reading the data section of this block
   */
  public DataInputStream getByteStream() {
    return new DataInputStream(new ByteArrayInputStream(buf.array(),
        buf.arrayOffset() + headerSize(), buf.limit() - headerSize()));
  }

  @Override
  public long heapSize() {
    long size = ClassSize.align(
        ClassSize.OBJECT +
        // Block type, byte buffer and file context references
        3 * ClassSize.REFERENCE +
        // On-disk size, uncompressed size, next block's on-disk size
        // and on-disk data size
        4 * Bytes.SIZEOF_INT +
        // This and previous block offset
        2 * Bytes.SIZEOF_LONG +
        // Heap size of the file context object
        fileContext.heapSize()
    );

    if (buf != null) {
      // Deep overhead of the byte buffer. Needs to be aligned separately.
      size += ClassSize.align(buf.capacity() + BYTE_BUFFER_HEAP_SIZE);
    }

    return ClassSize.align(size);
  }

  /**
   * Read from an input stream, analogous to
   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a
   * number of "extra" bytes that would be desirable but not absolutely
   * necessary to read.
   *
   * @param in the input stream to read from
   * @param buf the buffer to read into
   * @param bufOffset the destination offset in the buffer
   * @param necessaryLen the number of bytes that are absolutely necessary to
   *          read
   * @param extraLen the number of extra bytes that would be nice to read
   * @return true if succeeded reading the extra bytes
   * @throws IOException if failed to read the necessary bytes
   */
  public static boolean readWithExtra(InputStream in, byte buf[],
      int bufOffset, int necessaryLen, int extraLen) throws IOException {
    int bytesRemaining = necessaryLen + extraLen;
    while (bytesRemaining > 0) {
      int ret = in.read(buf, bufOffset, bytesRemaining);
      if (ret == -1 && bytesRemaining <= extraLen) {
        // We could not read the "extra data", but that is OK.
        break;
      }

      if (ret < 0) {
        throw new IOException("Premature EOF from inputStream (read "
            + "returned " + ret + ", was trying to read " + necessaryLen
            + " necessary bytes and " + extraLen + " extra bytes, "
            + "successfully read "
            + (necessaryLen + extraLen - bytesRemaining) + ")");
      }
      bufOffset += ret;
      bytesRemaining -= ret;
    }
    return bytesRemaining <= 0;
  }
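
  // A hedged usage sketch of readWithExtra (illustrative only; "in",
  // "blockSize" and "hdrSize" are assumed caller-side values, nothing here is
  // referenced elsewhere in this class):
  //
  //   byte[] dest = new byte[blockSize + hdrSize];
  //   boolean gotExtra = readWithExtra(in, dest, 0, blockSize, hdrSize);
  //   // gotExtra == true  -> dest also contains the next block's header
  //   // gotExtra == false -> the stream ended right after the necessary bytes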

  /**
   * @return the on-disk size of the next block (including the header size)
   *         that was read by peeking into the next block's header
   */
  public int getNextBlockOnDiskSizeWithHeader() {
    return nextBlockOnDiskSizeWithHeader;
  }

  /**
   * Unified version 2 {@link HFile} block writer. The intended usage pattern
   * is as follows:
   * <ol>
   * <li>Construct an {@link HFileBlock.Writer}, providing an encoder and file context.
   * <li>Call {@link Writer#startWriting} and get a data stream to write to.
   * <li>Write your data into the stream.
   * <li>Call {@link Writer#writeHeaderAndData(FSDataOutputStream)} to store the
   * serialized block into an external stream.
   * <li>Repeat to write more blocks.
   * </ol>
   */
  public static class Writer {

    private enum State {
      INIT,
      WRITING,
      BLOCK_READY
    };

    /** Writer state. Used to prevent incorrect usage. */
    private State state = State.INIT;

    /** Data block encoder used for data blocks */
    private final HFileDataBlockEncoder dataBlockEncoder;

    private HFileBlockEncodingContext dataBlockEncodingCtx;

    /** Block encoding context for non-data blocks */
    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;

    /**
     * The stream we use to accumulate data in uncompressed format for each
     * block. We reset this stream at the end of each block and reuse it. The
     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE}
     * bytes into this stream.
     */
    private ByteArrayOutputStream baosInMemory;

    /**
     * Current block type. Set in {@link #startWriting(BlockType)}. Could be
     * changed in {@link #encodeDataBlockForDisk()} from {@link BlockType#DATA}
     * to {@link BlockType#ENCODED_DATA}.
     */
    private BlockType blockType;

    /**
     * A stream that we write uncompressed bytes to, which compresses them and
     * writes them to {@link #baosInMemory}.
     */
    private DataOutputStream userDataStream;

    /**
     * Bytes to be written to the file system, including the header. Compressed
     * if compression is turned on. It also includes the checksum data that
     * immediately follows the block data (header + data + checksums).
     */
    private byte[] onDiskBytesWithHeader;

    /**
     * The checksum data on disk. It is used only if data is not compressed.
     * If data is compressed, then the checksums are already part of
     * onDiskBytesWithHeader. If data is uncompressed, then this variable
     * stores the checksum data for this block.
     */
    private byte[] onDiskChecksum;

    /**
     * Valid in the BLOCK_READY state. Contains the header and the uncompressed
     * (but potentially encoded, if this is a data block) bytes, so the length
     * is {@link #getUncompressedSizeWithoutHeader()} +
     * {@link org.apache.hadoop.hbase.HConstants#HFILEBLOCK_HEADER_SIZE}.
     * Does not store checksums.
     */
    private byte[] uncompressedBytesWithHeader;

    /**
     * Current block's start offset in the {@link HFile}. Set in
     * {@link #writeHeaderAndData(FSDataOutputStream)}.
     */
    private long startOffset;

    /**
     * Offset of previous block by block type. Updated when the next block is
     * started.
     */
    private long[] prevOffsetByType;

    /** The offset of the previous block of the same type */
    private long prevOffset;

    private HFileContext fileContext;

    /**
     * @param dataBlockEncoder data block encoding algorithm to use
     */
    public Writer(HFileDataBlockEncoder dataBlockEncoder, HFileContext fileContext) {
      this.dataBlockEncoder = dataBlockEncoder != null
          ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
      defaultBlockEncodingCtx = new HFileBlockDefaultEncodingContext(null,
          HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);
      dataBlockEncodingCtx = this.dataBlockEncoder
          .newDataBlockEncodingContext(HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);

      if (fileContext.getBytesPerChecksum() < HConstants.HFILEBLOCK_HEADER_SIZE) {
        throw new RuntimeException("Unsupported value of bytesPerChecksum. " +
            " Minimum is " + HConstants.HFILEBLOCK_HEADER_SIZE + " but the configured value is " +
            fileContext.getBytesPerChecksum());
      }

      baosInMemory = new ByteArrayOutputStream();

      prevOffsetByType = new long[BlockType.values().length];
      for (int i = 0; i < prevOffsetByType.length; ++i)
        prevOffsetByType[i] = -1;

      this.fileContext = fileContext;
    }

    /**
     * Starts writing into the block. The previous block's data is discarded.
     * A usage sketch follows this method.
     *
     * @return the stream the user can write their data into
     * @throws IOException
     */
    public DataOutputStream startWriting(BlockType newBlockType)
        throws IOException {
      if (state == State.BLOCK_READY && startOffset != -1) {
        // We had a previous block that was written to a stream at a specific
        // offset. Save that offset as the last offset of a block of that type.
        prevOffsetByType[blockType.getId()] = startOffset;
      }

      startOffset = -1;
      blockType = newBlockType;

      baosInMemory.reset();
      baosInMemory.write(HConstants.HFILEBLOCK_DUMMY_HEADER);

      state = State.WRITING;

      // We will compress it later in finishBlock()
      userDataStream = new DataOutputStream(baosInMemory);
      return userDataStream;
    }
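
    // Hedged end-to-end sketch of the writer lifecycle (illustrative only;
    // "ctx", "out" and "myPayload" are assumed to exist in the caller, they
    // are not fields of this class):
    //
    //   HFileBlock.Writer writer = new HFileBlock.Writer(null, ctx);
    //   DataOutputStream dos = writer.startWriting(BlockType.DATA);
    //   dos.write(myPayload);            // user bytes; header space is reserved
    //   writer.writeHeaderAndData(out);  // finishes the block and writes
    //                                    // header + data + checksums to "out"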

    /**
     * Returns the stream for the user to write to. The block writer takes care
     * of handling compression and buffering for caching on write. Can only be
     * called in the "writing" state.
     *
     * @return the data output stream for the user to write to
     */
    DataOutputStream getUserDataStream() {
      expectState(State.WRITING);
      return userDataStream;
    }

    /**
     * Transitions the block writer from the "writing" state to the "block
     * ready" state. Does nothing if a block is already finished.
     */
    private void ensureBlockReady() throws IOException {
      Preconditions.checkState(state != State.INIT,
          "Unexpected state: " + state);

      if (state == State.BLOCK_READY)
        return;

      // This will set state to BLOCK_READY.
      finishBlock();
    }

    /**
     * An internal method that flushes the compressing stream (if using
     * compression), serializes the header, and takes care of the separate
     * uncompressed stream for caching on write, if applicable. Sets block
     * write state to "block ready".
     */
    private void finishBlock() throws IOException {
      userDataStream.flush();

      uncompressedBytesWithHeader = baosInMemory.toByteArray();
      prevOffset = prevOffsetByType[blockType.getId()];

      // We need to set state before we can package the block up for
      // cache-on-write. In a way, the block is ready, but not yet encoded or
      // compressed.
      state = State.BLOCK_READY;
      if (blockType == BlockType.DATA) {
        encodeDataBlockForDisk();
      } else {
        defaultBlockEncodingCtx.compressAfterEncodingWithBlockType(
            uncompressedBytesWithHeader, blockType);
        onDiskBytesWithHeader =
            defaultBlockEncodingCtx.getOnDiskBytesWithHeader();
      }

      int numBytes = (int) ChecksumUtil.numBytes(
          onDiskBytesWithHeader.length,
          fileContext.getBytesPerChecksum());

      // Put the header into the on-disk bytes.
      putHeader(onDiskBytesWithHeader, 0,
          onDiskBytesWithHeader.length + numBytes,
          uncompressedBytesWithHeader.length, onDiskBytesWithHeader.length);
      // Set the header for the uncompressed bytes (for cache-on-write).
      putHeader(uncompressedBytesWithHeader, 0,
          onDiskBytesWithHeader.length + numBytes,
          uncompressedBytesWithHeader.length, onDiskBytesWithHeader.length);

      onDiskChecksum = new byte[numBytes];
      ChecksumUtil.generateChecksums(
          onDiskBytesWithHeader, 0, onDiskBytesWithHeader.length,
          onDiskChecksum, 0, fileContext.getChecksumType(), fileContext.getBytesPerChecksum());
    }

    /**
     * Encodes this block if it is a data block and encoding is turned on in
     * {@link #dataBlockEncoder}.
     */
    private void encodeDataBlockForDisk() throws IOException {
      // Wrap the raw key/values, skipping the header.
      ByteBuffer rawKeyValues =
          ByteBuffer.wrap(uncompressedBytesWithHeader, HConstants.HFILEBLOCK_HEADER_SIZE,
              uncompressedBytesWithHeader.length - HConstants.HFILEBLOCK_HEADER_SIZE).slice();

      // Do the encoding.
      dataBlockEncoder.beforeWriteToDisk(rawKeyValues, dataBlockEncodingCtx, blockType);

      uncompressedBytesWithHeader =
          dataBlockEncodingCtx.getUncompressedBytesWithHeader();
      onDiskBytesWithHeader =
          dataBlockEncodingCtx.getOnDiskBytesWithHeader();
      blockType = dataBlockEncodingCtx.getBlockType();
    }

    /**
     * Puts the header into the given byte array at the given offset.
     *
     * @param onDiskSize size of the block on disk: header + data + checksums
     * @param uncompressedSize size of the block after decompression (but
     *          before optional data block decoding), including header
     * @param onDiskDataSize size of the block on disk with header
     *          and data but not including the checksums
     */
    private void putHeader(byte[] dest, int offset, int onDiskSize,
        int uncompressedSize, int onDiskDataSize) {
      offset = blockType.put(dest, offset);
      offset = Bytes.putInt(dest, offset, onDiskSize - HConstants.HFILEBLOCK_HEADER_SIZE);
      offset = Bytes.putInt(dest, offset, uncompressedSize - HConstants.HFILEBLOCK_HEADER_SIZE);
      offset = Bytes.putLong(dest, offset, prevOffset);
      offset = Bytes.putByte(dest, offset, fileContext.getChecksumType().getCode());
      offset = Bytes.putInt(dest, offset, fileContext.getBytesPerChecksum());
      Bytes.putInt(dest, offset, onDiskDataSize);
    }

    /**
     * Writes the header and the compressed data of this block (or uncompressed
     * data when not using compression) into the given stream. Records the
     * offset of this block so that it can be referenced in the next block of
     * the same type. Can be called in the "writing" state or in the "block
     * ready" state; if called in the "writing" state, transitions the writer
     * to the "block ready" state.
     *
     * @param out the output stream to write to
     * @throws IOException if the block was already written at a different offset
     */
    public void writeHeaderAndData(FSDataOutputStream out) throws IOException {
      long offset = out.getPos();
      if (startOffset != -1 && offset != startOffset) {
        throw new IOException("A " + blockType + " block written to a "
            + "stream twice, first at offset " + startOffset + ", then at "
            + offset);
      }
      startOffset = offset;

      finishBlockAndWriteHeaderAndData((DataOutputStream) out);
    }

    /**
     * Writes the header and the compressed data of this block (or uncompressed
     * data when not using compression) into the given stream. Can be called in
     * the "writing" state or in the "block ready" state. If called in the
     * "writing" state, transitions the writer to the "block ready" state.
     *
     * @param out the output stream to write the block to
     * @throws IOException
     */
    private void finishBlockAndWriteHeaderAndData(DataOutputStream out)
        throws IOException {
      ensureBlockReady();
      out.write(onDiskBytesWithHeader);
      out.write(onDiskChecksum);
    }

    /**
     * Returns the header and the compressed data (or uncompressed data when
     * not using compression) as a byte array, including checksums, as they
     * would be stored on disk. Can be called in the "writing" state or in the
     * "block ready" state; if called in the "writing" state, transitions the
     * writer to the "block ready" state.
     *
     * @return header and data as they would be stored on disk in a byte array
     * @throws IOException
     */
    byte[] getHeaderAndDataForTest() throws IOException {
      ensureBlockReady();
      // This is not very optimal, because we are doing an extra copy.
      // But this method is used only by unit tests.
      byte[] output =
          new byte[onDiskBytesWithHeader.length
              + onDiskChecksum.length];
      System.arraycopy(onDiskBytesWithHeader, 0, output, 0,
          onDiskBytesWithHeader.length);
      System.arraycopy(onDiskChecksum, 0, output,
          onDiskBytesWithHeader.length, onDiskChecksum.length);
      return output;
    }

    /**
     * Releases resources used by this writer.
     */
    public void release() {
      if (dataBlockEncodingCtx != null) {
        dataBlockEncodingCtx.close();
        dataBlockEncodingCtx = null;
      }
      if (defaultBlockEncodingCtx != null) {
        defaultBlockEncodingCtx.close();
        defaultBlockEncodingCtx = null;
      }
    }

    /**
     * Returns the on-disk size of the data portion of the block. This is the
     * compressed size if compression is enabled. Can only be called in the
     * "block ready" state. Header is not compressed, and its size is not
     * included in the return value.
     *
     * @return the on-disk size of the block, not including the header
     */
    int getOnDiskSizeWithoutHeader() {
      expectState(State.BLOCK_READY);
      return onDiskBytesWithHeader.length + onDiskChecksum.length - HConstants.HFILEBLOCK_HEADER_SIZE;
    }

    /**
     * Returns the on-disk size of the block. Can only be called in the
     * "block ready" state.
     *
     * @return the on-disk size of the block ready to be written, including the
     *         header size, the data and the checksum data
     */
    int getOnDiskSizeWithHeader() {
      expectState(State.BLOCK_READY);
      return onDiskBytesWithHeader.length + onDiskChecksum.length;
    }

    /**
     * The uncompressed size of the block data. Does not include header size.
     */
    int getUncompressedSizeWithoutHeader() {
      expectState(State.BLOCK_READY);
      return uncompressedBytesWithHeader.length - HConstants.HFILEBLOCK_HEADER_SIZE;
    }

    /**
     * The uncompressed size of the block data, including header size.
     */
    int getUncompressedSizeWithHeader() {
      expectState(State.BLOCK_READY);
      return uncompressedBytesWithHeader.length;
    }

    /** @return true if a block is being written */
    public boolean isWriting() {
      return state == State.WRITING;
    }

    /**
     * Returns the number of bytes written into the current block so far, or
     * zero if not writing the block at the moment. Note that this will return
     * zero in the "block ready" state as well.
     *
     * @return the number of bytes written
     */
    public int blockSizeWritten() {
      if (state != State.WRITING)
        return 0;
      return userDataStream.size();
    }

    /**
     * Returns the header followed by the uncompressed data, even if using
     * compression. This is needed for storing uncompressed blocks in the block
     * cache. Can be called in the "writing" state or the "block ready" state.
     * Returns only the header and data, does not include checksum data.
     *
     * @return uncompressed block bytes for caching on write
     */
    ByteBuffer getUncompressedBufferWithHeader() {
      expectState(State.BLOCK_READY);
      return ByteBuffer.wrap(uncompressedBytesWithHeader);
    }

    private void expectState(State expectedState) {
      if (state != expectedState) {
        throw new IllegalStateException("Expected state: " + expectedState +
            ", actual state: " + state);
      }
    }

    /**
     * Takes the given {@link BlockWritable} instance, creates a new block of
     * its appropriate type, writes the writable into this block, and flushes
     * the block into the output stream.
     *
     * @param bw the block-writable object to write as a block
     * @param out the file system output stream
     * @throws IOException
     */
    public void writeBlock(BlockWritable bw, FSDataOutputStream out)
        throws IOException {
      bw.writeToBlock(startWriting(bw.getBlockType()));
      writeHeaderAndData(out);
    }

    /**
     * Creates a new HFileBlock for caching on write. Checksums have already
     * been validated, so the byte buffer passed into the constructor of this
     * newly created block does not have checksum data even though the header
     * indicates HBase checksum support. This is indicated by setting a 0
     * value in bytesPerChecksum.
     */
    public HFileBlock getBlockForCaching() {
      HFileContext newContext = new HFileContextBuilder()
          .withBlockSize(fileContext.getBlocksize())
          .withBytesPerCheckSum(0)
          .withChecksumType(ChecksumType.NULL)
          .withCompression(fileContext.getCompression())
          .withDataBlockEncoding(fileContext.getDataBlockEncoding())
          .withHBaseCheckSum(fileContext.isUseHBaseChecksum())
          .withCompressTags(fileContext.isCompressTags())
          .withIncludesMvcc(fileContext.isIncludesMvcc())
          .withIncludesTags(fileContext.isIncludesTags())
          .build();
      return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(),
          getUncompressedSizeWithoutHeader(), prevOffset, getUncompressedBufferWithHeader(),
          DONT_FILL_HEADER, startOffset,
          onDiskBytesWithHeader.length + onDiskChecksum.length, newContext);
    }
  }

  /** Something that can be written into a block. */
  public interface BlockWritable {

    /** The type of block this data should use. */
    BlockType getBlockType();

    /**
     * Writes the block to the provided stream. Must not write any magic
     * records. See the sketch after this interface.
     *
     * @param out a stream to write uncompressed data into
     */
    void writeToBlock(DataOutput out) throws IOException;
  }
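
  // A minimal hedged sketch of a BlockWritable implementation (illustrative;
  // "MarkerWritable" is an invented name, not a class that exists in HBase):
  //
  //   class MarkerWritable implements BlockWritable {
  //     @Override
  //     public BlockType getBlockType() { return BlockType.META; }
  //     @Override
  //     public void writeToBlock(DataOutput out) throws IOException {
  //       out.writeUTF("marker");  // payload only; the Writer adds the header
  //     }
  //   }
  //
  // Such an object would typically be flushed with Writer#writeBlock(bw, out).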

  // Block readers and writers

  /** An interface allowing to iterate {@link HFileBlock}s. */
  public interface BlockIterator {

    /**
     * Get the next block, or null if there are no more blocks to iterate.
     */
    HFileBlock nextBlock() throws IOException;

    /**
     * Similar to {@link #nextBlock()} but checks the block type, throws an
     * exception if incorrect, and returns the HFile block.
     */
    HFileBlock nextBlockWithBlockType(BlockType blockType) throws IOException;
  }
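
  // Hedged iteration sketch (illustrative; "reader", "start" and "end" are
  // assumed caller-side values):
  //
  //   BlockIterator it = reader.blockRange(start, end);
  //   for (HFileBlock b = it.nextBlock(); b != null; b = it.nextBlock()) {
  //     // process b; nextBlock() returns null once the end offset is reached
  //   }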

  /** A full-fledged reader with iteration ability. */
  public interface FSReader {

    /**
     * Reads the block at the given offset in the file with the given on-disk
     * size and uncompressed size.
     *
     * @param offset the offset in the stream to read at
     * @param onDiskSize the on-disk size of the entire block, including all
     *          applicable headers, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the compressed part of
     *          the block, or -1 if unknown
     * @param pread whether to use a positional read
     * @return the newly read block
     */
    HFileBlock readBlockData(long offset, long onDiskSize,
        int uncompressedSize, boolean pread) throws IOException;

    /**
     * Creates a block iterator over the given portion of the {@link HFile}.
     * The iterator returns blocks with offsets such that
     * startOffset &lt;= offset &lt; endOffset.
     *
     * @param startOffset the offset of the block to start iteration with
     * @param endOffset the offset to end iteration at (exclusive)
     * @return an iterator of blocks between the two given offsets
     */
    BlockIterator blockRange(long startOffset, long endOffset);

    /** Closes the backing streams */
    void closeStreams() throws IOException;
  }
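
  // Hedged read sketch (illustrative; "fsReader" and "offset" come from the
  // caller, and the -1 sizes mean "unknown", as documented above):
  //
  //   HFileBlock block = fsReader.readBlockData(offset, -1, -1, false);
  //   block.expectType(BlockType.DATA);
  //   DataInputStream dis = block.getByteStream();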

  /**
   * A common implementation of some methods of {@link FSReader} and some
   * tools for implementing HFile format version-specific block readers.
   */
  private abstract static class AbstractFSReader implements FSReader {

    /** The size of the file we are reading from, or -1 if unknown. */
    protected long fileSize;

    /** The size of the header */
    protected final int hdrSize;

    /** The filesystem used to access data */
    protected HFileSystem hfs;

    /** The path (if any) where this data is coming from */
    protected Path path;

    private final Lock streamLock = new ReentrantLock();

    /** The default buffer size for our buffered streams */
    public static final int DEFAULT_BUFFER_SIZE = 1 << 20;

    protected HFileContext fileContext;

    public AbstractFSReader(long fileSize, HFileSystem hfs, Path path, HFileContext fileContext)
        throws IOException {
      this.fileSize = fileSize;
      this.hfs = hfs;
      this.path = path;
      this.fileContext = fileContext;
      this.hdrSize = headerSize(fileContext.isUseHBaseChecksum());
    }

    @Override
    public BlockIterator blockRange(final long startOffset,
        final long endOffset) {
      return new BlockIterator() {
        private long offset = startOffset;

        @Override
        public HFileBlock nextBlock() throws IOException {
          if (offset >= endOffset)
            return null;
          HFileBlock b = readBlockData(offset, -1, -1, false);
          offset += b.getOnDiskSizeWithHeader();
          return b;
        }

        @Override
        public HFileBlock nextBlockWithBlockType(BlockType blockType)
            throws IOException {
          HFileBlock blk = nextBlock();
          if (blk.getBlockType() != blockType) {
            throw new IOException("Expected block of type " + blockType
                + " but found " + blk.getBlockType());
          }
          return blk;
        }
      };
    }

    /**
     * Does a positional read or a seek and read into the given buffer. Returns
     * the on-disk size of the next block, or -1 if it could not be determined.
     *
     * @param istream the input source of data
     * @param dest destination buffer
     * @param destOffset offset in the destination buffer
     * @param size size of the block to be read
     * @param peekIntoNextBlock whether to read the next block's on-disk size
     * @param fileOffset position in the stream to read at
     * @param pread whether we should do a positional read
     * @return the on-disk size of the next block with header size included, or
     *         -1 if it could not be determined
     * @throws IOException
     */
    protected int readAtOffset(FSDataInputStream istream,
        byte[] dest, int destOffset, int size,
        boolean peekIntoNextBlock, long fileOffset, boolean pread)
        throws IOException {
      if (peekIntoNextBlock &&
          destOffset + size + hdrSize > dest.length) {
        // We are asked to read the next block's header as well, but there is
        // not enough room in the array.
        throw new IOException("Attempted to read " + size + " bytes and " +
            hdrSize + " bytes of next header into a " + dest.length +
            "-byte array at offset " + destOffset);
      }

      if (!pread && streamLock.tryLock()) {
        // Seek + read. Better for scanning.
        try {
          istream.seek(fileOffset);

          long realOffset = istream.getPos();
          if (realOffset != fileOffset) {
            throw new IOException("Tried to seek to " + fileOffset + " to "
                + "read " + size + " bytes, but pos=" + realOffset
                + " after seek");
          }

          if (!peekIntoNextBlock) {
            IOUtils.readFully(istream, dest, destOffset, size);
            return -1;
          }

          // Try to read the next block header.
          if (!readWithExtra(istream, dest, destOffset, size, hdrSize))
            return -1;
        } finally {
          streamLock.unlock();
        }
      } else {
        // Positional read. Better for random reads; also used when the
        // streamLock is already locked.
        int extraSize = peekIntoNextBlock ? hdrSize : 0;
        int ret = istream.read(fileOffset, dest, destOffset, size + extraSize);
        if (ret < size) {
          throw new IOException("Positional read of " + size + " bytes " +
              "failed at offset " + fileOffset + " (returned " + ret + ")");
        }

        if (ret == size || ret < size + extraSize) {
          // Could not read the next block's header, or didn't try.
          return -1;
        }
      }

      assert peekIntoNextBlock;
      return Bytes.toInt(dest, destOffset + size + BlockType.MAGIC_LENGTH) + hdrSize;
    }

  }

  /**
   * We always prefetch the header of the next block, so that we know its
   * on-disk size in advance and can read it in one operation.
   */
  private static class PrefetchedHeader {
    long offset = -1;
    byte[] header = new byte[HConstants.HFILEBLOCK_HEADER_SIZE];
    ByteBuffer buf = ByteBuffer.wrap(header, 0, HConstants.HFILEBLOCK_HEADER_SIZE);
  }

  /** Reads version 2 blocks from the filesystem. */
  static class FSReaderV2 extends AbstractFSReader {

    /** The file system stream of the underlying {@link HFile} that
     * does or doesn't do checksum validations in the filesystem */
    protected FSDataInputStreamWrapper streamWrapper;

    private HFileBlockDecodingContext encodedBlockDecodingCtx;

    private HFileBlockDefaultDecodingContext defaultDecodingCtx;

    private ThreadLocal<PrefetchedHeader> prefetchedHeaderForThread =
        new ThreadLocal<PrefetchedHeader>() {
          @Override
          public PrefetchedHeader initialValue() {
            return new PrefetchedHeader();
          }
        };

    public FSReaderV2(FSDataInputStreamWrapper stream, long fileSize, HFileSystem hfs, Path path,
        HFileContext fileContext) throws IOException {
      super(fileSize, hfs, path, fileContext);
      this.streamWrapper = stream;
      // Older versions of HBase didn't support checksums.
      this.streamWrapper.prepareForBlockReader(!fileContext.isUseHBaseChecksum());
      defaultDecodingCtx =
          new HFileBlockDefaultDecodingContext(fileContext);
      encodedBlockDecodingCtx =
          new HFileBlockDefaultDecodingContext(fileContext);
    }

    /**
     * A constructor that reads files with the latest minor version.
     * This is used by unit tests only.
     */
    FSReaderV2(FSDataInputStream istream, long fileSize, HFileContext fileContext)
        throws IOException {
      this(new FSDataInputStreamWrapper(istream), fileSize, null, null, fileContext);
    }

    /**
     * Reads a version 2 block. Tries to do as little memory allocation as
     * possible, using the provided on-disk size.
     *
     * @param offset the offset in the stream to read at
     * @param onDiskSizeWithHeaderL the on-disk size of the block, including
     *          the header, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the block. Always
     *          expected to be -1. This parameter is only used in version 1.
     * @param pread whether to use a positional read
     */
    @Override
    public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL,
        int uncompressedSize, boolean pread) throws IOException {

      // Get a copy of the current state of whether to validate
      // hbase checksums or not for this read call. This is not
      // thread-safe, but the one constraint is that if we decide
      // to skip hbase checksum verification then we are
      // guaranteed to use hdfs checksum verification.
      boolean doVerificationThruHBaseChecksum = streamWrapper.shouldUseHBaseChecksum();
      FSDataInputStream is = streamWrapper.getStream(doVerificationThruHBaseChecksum);

      HFileBlock blk = readBlockDataInternal(is, offset,
                         onDiskSizeWithHeaderL,
                         uncompressedSize, pread,
                         doVerificationThruHBaseChecksum);
      if (blk == null) {
        HFile.LOG.warn("HBase checksum verification failed for file " +
                       path + " at offset " +
                       offset + " filesize " + fileSize +
                       ". Retrying read with HDFS checksums turned on...");

        if (!doVerificationThruHBaseChecksum) {
          String msg = "HBase checksum verification failed for file " +
                       path + " at offset " +
                       offset + " filesize " + fileSize +
                       " but this cannot happen because doVerify is " +
                       doVerificationThruHBaseChecksum;
          HFile.LOG.warn(msg);
          throw new IOException(msg);
        }
        HFile.checksumFailures.incrementAndGet();

        // If we have a checksum failure, we fall back into a mode where
        // the next few reads use HDFS level checksums. We aim to make the
        // next CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD reads avoid
        // hbase checksum verification, but since this value is set without
        // holding a lock, it can so happen that we might actually use
        // a few more than precisely this number.
        is = this.streamWrapper.fallbackToFsChecksum(CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD);
        doVerificationThruHBaseChecksum = false;
        blk = readBlockDataInternal(is, offset, onDiskSizeWithHeaderL,
                                    uncompressedSize, pread,
                                    doVerificationThruHBaseChecksum);
        if (blk != null) {
          HFile.LOG.warn("HDFS checksum verification succeeded for file " +
                         path + " at offset " +
                         offset + " filesize " + fileSize);
        }
      }
      if (blk == null && !doVerificationThruHBaseChecksum) {
        String msg = "readBlockData failed, possibly due to " +
                     "checksum verification failed for file " + path +
                     " at offset " + offset + " filesize " + fileSize;
        HFile.LOG.warn(msg);
        throw new IOException(msg);
      }

      // The read succeeded; report that the checksum was OK so that the
      // stream wrapper can re-enable HBase checksum verification if it was
      // turned off by an earlier failure.
      streamWrapper.checksumOk();
      return blk;
    }

    /**
     * Reads a version 2 block.
     *
     * @param is the input stream to read from
     * @param offset the offset in the stream to read at
     * @param onDiskSizeWithHeaderL the on-disk size of the block, including
     *          the header, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the block. Always
     *          expected to be -1. This parameter is only used in version 1.
     * @param pread whether to use a positional read
     * @param verifyChecksum whether to use HBase checksums; if HBase checksums
     *          are switched off, HDFS checksums are used instead
     * @return the HFileBlock, or null if there is an HBase checksum mismatch
     */
    private HFileBlock readBlockDataInternal(FSDataInputStream is, long offset,
        long onDiskSizeWithHeaderL, int uncompressedSize, boolean pread,
        boolean verifyChecksum) throws IOException {
      if (offset < 0) {
        throw new IOException("Invalid offset=" + offset + " trying to read "
            + "block (onDiskSize=" + onDiskSizeWithHeaderL
            + ", uncompressedSize=" + uncompressedSize + ")");
      }
      if (uncompressedSize != -1) {
        throw new IOException("Version 2 block reader API does not need " +
            "the uncompressed size parameter");
      }

      if ((onDiskSizeWithHeaderL < hdrSize && onDiskSizeWithHeaderL != -1)
          || onDiskSizeWithHeaderL >= Integer.MAX_VALUE) {
        throw new IOException("Invalid onDisksize=" + onDiskSizeWithHeaderL
            + ": expected to be at least " + hdrSize
            + " and at most " + Integer.MAX_VALUE + ", or -1 (offset="
            + offset + ", uncompressedSize=" + uncompressedSize + ")");
      }

      int onDiskSizeWithHeader = (int) onDiskSizeWithHeaderL;

      // See if we can avoid reading this block's header: if we have already
      // read it as part of the previous read's look-ahead, it is sitting in
      // the thread-local prefetched-header cache.
      PrefetchedHeader prefetchedHeader = prefetchedHeaderForThread.get();
      ByteBuffer headerBuf = prefetchedHeader.offset == offset ?
          prefetchedHeader.buf : null;

      int nextBlockOnDiskSize = 0;

      byte[] onDiskBlock = null;

      HFileBlock b = null;
      if (onDiskSizeWithHeader > 0) {
        // We know the total on-disk size but not the uncompressed size. Read
        // the entire block into memory, then parse the header. This code path
        // is used when doing a random read operation relying on the block
        // index, as well as when the client knows the on-disk size from
        // peeking into the next block's header (i.e. this block's header)
        // when reading the previous block. This is the faster and more
        // preferable case.

        // Size to skip in case we have already read the header.
        int preReadHeaderSize = headerBuf == null ? 0 : hdrSize;
        onDiskBlock = new byte[onDiskSizeWithHeader + hdrSize];
        nextBlockOnDiskSize = readAtOffset(is, onDiskBlock,
            preReadHeaderSize, onDiskSizeWithHeader - preReadHeaderSize,
            true, offset + preReadHeaderSize, pread);
        if (headerBuf != null) {
          // The header has been read when reading the previous block; copy
          // it to this block's header.
          System.arraycopy(headerBuf.array(),
              headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize);
        } else {
          headerBuf = ByteBuffer.wrap(onDiskBlock, 0, hdrSize);
        }

        // At this point we have the block's header in headerBuf; parse it.
        try {
          b = new HFileBlock(headerBuf, this.fileContext.isUseHBaseChecksum());
        } catch (IOException ex) {
          // Seen in load testing. Provide comprehensive debug info.
          throw new IOException("Failed to read compressed block at "
              + offset
              + ", onDiskSizeWithoutHeader="
              + onDiskSizeWithHeader
              + ", preReadHeaderSize="
              + hdrSize
              + ", header.length="
              + prefetchedHeader.header.length
              + ", header bytes: "
              + Bytes.toStringBinary(prefetchedHeader.header, 0,
                  hdrSize), ex);
        }

        int onDiskSizeWithoutHeader = onDiskSizeWithHeader - hdrSize;
        assert onDiskSizeWithoutHeader >= 0;
        b.validateOnDiskSizeWithoutHeader(onDiskSizeWithoutHeader);
      } else {
        // We don't know the on-disk size. Read the header first, determine
        // the on-disk size from it, and read the remaining data, thereby
        // incurring two read operations. This might happen when we are doing
        // the first read in a series of reads or a random read, and we don't
        // have access to the block index. This is costly and should happen
        // very rarely.

        // Check if we have already read this block's header as part of
        // reading the previous block. If so, don't read the header again.
        if (headerBuf == null) {
          headerBuf = ByteBuffer.allocate(hdrSize);
          readAtOffset(is, headerBuf.array(), headerBuf.arrayOffset(),
              hdrSize, false, offset, pread);
        }
        b = new HFileBlock(headerBuf, this.fileContext.isUseHBaseChecksum());
        onDiskBlock = new byte[b.getOnDiskSizeWithHeader() + hdrSize];
        System.arraycopy(headerBuf.array(),
            headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize);
        nextBlockOnDiskSize =
            readAtOffset(is, onDiskBlock, hdrSize, b.getOnDiskSizeWithHeader()
                - hdrSize, true, offset + hdrSize, pread);
        onDiskSizeWithHeader = b.onDiskSizeWithoutHeader + hdrSize;
      }

      Algorithm compressAlgo = fileContext.getCompression();
      boolean isCompressed =
          compressAlgo != null
              && compressAlgo != Compression.Algorithm.NONE;

      Encryption.Context cryptoContext = fileContext.getEncryptionContext();
      boolean isEncrypted = cryptoContext != null
          && cryptoContext != Encryption.Context.NONE;

      if (!isCompressed && !isEncrypted) {
        b.assumeUncompressed();
      }

      if (verifyChecksum &&
          !validateBlockChecksum(b, onDiskBlock, hdrSize)) {
        return null;
      }

      if (isCompressed || isEncrypted) {
        // This will allocate a new buffer but keep the header bytes.
        b.allocateBuffer(nextBlockOnDiskSize > 0);
        if (b.blockType == BlockType.ENCODED_DATA) {
          encodedBlockDecodingCtx.prepareDecoding(b.getOnDiskSizeWithoutHeader(),
              b.getUncompressedSizeWithoutHeader(), b.getBufferWithoutHeader(), onDiskBlock,
              hdrSize);
        } else {
          defaultDecodingCtx.prepareDecoding(b.getOnDiskSizeWithoutHeader(),
              b.getUncompressedSizeWithoutHeader(), b.getBufferWithoutHeader(), onDiskBlock,
              hdrSize);
        }
        if (nextBlockOnDiskSize > 0) {
          // Copy the next block's header bytes into the new block, since we
          // have them.
          System.arraycopy(onDiskBlock, onDiskSizeWithHeader, b.buf.array(),
              b.buf.arrayOffset() + hdrSize
                  + b.uncompressedSizeWithoutHeader + b.totalChecksumBytes(),
              hdrSize);
        }
      } else {
        // The onDiskBlock will become the headerAndDataBuffer for this block.
        // If nextBlockOnDiskSizeWithHeader is not zero, the onDiskBlock
        // already contains the header of the next block, so there is no need
        // to set the next block's header in it.
        b = new HFileBlock(ByteBuffer.wrap(onDiskBlock, 0,
            onDiskSizeWithHeader), this.fileContext.isUseHBaseChecksum());
      }

      b.nextBlockOnDiskSizeWithHeader = nextBlockOnDiskSize;

      // Remember the next block's header so the following read can skip it.
      if (b.nextBlockOnDiskSizeWithHeader > 0) {
        prefetchedHeader.offset = offset + b.getOnDiskSizeWithHeader();
        System.arraycopy(onDiskBlock, onDiskSizeWithHeader,
            prefetchedHeader.header, 0, hdrSize);
      }

      b.offset = offset;
      b.fileContext.setIncludesTags(this.fileContext.isIncludesTags());
      b.fileContext.setIncludesMvcc(this.fileContext.isIncludesMvcc());
      return b;
    }

    void setIncludesMemstoreTS(boolean includesMemstoreTS) {
      this.fileContext.setIncludesMvcc(includesMemstoreTS);
    }

    void setDataBlockEncoder(HFileDataBlockEncoder encoder) {
      encodedBlockDecodingCtx = encoder.newDataBlockDecodingContext(this.fileContext);
    }

    /**
     * Generates the checksum for the header as well as the data and then
     * validates it against the value stored in the header. Returns false if
     * there is a checksum mismatch, true otherwise.
     */
    protected boolean validateBlockChecksum(HFileBlock block,
        byte[] data, int hdrSize) throws IOException {
      return ChecksumUtil.validateBlockChecksum(path, block,
          data, hdrSize);
    }

    @Override
    public void closeStreams() throws IOException {
      streamWrapper.close();
    }
  }

  @Override
  public int getSerializedLength() {
    if (buf != null) {
      return this.buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE;
    }
    return 0;
  }

  @Override
  public void serialize(ByteBuffer destination) {
    ByteBuffer dupBuf = this.buf.duplicate();
    dupBuf.rewind();
    destination.put(dupBuf);
    serializeExtraInfo(destination);
  }

  public void serializeExtraInfo(ByteBuffer destination) {
    destination.put(this.fileContext.isUseHBaseChecksum() ? (byte) 1 : (byte) 0);
    destination.putLong(this.offset);
    destination.putInt(this.nextBlockOnDiskSizeWithHeader);
    destination.rewind();
  }

  @Override
  public CacheableDeserializer<Cacheable> getDeserializer() {
    return HFileBlock.blockDeserializer;
  }

  @Override
  public boolean equals(Object comparison) {
    if (this == comparison) {
      return true;
    }
    if (comparison == null) {
      return false;
    }
    if (comparison.getClass() != this.getClass()) {
      return false;
    }

    HFileBlock castedComparison = (HFileBlock) comparison;

    if (castedComparison.blockType != this.blockType) {
      return false;
    }
    if (castedComparison.nextBlockOnDiskSizeWithHeader != this.nextBlockOnDiskSizeWithHeader) {
      return false;
    }
    if (castedComparison.offset != this.offset) {
      return false;
    }
    if (castedComparison.onDiskSizeWithoutHeader != this.onDiskSizeWithoutHeader) {
      return false;
    }
    if (castedComparison.prevBlockOffset != this.prevBlockOffset) {
      return false;
    }
    if (castedComparison.uncompressedSizeWithoutHeader != this.uncompressedSizeWithoutHeader) {
      return false;
    }
    if (this.buf.compareTo(castedComparison.buf) != 0) {
      return false;
    }
    if (this.buf.position() != castedComparison.buf.position()) {
      return false;
    }
    if (this.buf.limit() != castedComparison.buf.limit()) {
      return false;
    }
    return true;
  }

  public DataBlockEncoding getDataBlockEncoding() {
    if (blockType == BlockType.ENCODED_DATA) {
      return DataBlockEncoding.getEncodingById(getDataBlockEncodingId());
    }
    return DataBlockEncoding.NONE;
  }

  byte getChecksumType() {
    return this.fileContext.getChecksumType().getCode();
  }

  int getBytesPerChecksum() {
    return this.fileContext.getBytesPerChecksum();
  }

  int getOnDiskDataSizeWithHeader() {
    return this.onDiskDataSizeWithHeader;
  }

  /**
   * Calculates the number of bytes required to store all the checksums
   * for this block. Each checksum value is a 4 byte integer.
   */
  int totalChecksumBytes() {
    // If the hfile block has minorVersion 0, then there is no checksum
    // data to validate. Similarly, a zero value in this.bytesPerChecksum
    // indicates that cached blocks do not have checksum data because
    // checksums were already validated when the block was read from disk.
    if (!fileContext.isUseHBaseChecksum() || this.fileContext.getBytesPerChecksum() == 0) {
      return 0;
    }
    return (int) ChecksumUtil.numBytes(onDiskDataSizeWithHeader,
        this.fileContext.getBytesPerChecksum());
  }
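
  // Worked example (hedged; the numbers are illustrative): with
  // onDiskDataSizeWithHeader = 65,600 and bytesPerChecksum = 16,384, the data
  // spans ceil(65600 / 16384) = 5 checksum chunks, so ChecksumUtil.numBytes
  // is expected to return 5 * CHECKSUM_SIZE = 20 bytes of checksums.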

  /**
   * Returns the size of this block's header.
   */
  public int headerSize() {
    return headerSize(this.fileContext.isUseHBaseChecksum());
  }

  /**
   * Maps a minor version to the size of the header.
   */
  public static int headerSize(boolean usesHBaseChecksum) {
    if (usesHBaseChecksum) {
      return HConstants.HFILEBLOCK_HEADER_SIZE;
    }
    return HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
  }

  /**
   * Returns the appropriate DUMMY_HEADER for the minor version.
   */
  public byte[] getDummyHeaderForVersion() {
    return getDummyHeaderForVersion(this.fileContext.isUseHBaseChecksum());
  }

  /**
   * Returns the appropriate DUMMY_HEADER for the minor version.
   */
  static private byte[] getDummyHeaderForVersion(boolean usesHBaseChecksum) {
    if (usesHBaseChecksum) {
      return HConstants.HFILEBLOCK_DUMMY_HEADER;
    }
    return DUMMY_HEADER_NO_CHECKSUM;
  }

  public HFileContext getHFileContext() {
    return this.fileContext;
  }

  /**
   * Converts the contents of the block header into a human readable string.
   * This is mostly helpful for debugging. This assumes that the block
   * has a minor version &gt; 0.
   */
  static String toStringHeader(ByteBuffer buf) throws IOException {
    int offset = buf.arrayOffset();
    byte[] b = buf.array();
    long magic = Bytes.toLong(b, offset);
    BlockType bt = BlockType.read(buf);
    offset += Bytes.SIZEOF_LONG;
    int compressedBlockSizeNoHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    int uncompressedBlockSizeNoHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    long prevBlockOffset = Bytes.toLong(b, offset);
    offset += Bytes.SIZEOF_LONG;
    byte cksumtype = b[offset];
    offset += Bytes.SIZEOF_BYTE;
    long bytesPerChecksum = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    long onDiskDataSizeWithHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    return " Header dump: magic: " + magic +
           " blockType " + bt +
           " compressedBlockSizeNoHeader " +
           compressedBlockSizeNoHeader +
           " uncompressedBlockSizeNoHeader " +
           uncompressedBlockSizeNoHeader +
           " prevBlockOffset " + prevBlockOffset +
           " checksumType " + ChecksumType.codeToType(cksumtype) +
           " bytesPerChecksum " + bytesPerChecksum +
           " onDiskDataSizeWithHeader " + onDiskDataSizeWithHeader;
  }
}