/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CompoundBloomFilter;
import org.apache.hadoop.io.IOUtils;

import com.google.common.base.Preconditions;

/**
 * Reading and writing of {@link HFile} version 2 blocks. Version 2 blocks
 * (data blocks, index blocks, Bloom chunks read by
 * {@link CompoundBloomFilter}, etc.) share the same header layout:
 * <ul>
 * <li>an 8-byte magic record identifying the {@link BlockType};</li>
 * <li>the on-disk size of the block, excluding the header (int);</li>
 * <li>the uncompressed size of the block, excluding the header (int);</li>
 * <li>the file offset of the previous block of the same type (long), used
 * for seeking to the previous block without consulting the block index.</li>
 * </ul>
 * When HBase-level checksums are enabled, the header additionally carries the
 * checksum type (byte), the number of data bytes covered by each checksum
 * value (int), and the on-disk size of header plus data, excluding the
 * checksum bytes (int). After the header come the (possibly compressed and/or
 * encrypted) data and, last, the checksum bytes.
 * <p>
 * The same layout, plus a small trailer (see
 * {@link #EXTRA_SERIALIZATION_SPACE}), is used when a block is serialized
 * into a block cache such as {@link BucketCache}.
 */
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {

  /**
   * On a checksum failure, this many succeeding read requests switch back to
   * using HDFS checksums before we try HBase-level checksums again.
   */
  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;

  public static final boolean FILL_HEADER = true;
  public static final boolean DONT_FILL_HEADER = false;

  /**
   * The size of a block header when blockType is
   * {@link BlockType#ENCODED_DATA}: the normal header followed by the id of
   * the data block encoder.
   */
  public static final int ENCODED_HEADER_SIZE = HConstants.HFILEBLOCK_HEADER_SIZE
      + DataBlockEncoding.ID_SIZE;

  static final byte[] DUMMY_HEADER_NO_CHECKSUM =
      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];

  public static final int BYTE_BUFFER_HEAP_SIZE = (int) ClassSize.estimateBase(
      ByteBuffer.wrap(new byte[0], 0, 0).getClass(), false);

  /**
   * Space reserved, past the block data, for the extra serialization trailer
   * written by {@link #serializeExtraInfo(ByteBuffer)}: a checksum flag
   * (byte), the next block's on-disk size (int) and this block's file offset
   * (long).
   */
  public static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT
      + Bytes.SIZEOF_LONG;

  /**
   * Each checksum value is an integer that can be stored in 4 bytes.
   */
  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;

  private static final CacheableDeserializer<Cacheable> blockDeserializer =
      new CacheableDeserializer<Cacheable>() {
        @Override
        public HFileBlock deserialize(ByteBuffer buf, boolean reuse) throws IOException {
          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();
          ByteBuffer newByteBuffer;
          if (reuse) {
            newByteBuffer = buf.slice();
          } else {
            newByteBuffer = ByteBuffer.allocate(buf.limit());
            newByteBuffer.put(buf);
          }
          buf.position(buf.limit());
          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);
          boolean usesChecksum = buf.get() == (byte) 1;
          HFileBlock ourBuffer = new HFileBlock(newByteBuffer, usesChecksum);
          ourBuffer.offset = buf.getLong();
          ourBuffer.nextBlockOnDiskSizeWithHeader = buf.getInt();
          if (ourBuffer.hasNextBlockHeader()) {
            ourBuffer.buf.limit(ourBuffer.buf.limit() - ourBuffer.headerSize());
          }
          return ourBuffer;
        }

        @Override
        public int getDeserialiserIdentifier() {
          return deserializerIdentifier;
        }

        @Override
        public HFileBlock deserialize(ByteBuffer b) throws IOException {
          return deserialize(b, false);
        }
      };

  private static final int deserializerIdentifier;
  static {
    deserializerIdentifier = CacheableDeserializerIdManager
        .registerDeserializer(blockDeserializer);
  }
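
  // A minimal sketch (not part of the original source) of the cache
  // serialization that serialize(ByteBuffer) produces and blockDeserializer
  // consumes, assuming a block whose bytes occupy buf.limit() bytes:
  //
  //   [ block bytes: header + data (+ checksums) ]  buf.limit() bytes
  //   [ usesHBaseChecksum flag                   ]  1 byte
  //   [ offset of this block in the file         ]  8 bytes (long)
  //   [ nextBlockOnDiskSizeWithHeader            ]  4 bytes (int)
  //
  // The trailer is EXTRA_SERIALIZATION_SPACE (13) bytes; the deserializer
  // above shrinks the limit over the block bytes, then reads the trailer in
  // the same order it was written.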

  /** Type of block. Header field 1. */
  private BlockType blockType;

  /** Size on disk excluding header, including checksum. Header field 2. */
  private int onDiskSizeWithoutHeader;

  /** Size of pure data. Does not include header or checksums. Header field 3. */
  private final int uncompressedSizeWithoutHeader;

  /** The offset of the previous block of the same type. Header field 4. */
  private final long prevBlockOffset;

  /**
   * Size on disk of header + data, excluding checksum. Written to the header
   * only when HBase-level checksums are in use.
   */
  private final int onDiskDataSizeWithHeader;

  /** The in-memory representation of the hfile block. */
  private ByteBuffer buf;

  /** Metadata describing the hfile this block came from. */
  private HFileContext fileContext;

  /**
   * The offset of this block in the file. Populated by the block index (which
   * is where this block's offset comes from), not by the block header, and
   * needed when serializing the block to the cache.
   */
  private long offset = -1;

  /**
   * The on-disk size of the next block, including its header, obtained by
   * peeking into the first {@link #headerSize()} bytes that follow this block
   * on disk, or -1 if unknown.
   */
  private int nextBlockOnDiskSizeWithHeader = -1;

  /**
   * Creates a new {@link HFile} block from the given fields. This constructor
   * is used when the block data has already been read and uncompressed, e.g.
   * on the write path or when preparing a block for caching.
   *
   * @param blockType the type of this block, see {@link BlockType}
   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}
   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}
   * @param prevBlockOffset see {@link #prevBlockOffset}
   * @param buf block header followed by uncompressed data
   * @param fillHeader when true, overwrite the header portion of {@code buf}
   *          with the field values passed here
   * @param offset the file offset the block was read from
   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}
   * @param fileContext HFile meta data
   */
  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,
      int onDiskDataSizeWithHeader, HFileContext fileContext) {
    this.blockType = blockType;
    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;
    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;
    this.prevBlockOffset = prevBlockOffset;
    this.buf = buf;
    this.offset = offset;
    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;
    this.fileContext = fileContext;
    if (fillHeader) {
      overwriteHeader();
    }
    this.buf.rewind();
  }

  /**
   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.
   */
  HFileBlock(HFileBlock that) {
    this.blockType = that.blockType;
    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;
    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;
    this.prevBlockOffset = that.prevBlockOffset;
    this.buf = that.buf.duplicate();
    this.offset = that.offset;
    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;
    this.fileContext = that.fileContext;
    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;
  }

  /**
   * Creates a block from an existing buffer that starts with a header.
   * Rewinds and takes ownership of the buffer.
   *
   * @param b buffer containing the header followed by the data
   * @param usesHBaseChecksum whether the header carries the checksum fields
   */
  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {
    b.rewind();
    blockType = BlockType.read(b);
    onDiskSizeWithoutHeader = b.getInt();
    uncompressedSizeWithoutHeader = b.getInt();
    prevBlockOffset = b.getLong();
    HFileContextBuilder contextBuilder = new HFileContextBuilder();
    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);
    if (usesHBaseChecksum) {
      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));
      contextBuilder.withBytesPerCheckSum(b.getInt());
      this.onDiskDataSizeWithHeader = b.getInt();
    } else {
      contextBuilder.withChecksumType(ChecksumType.NULL);
      contextBuilder.withBytesPerCheckSum(0);
      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader +
          HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
    }
    this.fileContext = contextBuilder.build();
    buf = b;
    buf.rewind();
  }
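
  // For reference, a byte-offset map of the fields the constructor above
  // reads from a checksummed header (offsets follow from the field widths;
  // illustrative, not normative):
  //
  //   offset  0, 8 bytes: block magic (BlockType)
  //   offset  8, 4 bytes: onDiskSizeWithoutHeader
  //   offset 12, 4 bytes: uncompressedSizeWithoutHeader
  //   offset 16, 8 bytes: prevBlockOffset
  //   offset 24, 1 byte : checksum type code
  //   offset 25, 4 bytes: bytesPerChecksum
  //   offset 29, 4 bytes: onDiskDataSizeWithHeader
  //
  // totalling HConstants.HFILEBLOCK_HEADER_SIZE (33) bytes; without checksums
  // the header stops after prevBlockOffset, at 24 bytes.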

  public BlockType getBlockType() {
    return blockType;
  }

  /** @return the id of the data block encoding used to encode this block */
  public short getDataBlockEncodingId() {
    if (blockType != BlockType.ENCODED_DATA) {
      throw new IllegalArgumentException("Querying encoder ID of a block " +
          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);
    }
    return buf.getShort(headerSize());
  }

  /**
   * @return the on-disk size of header + data + checksum
   */
  public int getOnDiskSizeWithHeader() {
    return onDiskSizeWithoutHeader + headerSize();
  }

  /**
   * @return the on-disk size of data + checksum, the header excluded
   */
  public int getOnDiskSizeWithoutHeader() {
    return onDiskSizeWithoutHeader;
  }

  /**
   * @return the uncompressed size of the data part, header and checksum excluded
   */
  public int getUncompressedSizeWithoutHeader() {
    return uncompressedSizeWithoutHeader;
  }

  /**
   * @return the offset of the previous block of the same type in the file, or
   *         -1 if unknown
   */
  public long getPrevBlockOffset() {
    return prevBlockOffset;
  }

  /**
   * Rewinds {@code buf} and writes the header fields into it. The buffer
   * position is modified as a side effect.
   */
  private void overwriteHeader() {
    buf.rewind();
    blockType.write(buf);
    buf.putInt(onDiskSizeWithoutHeader);
    buf.putInt(uncompressedSizeWithoutHeader);
    buf.putLong(prevBlockOffset);
    if (this.fileContext.isUseHBaseChecksum()) {
      buf.put(fileContext.getChecksumType().getCode());
      buf.putInt(fileContext.getBytesPerChecksum());
      buf.putInt(onDiskDataSizeWithHeader);
    }
  }

  /**
   * Returns the buffer this block stores internally, with the header and the
   * checksum bytes excluded. Callers must not modify the returned buffer's
   * backing array.
   *
   * @return the buffer with header skipped and checksum omitted
   */
  public ByteBuffer getBufferWithoutHeader() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset() + headerSize(),
        buf.limit() - headerSize() - totalChecksumBytes()).slice();
  }

  /**
   * Returns the buffer of this block, with the header included and the
   * checksum bytes excluded. Intended for read-only use; callers must not
   * modify the returned buffer's backing array.
   *
   * @return the buffer with header included and checksum omitted
   */
  public ByteBuffer getBufferReadOnly() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset(),
        buf.limit() - totalChecksumBytes()).slice();
  }

  /**
   * Returns the buffer of this block, with header and checksum included.
   * Intended for read-only use.
   *
   * @return the buffer with header and checksum included
   */
  public ByteBuffer getBufferReadOnlyWithHeader() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset(), buf.limit()).slice();
  }

  /**
   * Returns a byte buffer of this block, including header and checksum,
   * positioned at the beginning of the header. The underlying data array is
   * not copied.
   */
  ByteBuffer getBufferWithHeader() {
    ByteBuffer dupBuf = buf.duplicate();
    dupBuf.rewind();
    return dupBuf;
  }

  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,
      String fieldName) throws IOException {
    if (valueFromBuf != valueFromField) {
      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf
          + ") is different from that in the field (" + valueFromField + ")");
    }
  }

  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)
      throws IOException {
    if (valueFromBuf != valueFromField) {
      throw new IOException("Block type stored in the buffer: " +
          valueFromBuf + ", block type field: " + valueFromField);
    }
  }

  /**
   * Checks that the block is internally consistent, i.e. that the first
   * {@link #headerSize()} bytes of the buffer contain a valid header matching
   * the fields of this object. Assumes a packed block structure. Primarily
   * for testing and debugging; not thread-safe, because it alters the
   * internal buffer pointer.
   */
  void sanityCheck() throws IOException {
    buf.rewind();

    sanityCheckAssertion(BlockType.read(buf), blockType);

    sanityCheckAssertion(buf.getInt(), onDiskSizeWithoutHeader,
        "onDiskSizeWithoutHeader");

    sanityCheckAssertion(buf.getInt(), uncompressedSizeWithoutHeader,
        "uncompressedSizeWithoutHeader");

    sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlockOffset");
    if (this.fileContext.isUseHBaseChecksum()) {
      sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(),
          "checksumType");
      sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(),
          "bytesPerChecksum");
      sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");
    }

    int cksumBytes = totalChecksumBytes();
    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;
    if (buf.limit() != expectedBufLimit) {
      throw new AssertionError("Expected buffer limit " + expectedBufLimit
          + ", got " + buf.limit());
    }

    // We might optionally allocate headerSize() more bytes to read the next
    // block's header, so there are two acceptable buffer capacities.
    int hdrSize = headerSize();
    if (buf.capacity() != expectedBufLimit &&
        buf.capacity() != expectedBufLimit + hdrSize) {
      throw new AssertionError("Invalid buffer capacity: " + buf.capacity() +
          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));
    }
  }
436
437 @Override
438 public String toString() {
439 StringBuilder sb = new StringBuilder()
440 .append("HFileBlock [")
441 .append(" fileOffset=").append(offset)
442 .append(" headerSize()=").append(headerSize())
443 .append(" blockType=").append(blockType)
444 .append(" onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)
445 .append(" uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)
446 .append(" prevBlockOffset=").append(prevBlockOffset)
447 .append(" isUseHBaseChecksum()=").append(fileContext.isUseHBaseChecksum());
448 if (fileContext.isUseHBaseChecksum()) {
449 sb.append(" checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))
450 .append(" bytesPerChecksum=").append(this.buf.getInt(24 + 1))
451 .append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);
452 } else {
453 sb.append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)
454 .append("(").append(onDiskSizeWithoutHeader)
455 .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");
456 }
457 sb.append(" getOnDiskSizeWithHeader()=").append(getOnDiskSizeWithHeader())
458 .append(" totalChecksumBytes()=").append(totalChecksumBytes())
459 .append(" isUnpacked()=").append(isUnpacked())
460 .append(" buf=[ ")
461 .append(buf)
462 .append(", array().length=").append(buf.array().length)
463 .append(", arrayOffset()=").append(buf.arrayOffset())
464 .append(" ]")
465 .append(" dataBeginsWith=")
466 .append(Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),
467 Math.min(32, buf.limit() - buf.arrayOffset() - headerSize())))
468 .append(" fileContext=").append(fileContext)
469 .append(" ]");
470 return sb.toString();
471 }

  /**
   * Called after reading a block with an on-disk size provided by the caller
   * (e.g. from the block index or the previous block's look-ahead).
   *
   * @param expectedOnDiskSizeWithoutHeader the expected on-disk size,
   *          excluding the header
   * @throws IOException if the size in the header does not match
   */
  private void validateOnDiskSizeWithoutHeader(
      int expectedOnDiskSizeWithoutHeader) throws IOException {
    if (onDiskSizeWithoutHeader != expectedOnDiskSizeWithoutHeader) {
      String blockInfoMsg =
          "Block offset: " + offset + ", data starts with: "
              + Bytes.toStringBinary(buf.array(), buf.arrayOffset(),
                  Math.min(32, buf.limit()));
      throw new IOException("On-disk size without header provided is "
          + expectedOnDiskSizeWithoutHeader + ", but block "
          + "header contains " + onDiskSizeWithoutHeader + ". " +
          blockInfoMsg);
    }
  }

  /**
   * Retrieves the decompressed/decrypted view of this block. An encoded block
   * remains in its encoded structure. Returns {@code this} when the block is
   * neither compressed nor encrypted.
   */
  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {
    if (!fileContext.isCompressedOrEncrypted()) {
      // TODO: cannot use the block's own fileContext here, because
      // HFileBlock(ByteBuffer, boolean), used for serialization to the L2
      // cache, does not preserve encoding and encryption details.
      return this;
    }

    HFileBlock unpacked = new HFileBlock(this);
    unpacked.allocateBuffer(); // allocates space for the decompressed block

    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?
        reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();
    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),
        unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),
        this.getBufferReadOnlyWithHeader().array(), this.headerSize());

    // Preserve the next block's header bytes in the new block, if present.
    if (unpacked.hasNextBlockHeader()) {
      System.arraycopy(this.buf.array(), this.buf.arrayOffset() + this.onDiskDataSizeWithHeader,
          unpacked.buf.array(), unpacked.buf.arrayOffset() + unpacked.headerSize() +
              unpacked.uncompressedSizeWithoutHeader + unpacked.totalChecksumBytes(),
          unpacked.headerSize());
    }
    return unpacked;
  }

  /**
   * Return true when this buffer includes the next block's header.
   */
  private boolean hasNextBlockHeader() {
    return nextBlockOnDiskSizeWithHeader > 0;
  }

  /**
   * Always allocates a new buffer of the correct size. Copies the header
   * bytes from the existing buffer. Does not change the header fields.
   * Reserves room for the checksum bytes as well.
   */
  private void allocateBuffer() {
    int cksumBytes = totalChecksumBytes();
    int headerSize = headerSize();
    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader +
        cksumBytes + (hasNextBlockHeader() ? headerSize : 0);

    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);

    // Copy the header bytes into newBuf.
    System.arraycopy(buf.array(), buf.arrayOffset(), newBuf.array(),
        newBuf.arrayOffset(), headerSize);

    buf = newBuf;
    // Set the limit to exclude the next block's header.
    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);
  }
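
  // Sketch of the unpacked buffer layout produced above (illustrative only):
  //
  //   [ header (headerSize bytes, copied from the packed block) ]
  //   [ uncompressed data (uncompressedSizeWithoutHeader bytes) ]
  //   [ checksums (totalChecksumBytes() bytes)                  ]
  //   [ next block's header (headerSize bytes, only if peeked)  ]
  //
  // buf.limit() excludes the trailing next-block header, which is why
  // isUnpacked() below accepts either of the two capacities.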

  /**
   * Return true when this block's buffer has been unpacked, false otherwise.
   * Note this is a calculated heuristic, not a tracked attribute of the block.
   */
  public boolean isUnpacked() {
    final int cksumBytes = totalChecksumBytes();
    final int headerSize = headerSize();
    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;
    final int bufCapacity = buf.capacity();
    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;
  }

  /** An additional sanity check in case no compression or encryption is used. */
  public void assumeUncompressed() throws IOException {
    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader +
        totalChecksumBytes()) {
      throw new IOException("Using no compression but "
          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "
          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader
          + ", numChecksumbytes=" + totalChecksumBytes());
    }
  }

  /**
   * @param expectedType the expected type of this block
   * @throws IOException if this block's type differs from the expected one
   */
  public void expectType(BlockType expectedType) throws IOException {
    if (blockType != expectedType) {
      throw new IOException("Invalid block type: expected=" + expectedType
          + ", actual=" + blockType);
    }
  }

  /** @return the offset of this block in the file it was read from */
  public long getOffset() {
    if (offset < 0) {
      throw new IllegalStateException(
          "HFile block offset not initialized properly");
    }
    return offset;
  }

  /**
   * @return a byte stream reading the data and checksum sections of this block
   */
  public DataInputStream getByteStream() {
    return new DataInputStream(new ByteArrayInputStream(buf.array(),
        buf.arrayOffset() + headerSize(), buf.limit() - headerSize()));
  }

  @Override
  public long heapSize() {
    long size = ClassSize.align(
        ClassSize.OBJECT +
        // Block type, byte buffer and meta references
        3 * ClassSize.REFERENCE +
        // On-disk size, uncompressed size, next block's on-disk size
        // and on-disk data size
        4 * Bytes.SIZEOF_INT +
        // This and previous block offset
        2 * Bytes.SIZEOF_LONG +
        // Heap size of the meta object. fileContext is never null.
        fileContext.heapSize()
    );

    if (buf != null) {
      // Deep overhead of the byte buffer. Needs to be aligned separately.
      size += ClassSize.align(buf.capacity() + BYTE_BUFFER_HEAP_SIZE);
    }

    return ClassSize.align(size);
  }

  /**
   * Read from an input stream. Analogous to
   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but also
   * specifies a number of "extra" bytes to read opportunistically: the read
   * still succeeds when only the necessary bytes could be obtained.
   *
   * @param in the input stream to read from
   * @param buf the buffer to read into
   * @param bufOffset the destination offset in the buffer
   * @param necessaryLen the number of bytes that are absolutely necessary to read
   * @param extraLen the number of extra bytes that would be nice to read
   * @return true if the extra bytes were read as well
   * @throws IOException if the necessary bytes could not be read
   */
  public static boolean readWithExtra(InputStream in, byte[] buf,
      int bufOffset, int necessaryLen, int extraLen) throws IOException {
    int bytesRemaining = necessaryLen + extraLen;
    while (bytesRemaining > 0) {
      int ret = in.read(buf, bufOffset, bytesRemaining);
      if (ret == -1 && bytesRemaining <= extraLen) {
        // We could not read the "extra data", but that is OK.
        break;
      }

      if (ret < 0) {
        throw new IOException("Premature EOF from inputStream (read "
            + "returned " + ret + ", was trying to read " + necessaryLen
            + " necessary bytes and " + extraLen + " extra bytes, "
            + "successfully read "
            + (necessaryLen + extraLen - bytesRemaining) + ")");
      }
      bufOffset += ret;
      bytesRemaining -= ret;
    }
    return bytesRemaining <= 0;
  }
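
  // A hedged usage sketch (identifiers here are hypothetical, not from the
  // source): read a block body and opportunistically peek at the next
  // header in one call, tolerating EOF on the peeked bytes only.
  //
  //   byte[] dest = new byte[blockSize + hdrSize];
  //   boolean gotExtra = readWithExtra(in, dest, 0, blockSize, hdrSize);
  //   // gotExtra == false near end of file: the necessary blockSize bytes
  //   // were read, but the next header was not available.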

  /**
   * @return the on-disk size of the next block, including its header, read by
   *         peeking into the bytes that follow this block, or -1 if unknown
   */
  public int getNextBlockOnDiskSizeWithHeader() {
    return nextBlockOnDiskSizeWithHeader;
  }

  /**
   * Unified version 2 {@link HFile} block writer. The intended usage pattern
   * is as follows:
   * <ol>
   * <li>Construct an {@link HFileBlock.Writer}, providing a block encoder.</li>
   * <li>Call {@link Writer#startWriting} and get a data stream to write to.</li>
   * <li>Write your data into the stream.</li>
   * <li>Call {@link Writer#writeHeaderAndData(FSDataOutputStream)} to store
   * the serialized block into an external stream.</li>
   * <li>Repeat to write more blocks.</li>
   * </ol>
   */
  public static class Writer {

    private enum State {
      INIT,
      WRITING,
      BLOCK_READY
    }

    /** Writer state. Used to determine if a block is ready to be written out. */
    private State state = State.INIT;

    /** Data block encoder used for data blocks. */
    private final HFileDataBlockEncoder dataBlockEncoder;

    private HFileBlockEncodingContext dataBlockEncodingCtx;

    /** Block encoding context for non-data blocks. */
    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;

    /**
     * The stream we use to accumulate data in uncompressed format for each
     * block. We reset this stream at the end of each block and reuse it. The
     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE}
     * bytes into this stream.
     */
    private ByteArrayOutputStream baosInMemory;

    /**
     * Current block type. Set in {@link #startWriting(BlockType)}. Could be
     * changed in {@link #finishBlock()} from {@link BlockType#DATA} to
     * {@link BlockType#ENCODED_DATA}.
     */
    private BlockType blockType;

    /**
     * A stream that the user writes uncompressed bytes to; the bytes
     * accumulate in {@link #baosInMemory}.
     */
    private DataOutputStream userDataStream;

    /**
     * Bytes to be written to the file system, including the header.
     * Compressed if compression is turned on. Does not include the checksum
     * data, which is kept in {@link #onDiskChecksum}.
     */
    private byte[] onDiskBytesWithHeader;

    /**
     * The checksum data for this block, generated over
     * {@link #onDiskBytesWithHeader} and written immediately after it.
     */
    private byte[] onDiskChecksum;

    /**
     * Valid in the "block ready" state. Contains the header and the
     * uncompressed (but potentially encoded, for a data block) bytes. Does
     * not store checksums.
     */
    private byte[] uncompressedBytesWithHeader;

    /**
     * Current block's start offset in the {@link HFile}. Set in
     * {@link #writeHeaderAndData(FSDataOutputStream)}.
     */
    private long startOffset;

    /**
     * Offset of the previous block by block type. Updated when the next block
     * is started.
     */
    private long[] prevOffsetByType;

    /** The offset of the previous block of the same type. */
    private long prevOffset;

    /** Metadata describing the hfile being written. */
    private HFileContext fileContext;

    /**
     * @param dataBlockEncoder data block encoding algorithm to use, or null
     *          for no encoding
     */
    public Writer(HFileDataBlockEncoder dataBlockEncoder, HFileContext fileContext) {
      this.dataBlockEncoder = dataBlockEncoder != null
          ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
      defaultBlockEncodingCtx = new HFileBlockDefaultEncodingContext(null,
          HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);
      dataBlockEncodingCtx = this.dataBlockEncoder
          .newDataBlockEncodingContext(HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);

      if (fileContext.getBytesPerChecksum() < HConstants.HFILEBLOCK_HEADER_SIZE) {
        throw new RuntimeException("Unsupported value of bytesPerChecksum. " +
            " Minimum is " + HConstants.HFILEBLOCK_HEADER_SIZE + " but the configured value is " +
            fileContext.getBytesPerChecksum());
      }

      baosInMemory = new ByteArrayOutputStream();

      prevOffsetByType = new long[BlockType.values().length];
      for (int i = 0; i < prevOffsetByType.length; ++i) {
        prevOffsetByType[i] = -1;
      }

      this.fileContext = fileContext;
    }

    /**
     * Starts writing into the block. The previous block's data is discarded.
     *
     * @return the stream the user can write their data into
     */
    public DataOutputStream startWriting(BlockType newBlockType)
        throws IOException {
      if (state == State.BLOCK_READY && startOffset != -1) {
        // We had a previous block that was written to a stream at a specific
        // offset. Save that offset as the last offset of a block of that type.
        prevOffsetByType[blockType.getId()] = startOffset;
      }

      startOffset = -1;
      blockType = newBlockType;

      baosInMemory.reset();
      baosInMemory.write(HConstants.HFILEBLOCK_DUMMY_HEADER);

      state = State.WRITING;

      // We will compress it later in finishBlock().
      userDataStream = new DataOutputStream(baosInMemory);
      return userDataStream;
    }

    /**
     * Returns the stream for the user to write to. The block writer takes
     * care of handling compression and buffering for caching on write. Can
     * only be called in the "writing" state.
     *
     * @return the data output stream for the user to write to
     */
    DataOutputStream getUserDataStream() {
      expectState(State.WRITING);
      return userDataStream;
    }

    /**
     * Transitions the block writer from the "writing" state to the "block
     * ready" state. Does nothing if a block is already finished.
     */
    private void ensureBlockReady() throws IOException {
      Preconditions.checkState(state != State.INIT,
          "Unexpected state: " + state);

      if (state == State.BLOCK_READY) {
        return;
      }

      // This will set state to BLOCK_READY.
      finishBlock();
    }

    /**
     * An internal method that flushes the compressing stream (if using
     * compression), serializes the header, and takes care of the separate
     * uncompressed stream for caching on write, if applicable. Sets the block
     * write state to "block ready".
     */
    private void finishBlock() throws IOException {
      userDataStream.flush();
      // toByteArray() does an array copy, so it is safe to cache this byte array.
      uncompressedBytesWithHeader = baosInMemory.toByteArray();
      prevOffset = prevOffsetByType[blockType.getId()];

      // We need to set state before we can package the block up for
      // cache-on-write. In a way, the block is ready, but not yet encoded or
      // compressed.
      state = State.BLOCK_READY;
      if (blockType == BlockType.DATA) {
        encodeDataBlockForDisk();
      } else {
        defaultBlockEncodingCtx.compressAfterEncodingWithBlockType(
            uncompressedBytesWithHeader, blockType);
        onDiskBytesWithHeader =
            defaultBlockEncodingCtx.getOnDiskBytesWithHeader();
      }

      int numBytes = (int) ChecksumUtil.numBytes(
          onDiskBytesWithHeader.length,
          fileContext.getBytesPerChecksum());

      // Put the header into the on-disk bytes; the header is still unfilled.
      putHeader(onDiskBytesWithHeader, 0,
          onDiskBytesWithHeader.length + numBytes,
          uncompressedBytesWithHeader.length, onDiskBytesWithHeader.length);
      // Set the header for the uncompressed bytes (for cache-on-write).
      putHeader(uncompressedBytesWithHeader, 0,
          onDiskBytesWithHeader.length + numBytes,
          uncompressedBytesWithHeader.length, onDiskBytesWithHeader.length);

      onDiskChecksum = new byte[numBytes];
      ChecksumUtil.generateChecksums(
          onDiskBytesWithHeader, 0, onDiskBytesWithHeader.length,
          onDiskChecksum, 0, fileContext.getChecksumType(), fileContext.getBytesPerChecksum());
    }

    /**
     * Encodes this block if it is a data block and encoding is turned on.
     */
    private void encodeDataBlockForDisk() throws IOException {
      // Expose only the key/value portion of the block to the encoder.
      ByteBuffer rawKeyValues =
          ByteBuffer.wrap(uncompressedBytesWithHeader, HConstants.HFILEBLOCK_HEADER_SIZE,
              uncompressedBytesWithHeader.length - HConstants.HFILEBLOCK_HEADER_SIZE).slice();

      // Do the encoding.
      dataBlockEncoder.beforeWriteToDisk(rawKeyValues, dataBlockEncodingCtx, blockType);

      uncompressedBytesWithHeader =
          dataBlockEncodingCtx.getUncompressedBytesWithHeader();
      onDiskBytesWithHeader =
          dataBlockEncodingCtx.getOnDiskBytesWithHeader();
      blockType = dataBlockEncodingCtx.getBlockType();
    }

    /**
     * Puts the header into the given byte array at the given offset.
     *
     * @param onDiskSize the size of the block on disk: header + data + checksum
     * @param uncompressedSize the size of the block after decompression (but
     *          before optional data block decoding), including the header
     * @param onDiskDataSize the size of the block on disk with header and data
     *          but without the checksums
     */
    private void putHeader(byte[] dest, int offset, int onDiskSize,
        int uncompressedSize, int onDiskDataSize) {
      offset = blockType.put(dest, offset);
      offset = Bytes.putInt(dest, offset, onDiskSize - HConstants.HFILEBLOCK_HEADER_SIZE);
      offset = Bytes.putInt(dest, offset, uncompressedSize - HConstants.HFILEBLOCK_HEADER_SIZE);
      offset = Bytes.putLong(dest, offset, prevOffset);
      offset = Bytes.putByte(dest, offset, fileContext.getChecksumType().getCode());
      offset = Bytes.putInt(dest, offset, fileContext.getBytesPerChecksum());
      Bytes.putInt(dest, offset, onDiskDataSize);
    }

    /**
     * Similar to {@link #finishBlockAndWriteHeaderAndData(DataOutputStream)},
     * but additionally records the offset of this block so that it can be
     * referenced in the next block of the same type.
     *
     * @throws IOException if the block was already written at a different offset
     */
    public void writeHeaderAndData(FSDataOutputStream out) throws IOException {
      long offset = out.getPos();
      if (startOffset != -1 && offset != startOffset) {
        throw new IOException("A " + blockType + " block written to a "
            + "stream twice, first at offset " + startOffset + ", then at "
            + offset);
      }
      startOffset = offset;

      finishBlockAndWriteHeaderAndData((DataOutputStream) out);
    }

    /**
     * Writes the header and the compressed data of this block (or uncompressed
     * data when not using compression) into the given stream. Can be called in
     * the "writing" state or in the "block ready" state. If called in the
     * "writing" state, transitions the writer to the "block ready" state.
     *
     * @param out the output stream to write the data to
     */
    private void finishBlockAndWriteHeaderAndData(DataOutputStream out)
        throws IOException {
      ensureBlockReady();
      out.write(onDiskBytesWithHeader);
      out.write(onDiskChecksum);
    }

    /**
     * Returns the header plus the on-disk data and checksums as a byte array,
     * exactly as they would be stored on disk. Can be called in the "writing"
     * state or in the "block ready" state; if called in the "writing" state,
     * transitions the writer to the "block ready" state.
     *
     * @return header, data and checksums as stored on disk, in a byte array
     */
    byte[] getHeaderAndDataForTest() throws IOException {
      ensureBlockReady();
      // This is not optimal, because it does an extra copy, but this method
      // is only used by unit tests.
      byte[] output =
          new byte[onDiskBytesWithHeader.length
              + onDiskChecksum.length];
      System.arraycopy(onDiskBytesWithHeader, 0, output, 0,
          onDiskBytesWithHeader.length);
      System.arraycopy(onDiskChecksum, 0, output,
          onDiskBytesWithHeader.length, onDiskChecksum.length);
      return output;
    }

    /**
     * Releases resources used by this writer.
     */
    public void release() {
      if (dataBlockEncodingCtx != null) {
        dataBlockEncodingCtx.close();
        dataBlockEncodingCtx = null;
      }
      if (defaultBlockEncodingCtx != null) {
        defaultBlockEncodingCtx.close();
        defaultBlockEncodingCtx = null;
      }
    }

    /**
     * Returns the on-disk size of the data portion of the block. This is the
     * compressed size if compression is enabled. Can only be called in the
     * "block ready" state. The header is not compressed, and its size is not
     * included in the return value.
     *
     * @return the on-disk size of the block, not including the header
     */
    int getOnDiskSizeWithoutHeader() {
      expectState(State.BLOCK_READY);
      return onDiskBytesWithHeader.length + onDiskChecksum.length
          - HConstants.HFILEBLOCK_HEADER_SIZE;
    }

    /**
     * Returns the on-disk size of the block. Can only be called in the
     * "block ready" state.
     *
     * @return the on-disk size of the block ready to be written, including
     *         the header, the data and the checksum data
     */
    int getOnDiskSizeWithHeader() {
      expectState(State.BLOCK_READY);
      return onDiskBytesWithHeader.length + onDiskChecksum.length;
    }

    /**
     * The uncompressed size of the block data, not including the header.
     */
    int getUncompressedSizeWithoutHeader() {
      expectState(State.BLOCK_READY);
      return uncompressedBytesWithHeader.length - HConstants.HFILEBLOCK_HEADER_SIZE;
    }

    /**
     * The uncompressed size of the block data, including the header.
     */
    int getUncompressedSizeWithHeader() {
      expectState(State.BLOCK_READY);
      return uncompressedBytesWithHeader.length;
    }

    /** @return true if a block is being written */
    public boolean isWriting() {
      return state == State.WRITING;
    }

    /**
     * Returns the number of bytes written into the current block so far, or
     * zero if not writing a block at the moment. Note that this will also
     * return zero in the "block ready" state.
     *
     * @return the number of bytes written
     */
    public int blockSizeWritten() {
      if (state != State.WRITING) {
        return 0;
      }
      return userDataStream.size();
    }

    /**
     * Returns the header followed by the uncompressed data, even if using
     * compression. This is needed for storing uncompressed blocks in the
     * block cache. Can only be called in the "block ready" state. Returns
     * only the header and data; does not include checksum data.
     *
     * @return uncompressed block bytes for caching on write
     */
    ByteBuffer getUncompressedBufferWithHeader() {
      expectState(State.BLOCK_READY);
      return ByteBuffer.wrap(uncompressedBytesWithHeader);
    }

    /**
     * Returns the header followed by the on-disk (compressed, encoded and/or
     * encrypted) data. This is needed for storing packed blocks in the block
     * cache. Can only be called in the "block ready" state. Returns only the
     * header and data; does not include checksum data.
     *
     * @return packed block bytes for caching on write
     */
    ByteBuffer getOnDiskBufferWithHeader() {
      expectState(State.BLOCK_READY);
      return ByteBuffer.wrap(onDiskBytesWithHeader);
    }

    private void expectState(State expectedState) {
      if (state != expectedState) {
        throw new IllegalStateException("Expected state: " + expectedState +
            ", actual state: " + state);
      }
    }

    /**
     * Takes the given {@link BlockWritable} instance, creates a new block of
     * its appropriate type, writes the writable into this block, and flushes
     * the block into the output stream.
     *
     * @param bw the block-writable object to write as a block
     * @param out the file system output stream
     */
    public void writeBlock(BlockWritable bw, FSDataOutputStream out)
        throws IOException {
      bw.writeToBlock(startWriting(bw.getBlockType()));
      writeHeaderAndData(out);
    }

    /**
     * Creates a new HFileBlock for caching. Checksums have already been
     * validated, so the byte buffer passed into the block's constructor
     * carries no checksum data; this is indicated by a zero value in
     * bytesPerChecksum of the new context.
     */
    public HFileBlock getBlockForCaching(CacheConfig cacheConf) {
      HFileContext newContext = new HFileContextBuilder()
          .withBlockSize(fileContext.getBlocksize())
          .withBytesPerCheckSum(0)
          .withChecksumType(ChecksumType.NULL) // no checksums in cached data
          .withCompression(fileContext.getCompression())
          .withDataBlockEncoding(fileContext.getDataBlockEncoding())
          .withHBaseCheckSum(fileContext.isUseHBaseChecksum())
          .withCompressTags(fileContext.isCompressTags())
          .withIncludesMvcc(fileContext.isIncludesMvcc())
          .withIncludesTags(fileContext.isIncludesTags())
          .build();
      return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(),
          getUncompressedSizeWithoutHeader(), prevOffset,
          cacheConf.shouldCacheCompressed(blockType.getCategory()) ?
              getOnDiskBufferWithHeader() :
              getUncompressedBufferWithHeader(),
          FILL_HEADER, startOffset,
          onDiskBytesWithHeader.length + onDiskChecksum.length, newContext);
    }
  }
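
  // A hedged usage sketch for Writer (the "fileContext" and "out" names are
  // hypothetical; error handling omitted). Passing a null encoder falls back
  // to NoOpDataBlockEncoder.INSTANCE, per the constructor above.
  //
  //   HFileBlock.Writer writer = new HFileBlock.Writer(null, fileContext);
  //   DataOutputStream dos = writer.startWriting(BlockType.DATA);
  //   // ... write cell bytes to dos ...
  //   writer.writeHeaderAndData(out);  // out is an FSDataOutputStream
  //   // repeat startWriting()/writeHeaderAndData() for more blocks,
  //   // then call writer.release() when done.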

  /** Something that can be written into a block. */
  public interface BlockWritable {

    /** The type of block this data should use. */
    BlockType getBlockType();

    /**
     * Writes the block to the provided stream. Must not write any magic
     * records.
     *
     * @param out a stream to write uncompressed data into
     */
    void writeToBlock(DataOutput out) throws IOException;
  }

  // Block readers and writers

  /** An interface for iterating over {@link HFileBlock}s. */
  public interface BlockIterator {

    /**
     * Get the next block, or null if there are no more blocks to iterate.
     */
    HFileBlock nextBlock() throws IOException;

    /**
     * Similar to {@link #nextBlock()}, but checks the block type and throws
     * an exception if it is incorrect.
     */
    HFileBlock nextBlockWithBlockType(BlockType blockType) throws IOException;
  }

  /** A full-fledged reader with iteration ability. */
  public interface FSReader {

    /**
     * Reads the block at the given offset in the file with the given on-disk
     * size and uncompressed size.
     *
     * @param offset the offset in the file to read at
     * @param onDiskSize the on-disk size of the entire block, including all
     *          applicable headers, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the compressed part of
     *          the block, or -1 if unknown
     * @param pread whether to use a positional read
     * @return the newly read block
     */
    HFileBlock readBlockData(long offset, long onDiskSize,
        int uncompressedSize, boolean pread) throws IOException;

    /**
     * Creates a block iterator over the given portion of the {@link HFile}.
     * The iterator returns blocks starting with offset such that
     * startOffset &lt;= offset &lt; endOffset. Returned blocks are always
     * unpacked.
     *
     * @param startOffset the offset of the block to start iteration with
     * @param endOffset the offset to end iteration at (exclusive)
     * @return an iterator of blocks between the two given offsets
     */
    BlockIterator blockRange(long startOffset, long endOffset);

    /** Closes the backing streams. */
    void closeStreams() throws IOException;

    /** Get a decoder for {@link BlockType#ENCODED_DATA} blocks from this file. */
    HFileBlockDecodingContext getBlockDecodingContext();

    /** Get the default decoder for blocks from this file. */
    HFileBlockDecodingContext getDefaultBlockDecodingContext();
  }
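
  // A hedged sketch of iterating blocks with an FSReader (the "fsReader" and
  // "fileSize" names are hypothetical):
  //
  //   BlockIterator it = fsReader.blockRange(0, fileSize);
  //   for (HFileBlock b = it.nextBlock(); b != null; b = it.nextBlock()) {
  //     // b is already unpacked; inspect b.getBlockType(),
  //     // b.getByteStream(), etc.
  //   }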

  /**
   * A common implementation of some methods of {@link FSReader} and some
   * tools for implementing HFile format version-specific block readers.
   */
  private abstract static class AbstractFSReader implements FSReader {

    /** The size of the file we are reading from, or -1 if unknown. */
    protected long fileSize;

    /** The size of the header. */
    protected final int hdrSize;

    /** The filesystem used to access data. */
    protected HFileSystem hfs;

    /** The path (if any) where this data is coming from. */
    protected Path path;

    private final Lock streamLock = new ReentrantLock();

    /** The default buffer size for our buffered streams. */
    public static final int DEFAULT_BUFFER_SIZE = 1 << 20;

    protected HFileContext fileContext;

    public AbstractFSReader(long fileSize, HFileSystem hfs, Path path, HFileContext fileContext)
        throws IOException {
      this.fileSize = fileSize;
      this.hfs = hfs;
      this.path = path;
      this.fileContext = fileContext;
      this.hdrSize = headerSize(fileContext.isUseHBaseChecksum());
    }

    @Override
    public BlockIterator blockRange(final long startOffset,
        final long endOffset) {
      final FSReader owner = this; // handle for the inner class
      return new BlockIterator() {
        private long offset = startOffset;

        @Override
        public HFileBlock nextBlock() throws IOException {
          if (offset >= endOffset) {
            return null;
          }
          HFileBlock b = readBlockData(offset, -1, -1, false);
          offset += b.getOnDiskSizeWithHeader();
          return b.unpack(fileContext, owner);
        }

        @Override
        public HFileBlock nextBlockWithBlockType(BlockType blockType)
            throws IOException {
          HFileBlock blk = nextBlock();
          if (blk.getBlockType() != blockType) {
            throw new IOException("Expected block of type " + blockType
                + " but found " + blk.getBlockType());
          }
          return blk;
        }
      };
    }

    /**
     * Does a positional read or a seek-and-read into the given buffer.
     * Optionally peeks at the header of the next block.
     *
     * @param istream the input source of data
     * @param dest destination buffer
     * @param destOffset offset in the destination buffer
     * @param size size of the block to be read
     * @param peekIntoNextBlock whether to read the next block's on-disk size
     * @param fileOffset position in the stream to read at
     * @param pread whether we should do a positional read
     * @return the on-disk size of the next block, with the header size
     *         included, or -1 if it could not be determined
     */
    protected int readAtOffset(FSDataInputStream istream,
        byte[] dest, int destOffset, int size,
        boolean peekIntoNextBlock, long fileOffset, boolean pread)
        throws IOException {
      if (peekIntoNextBlock &&
          destOffset + size + hdrSize > dest.length) {
        // We are asked to read the next block's header as well, but there is
        // not enough room in the array.
        throw new IOException("Attempted to read " + size + " bytes and " +
            hdrSize + " bytes of next header into a " + dest.length +
            "-byte array at offset " + destOffset);
      }

      if (!pread && streamLock.tryLock()) {
        // Seek + read. Better for scanning.
        try {
          istream.seek(fileOffset);

          long realOffset = istream.getPos();
          if (realOffset != fileOffset) {
            throw new IOException("Tried to seek to " + fileOffset + " to "
                + "read " + size + " bytes, but pos=" + realOffset
                + " after seek");
          }

          if (!peekIntoNextBlock) {
            IOUtils.readFully(istream, dest, destOffset, size);
            return -1;
          }

          // Try to read the next block's header.
          if (!readWithExtra(istream, dest, destOffset, size, hdrSize)) {
            return -1;
          }
        } finally {
          streamLock.unlock();
        }
      } else {
        // Positional read. Better for random reads; also used when the
        // streamLock is already held by another thread.
        int extraSize = peekIntoNextBlock ? hdrSize : 0;
        int ret = istream.read(fileOffset, dest, destOffset, size + extraSize);
        if (ret < size) {
          throw new IOException("Positional read of " + size + " bytes " +
              "failed at offset " + fileOffset + " (returned " + ret + ")");
        }

        if (ret == size || ret < size + extraSize) {
          // Could not read the next block's header, or did not try.
          return -1;
        }
      }

      assert peekIntoNextBlock;
      return Bytes.toInt(dest, destOffset + size + BlockType.MAGIC_LENGTH) + hdrSize;
    }
  }

  /**
   * We always prefetch the header of the next block, so that we know its
   * on-disk size in advance and can read it in one operation.
   */
  private static class PrefetchedHeader {
    long offset = -1;
    byte[] header = new byte[HConstants.HFILEBLOCK_HEADER_SIZE];
    ByteBuffer buf = ByteBuffer.wrap(header, 0, HConstants.HFILEBLOCK_HEADER_SIZE);
  }

  /** Reads version 2 blocks from the filesystem. */
  static class FSReaderV2 extends AbstractFSReader {

    /**
     * The file system stream of the underlying {@link HFile} that
     * does or doesn't do checksum validations in the filesystem.
     */
    protected FSDataInputStreamWrapper streamWrapper;

    private HFileBlockDecodingContext encodedBlockDecodingCtx;

    /** Default context used when blockType is not {@link BlockType#ENCODED_DATA}. */
    private final HFileBlockDefaultDecodingContext defaultDecodingCtx;

    private ThreadLocal<PrefetchedHeader> prefetchedHeaderForThread =
        new ThreadLocal<PrefetchedHeader>() {
          @Override
          public PrefetchedHeader initialValue() {
            return new PrefetchedHeader();
          }
        };

    public FSReaderV2(FSDataInputStreamWrapper stream, long fileSize, HFileSystem hfs, Path path,
        HFileContext fileContext) throws IOException {
      super(fileSize, hfs, path, fileContext);
      this.streamWrapper = stream;
      // Older versions of HBase did not support checksums.
      this.streamWrapper.prepareForBlockReader(!fileContext.isUseHBaseChecksum());
      defaultDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext);
      encodedBlockDecodingCtx = defaultDecodingCtx;
    }

    /**
     * A constructor that reads files with the latest minor version. This is
     * used by unit tests only.
     */
    FSReaderV2(FSDataInputStream istream, long fileSize, HFileContext fileContext)
        throws IOException {
      this(new FSDataInputStreamWrapper(istream), fileSize, null, null, fileContext);
    }

    /**
     * Reads a version 2 block. Tries to do as little memory allocation as
     * possible, using the provided on-disk size.
     *
     * @param offset the offset in the stream to read at
     * @param onDiskSizeWithHeaderL the on-disk size of the block, including
     *          the header, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the block. Always
     *          expected to be -1; only used by the version 1 reader API.
     * @param pread whether to use a positional read
     */
    @Override
    public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL,
        int uncompressedSize, boolean pread) throws IOException {

      // Get a copy of the current state of whether to validate
      // hbase checksums or not for this read call. This is not
      // thread-safe, but the one constraint is that if we decide
      // to skip hbase checksum verification then we are
      // guaranteed to use hdfs checksum verification.
      boolean doVerificationThruHBaseChecksum = streamWrapper.shouldUseHBaseChecksum();
      FSDataInputStream is = streamWrapper.getStream(doVerificationThruHBaseChecksum);

      HFileBlock blk = readBlockDataInternal(is, offset,
          onDiskSizeWithHeaderL,
          uncompressedSize, pread,
          doVerificationThruHBaseChecksum);
      if (blk == null) {
        HFile.LOG.warn("HBase checksum verification failed for file " +
            path + " at offset " +
            offset + " filesize " + fileSize +
            ". Retrying read with HDFS checksums turned on...");

        if (!doVerificationThruHBaseChecksum) {
          String msg = "HBase checksum verification failed for file " +
              path + " at offset " +
              offset + " filesize " + fileSize +
              " but this cannot happen because doVerify is " +
              doVerificationThruHBaseChecksum;
          HFile.LOG.warn(msg);
          throw new IOException(msg); // cannot happen case here
        }
        HFile.checksumFailures.incrementAndGet(); // update metrics

        // If we have a checksum failure, we fall back into a mode where the
        // next few reads use HDFS level checksums. We aim to make the next
        // CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD reads avoid hbase checksum
        // verification, but since this value is set without holding any
        // locks, we may actually do a few more than precisely this number.
        is = this.streamWrapper.fallbackToFsChecksum(CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD);
        doVerificationThruHBaseChecksum = false;
        blk = readBlockDataInternal(is, offset, onDiskSizeWithHeaderL,
            uncompressedSize, pread,
            doVerificationThruHBaseChecksum);
        if (blk != null) {
          HFile.LOG.warn("HDFS checksum verification succeeded for file " +
              path + " at offset " +
              offset + " filesize " + fileSize);
        }
      }
      if (blk == null && !doVerificationThruHBaseChecksum) {
        String msg = "readBlockData failed, possibly due to " +
            "checksum verification failed for file " + path +
            " at offset " + offset + " filesize " + fileSize;
        HFile.LOG.warn(msg);
        throw new IOException(msg);
      }

      // If there was an earlier checksum mismatch, we retried with HBase
      // checksums switched off and relied on HDFS checksum verification,
      // which also lets HDFS detect and fix corrupt replicas. Signal that
      // this read completed fine so the wrapper can eventually re-enable
      // HBase checksum verification.
      streamWrapper.checksumOk();
      return blk;
    }

    /**
     * Reads a version 2 block.
     *
     * @param offset the offset in the stream to read at
     * @param onDiskSizeWithHeaderL the on-disk size of the block, including
     *          the header, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the block. Always
     *          expected to be -1; only used by the version 1 reader API.
     * @param pread whether to use a positional read
     * @param verifyChecksum whether to use HBase checksums. If HBase checksums
     *          are switched off, HDFS checksums are used instead.
     * @return the HFileBlock, or null on an HBase checksum mismatch
     */
    private HFileBlock readBlockDataInternal(FSDataInputStream is, long offset,
        long onDiskSizeWithHeaderL, int uncompressedSize, boolean pread,
        boolean verifyChecksum) throws IOException {
      if (offset < 0) {
        throw new IOException("Invalid offset=" + offset + " trying to read "
            + "block (onDiskSize=" + onDiskSizeWithHeaderL
            + ", uncompressedSize=" + uncompressedSize + ")");
      }
      if (uncompressedSize != -1) {
        throw new IOException("Version 2 block reader API does not need " +
            "the uncompressed size parameter");
      }

      if ((onDiskSizeWithHeaderL < hdrSize && onDiskSizeWithHeaderL != -1)
          || onDiskSizeWithHeaderL >= Integer.MAX_VALUE) {
        throw new IOException("Invalid onDisksize=" + onDiskSizeWithHeaderL
            + ": expected to be at least " + hdrSize
            + " and at most " + Integer.MAX_VALUE + ", or -1 (offset="
            + offset + ", uncompressedSize=" + uncompressedSize + ")");
      }

      int onDiskSizeWithHeader = (int) onDiskSizeWithHeaderL;

      // See if we can avoid reading the header. This is desirable, because we
      // will not incur a backward seek operation if we have already read this
      // block's header as part of the previous read's look-ahead.
      PrefetchedHeader prefetchedHeader = prefetchedHeaderForThread.get();
      ByteBuffer headerBuf = prefetchedHeader.offset == offset ?
          prefetchedHeader.buf : null;

      int nextBlockOnDiskSize = 0;
      // Allocated with enough space to fit the next block's header too.
      byte[] onDiskBlock = null;

      HFileBlock b = null;
      if (onDiskSizeWithHeader > 0) {
        // We know the total on-disk size. Read the entire block into memory,
        // then parse the header and decompress from memory if using
        // compression. This code path is used when doing a random read
        // operation relying on the block index, as well as when the client
        // knows the on-disk size from peeking into the next block's header
        // (i.e. this block's header) while reading the previous block. This
        // is the faster and more preferable case.

        int preReadHeaderSize = headerBuf == null ? 0 : hdrSize;
        onDiskBlock = new byte[onDiskSizeWithHeader + hdrSize];
        nextBlockOnDiskSize = readAtOffset(is, onDiskBlock,
            preReadHeaderSize, onDiskSizeWithHeader - preReadHeaderSize,
            true, offset + preReadHeaderSize, pread);
        if (headerBuf != null) {
          // The header has been read when reading the previous block; copy
          // it to this block's header.
          System.arraycopy(headerBuf.array(),
              headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize);
        } else {
          headerBuf = ByteBuffer.wrap(onDiskBlock, 0, hdrSize);
        }
        // We know the total on-disk size, but not the uncompressed size.
        // Parse the header.
        try {
          b = new HFileBlock(headerBuf, fileContext.isUseHBaseChecksum());
        } catch (IOException ex) {
          // Seen in load testing. Provide comprehensive debug info.
          throw new IOException("Failed to read compressed block at "
              + offset
              + ", onDiskSizeWithoutHeader="
              + onDiskSizeWithHeader
              + ", preReadHeaderSize="
              + hdrSize
              + ", header.length="
              + prefetchedHeader.header.length
              + ", header bytes: "
              + Bytes.toStringBinary(prefetchedHeader.header, 0,
                  hdrSize), ex);
        }
        // Since the caller specified an on-disk size, validate it.
        int onDiskSizeWithoutHeader = onDiskSizeWithHeader - hdrSize;
        assert onDiskSizeWithoutHeader >= 0;
        b.validateOnDiskSizeWithoutHeader(onDiskSizeWithoutHeader);
      } else {
        // We don't know the on-disk size. Check headerBuf to see if we have
        // already read this block's header as part of reading the previous
        // block. If the header is already there, don't read it again.

        // Unfortunately, we still have to do a separate read operation to
        // read the header if it was not prefetched.
        if (headerBuf == null) {
          // From the header, determine the on-disk size of the block and
          // read the remaining data, thereby incurring two read operations.
          // This might happen when we are doing the first read in a series
          // of reads or a random read, and we don't have access to the block
          // index. This is costly and should happen very rarely.
          headerBuf = ByteBuffer.allocate(hdrSize);
          readAtOffset(is, headerBuf.array(), headerBuf.arrayOffset(),
              hdrSize, false, offset, pread);
        }
        b = new HFileBlock(headerBuf, fileContext.isUseHBaseChecksum());
        onDiskBlock = new byte[b.getOnDiskSizeWithHeader() + hdrSize];
        System.arraycopy(headerBuf.array(), headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize);
        nextBlockOnDiskSize =
            readAtOffset(is, onDiskBlock, hdrSize, b.getOnDiskSizeWithHeader()
                - hdrSize, true, offset + hdrSize, pread);
        onDiskSizeWithHeader = b.onDiskSizeWithoutHeader + hdrSize;
      }

      if (!fileContext.isCompressedOrEncrypted()) {
        b.assumeUncompressed();
      }

      if (verifyChecksum && !validateBlockChecksum(b, onDiskBlock, hdrSize)) {
        return null; // checksum mismatch
      }

      // The onDiskBlock will become the header-and-data buffer of this block.
      // If nextBlockOnDiskSize is known, onDiskBlock already contains the
      // next block's header, so there is no need to set it separately.
      b = new HFileBlock(ByteBuffer.wrap(onDiskBlock, 0, onDiskSizeWithHeader),
          this.fileContext.isUseHBaseChecksum());

      b.nextBlockOnDiskSizeWithHeader = nextBlockOnDiskSize;

      // Set the prefetched header for the next read.
      if (b.hasNextBlockHeader()) {
        prefetchedHeader.offset = offset + b.getOnDiskSizeWithHeader();
        System.arraycopy(onDiskBlock, onDiskSizeWithHeader,
            prefetchedHeader.header, 0, hdrSize);
      }

      b.offset = offset;
      b.fileContext.setIncludesTags(this.fileContext.isIncludesTags());
      b.fileContext.setIncludesMvcc(this.fileContext.isIncludesMvcc());
      return b;
    }

    void setIncludesMemstoreTS(boolean includesMemstoreTS) {
      this.fileContext.setIncludesMvcc(includesMemstoreTS);
    }

    void setDataBlockEncoder(HFileDataBlockEncoder encoder) {
      encodedBlockDecodingCtx = encoder.newDataBlockDecodingContext(this.fileContext);
    }

    @Override
    public HFileBlockDecodingContext getBlockDecodingContext() {
      return this.encodedBlockDecodingCtx;
    }

    @Override
    public HFileBlockDecodingContext getDefaultBlockDecodingContext() {
      return this.defaultDecodingCtx;
    }

    /**
     * Generates the checksum for the header as well as the data and then
     * validates it against the value stored in the header. Returns false on
     * a checksum mismatch, true otherwise.
     */
    protected boolean validateBlockChecksum(HFileBlock block, byte[] data, int hdrSize)
        throws IOException {
      return ChecksumUtil.validateBlockChecksum(path, block, data, hdrSize);
    }

    @Override
    public void closeStreams() throws IOException {
      streamWrapper.close();
    }

    @Override
    public String toString() {
      return "FSReaderV2 [ hfs=" + hfs + " path=" + path + " fileContext=" + fileContext + " ]";
    }
  }

  @Override
  public int getSerializedLength() {
    if (buf != null) {
      // Include extra bytes for the next block's header, when present.
      int extraSpace = hasNextBlockHeader() ? headerSize() : 0;
      return this.buf.limit() + extraSpace + HFileBlock.EXTRA_SERIALIZATION_SPACE;
    }
    return 0;
  }

  @Override
  public void serialize(ByteBuffer destination) {
    // Assumes a heap buffer backed by an accessible array.
    destination.put(this.buf.array(), this.buf.arrayOffset(),
        getSerializedLength() - EXTRA_SERIALIZATION_SPACE);
    serializeExtraInfo(destination);
  }

  /** Writes the checksum flag, this block's file offset and the next block's on-disk size. */
  public void serializeExtraInfo(ByteBuffer destination) {
    destination.put(this.fileContext.isUseHBaseChecksum() ? (byte) 1 : (byte) 0);
    destination.putLong(this.offset);
    destination.putInt(this.nextBlockOnDiskSizeWithHeader);
    destination.rewind();
  }

  @Override
  public CacheableDeserializer<Cacheable> getDeserializer() {
    return HFileBlock.blockDeserializer;
  }

  @Override
  public boolean equals(Object comparison) {
    if (this == comparison) {
      return true;
    }
    if (comparison == null) {
      return false;
    }
    if (comparison.getClass() != this.getClass()) {
      return false;
    }

    HFileBlock castedComparison = (HFileBlock) comparison;

    if (castedComparison.blockType != this.blockType) {
      return false;
    }
    if (castedComparison.nextBlockOnDiskSizeWithHeader != this.nextBlockOnDiskSizeWithHeader) {
      return false;
    }
    if (castedComparison.offset != this.offset) {
      return false;
    }
    if (castedComparison.onDiskSizeWithoutHeader != this.onDiskSizeWithoutHeader) {
      return false;
    }
    if (castedComparison.prevBlockOffset != this.prevBlockOffset) {
      return false;
    }
    if (castedComparison.uncompressedSizeWithoutHeader != this.uncompressedSizeWithoutHeader) {
      return false;
    }
    if (Bytes.compareTo(this.buf.array(), this.buf.arrayOffset(), this.buf.limit(),
        castedComparison.buf.array(), castedComparison.buf.arrayOffset(),
        castedComparison.buf.limit()) != 0) {
      return false;
    }
    return true;
  }

  /**
   * Added to keep the {@code equals}/{@code hashCode} contract: equal blocks
   * hash to the same value. Uses a subset of the fields compared above.
   */
  @Override
  public int hashCode() {
    int result = blockType.hashCode();
    result = 31 * result + nextBlockOnDiskSizeWithHeader;
    result = 31 * result + (int) (offset ^ (offset >>> 32));
    result = 31 * result + onDiskSizeWithoutHeader;
    result = 31 * result + (int) (prevBlockOffset ^ (prevBlockOffset >>> 32));
    result = 31 * result + uncompressedSizeWithoutHeader;
    return result;
  }

  public DataBlockEncoding getDataBlockEncoding() {
    if (blockType == BlockType.ENCODED_DATA) {
      return DataBlockEncoding.getEncodingById(getDataBlockEncodingId());
    }
    return DataBlockEncoding.NONE;
  }

  byte getChecksumType() {
    return this.fileContext.getChecksumType().getCode();
  }

  int getBytesPerChecksum() {
    return this.fileContext.getBytesPerChecksum();
  }

  /** @return the size of data on disk + header, excluding checksum */
  int getOnDiskDataSizeWithHeader() {
    return this.onDiskDataSizeWithHeader;
  }

  /**
   * Calculate the number of bytes required to store all the checksums for
   * this block. Each checksum value is a 4-byte integer
   * ({@link #CHECKSUM_SIZE}).
   */
  int totalChecksumBytes() {
    // If the hfile block has minor version 0, then there is no checksum data
    // to validate. Similarly, a zero value in this.bytesPerChecksum indicates
    // that cached blocks do not carry checksum data, because checksums were
    // already validated when the block was read from disk.
    if (!fileContext.isUseHBaseChecksum() || this.fileContext.getBytesPerChecksum() == 0) {
      return 0;
    }
    return (int) ChecksumUtil.numBytes(onDiskDataSizeWithHeader,
        this.fileContext.getBytesPerChecksum());
  }
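
  // Worked example with illustrative numbers, assuming ChecksumUtil.numBytes
  // rounds the data size up to whole bytesPerChecksum chunks: with
  // onDiskDataSizeWithHeader = 65569 and bytesPerChecksum = 16384, the data
  // is covered by ceil(65569 / 16384) = 5 checksum chunks, each stored as a
  // 4-byte integer, so totalChecksumBytes() returns 5 * CHECKSUM_SIZE = 20.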

  /**
   * Returns the size of this block's header.
   */
  public int headerSize() {
    return headerSize(this.fileContext.isUseHBaseChecksum());
  }

  /**
   * Maps a minor version to the size of the header.
   */
  public static int headerSize(boolean usesHBaseChecksum) {
    if (usesHBaseChecksum) {
      return HConstants.HFILEBLOCK_HEADER_SIZE;
    }
    return HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
  }
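
  // For orientation (values as defined in HConstants at the time of writing):
  // HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM is 8 (magic) + 4 + 4 + 8 = 24 bytes,
  // and HFILEBLOCK_HEADER_SIZE adds 1 (checksum type) + 4 (bytesPerChecksum)
  // + 4 (onDiskDataSizeWithHeader), for 33 bytes in total.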

  /**
   * Return the appropriate DUMMY_HEADER for the minor version.
   */
  public byte[] getDummyHeaderForVersion() {
    return getDummyHeaderForVersion(this.fileContext.isUseHBaseChecksum());
  }

  /**
   * Return the appropriate DUMMY_HEADER for the minor version.
   */
  private static byte[] getDummyHeaderForVersion(boolean usesHBaseChecksum) {
    if (usesHBaseChecksum) {
      return HConstants.HFILEBLOCK_DUMMY_HEADER;
    }
    return DUMMY_HEADER_NO_CHECKSUM;
  }

  /**
   * @return the HFileContext used to create this HFileBlock. Not necessarily
   *         the fileContext of the file from which this block's data was
   *         originally read.
   */
  public HFileContext getHFileContext() {
    return this.fileContext;
  }

  /**
   * Converts the contents of the block header into a human-readable string.
   * Mostly helpful for debugging. Assumes the block has minor version > 0,
   * i.e. that the checksum fields are present.
   */
  static String toStringHeader(ByteBuffer buf) throws IOException {
    int offset = buf.arrayOffset();
    byte[] b = buf.array();
    long magic = Bytes.toLong(b, offset);
    BlockType bt = BlockType.read(buf);
    offset += Bytes.SIZEOF_LONG;
    int compressedBlockSizeNoHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    int uncompressedBlockSizeNoHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    long prevBlockOffset = Bytes.toLong(b, offset);
    offset += Bytes.SIZEOF_LONG;
    byte cksumtype = b[offset];
    offset += Bytes.SIZEOF_BYTE;
    long bytesPerChecksum = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    long onDiskDataSizeWithHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    return " Header dump: magic: " + magic +
        " blockType " + bt +
        " compressedBlockSizeNoHeader " +
        compressedBlockSizeNoHeader +
        " uncompressedBlockSizeNoHeader " +
        uncompressedBlockSizeNoHeader +
        " prevBlockOffset " + prevBlockOffset +
        " checksumType " + ChecksumType.codeToType(cksumtype) +
        " bytesPerChecksum " + bytesPerChecksum +
        " onDiskDataSizeWithHeader " + onDiskDataSizeWithHeader;
  }
}