package org.apache.hadoop.hbase.io.hfile;

import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;

/**
 * {@link HFile} writer for version 3.
 */
@InterfaceAudience.Private
public class HFileWriterV3 extends HFileWriterV2 {

  private static final Log LOG = LogFactory.getLog(HFileWriterV3.class);

  // Maximum tags length seen across all cells written into this file; recorded in the file info.
  private int maxTagsLength = 0;

  static class WriterFactoryV3 extends HFile.WriterFactory {
    WriterFactoryV3(Configuration conf, CacheConfig cacheConf) {
      super(conf, cacheConf);
    }

    @Override
    public Writer createWriter(FileSystem fs, Path path, FSDataOutputStream ostream,
        final KVComparator comparator, HFileContext fileContext) throws IOException {
      return new HFileWriterV3(conf, cacheConf, fs, path, ostream, comparator, fileContext);
    }
  }

  public HFileWriterV3(Configuration conf, CacheConfig cacheConf, FileSystem fs, Path path,
      FSDataOutputStream ostream, final KVComparator comparator,
      final HFileContext fileContext) throws IOException {
    super(conf, cacheConf, fs, path, ostream, comparator, fileContext);
    if (LOG.isTraceEnabled()) {
      LOG.trace("Writer" + (path != null ? " for " + path : "") +
          " initialized with cacheConf: " + cacheConf +
          " comparator: " + comparator.getClass().getSimpleName() +
          " fileContext: " + fileContext);
    }
  }
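
  /*
   * A minimal usage sketch, not taken from this file. Assumptions: "fs" is an open FileSystem,
   * "kv" is a KeyValue, the path literal is an example, and passing a null ostream relies on the
   * superclass opening the stream from the path. Writers are normally obtained through
   * HFile.getWriterFactory(conf, cacheConf) rather than constructed directly.
   *
   *   Configuration conf = HBaseConfiguration.create();
   *   CacheConfig cacheConf = new CacheConfig(conf);
   *   HFileContext context = new HFileContextBuilder().withIncludesTags(true).build();
   *   HFile.Writer writer = new WriterFactoryV3(conf, cacheConf)
   *       .createWriter(fs, new Path("/tmp/example.hfile"), null, KeyValue.COMPARATOR, context);
   *   try {
   *     writer.append(kv);   // KeyValues must arrive in comparator order
   *   } finally {
   *     writer.close();
   *   }
   */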

  /**
   * Add key/value to file. Keys must be added in an order that agrees with the Comparator passed
   * on construction.
   * @param kv KeyValue to add. Cannot be empty nor null.
   */
  @Override
  public void append(final KeyValue kv) throws IOException {
    // Pull key, value and tags straight out of the KeyValue's backing array.
    append(kv.getMvccVersion(), kv.getBuffer(), kv.getKeyOffset(), kv.getKeyLength(),
        kv.getBuffer(), kv.getValueOffset(), kv.getValueLength(), kv.getBuffer(),
        kv.getTagsOffset(), kv.getTagsLengthUnsigned());
    this.maxMemstoreTS = Math.max(this.maxMemstoreTS, kv.getMvccVersion());
  }

  /**
   * Add key/value to file. Keys must be added in an order that agrees with the Comparator passed
   * on construction. The cell is written with an empty tags section.
   * @param key Key to add. Cannot be empty nor null.
   * @param value Value to add. Cannot be empty nor null.
   */
  @Override
  public void append(final byte[] key, final byte[] value) throws IOException {
    append(key, value, HConstants.EMPTY_BYTE_ARRAY);
  }

  /**
   * Add key/value to file. Keys must be added in an order that agrees with the Comparator passed
   * on construction.
   * @param key Key to add. Cannot be empty nor null.
   * @param value Value to add. Cannot be empty nor null.
   * @param tag Tag bytes to add. Cannot be null; may be empty.
   */
  @Override
  public void append(final byte[] key, final byte[] value, byte[] tag) throws IOException {
    append(0, key, 0, key.length, value, 0, value.length, tag, 0, tag.length);
  }
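
  /*
   * A minimal sketch of the raw byte[] overloads above; the variable names and literals are
   * assumptions, not values from this file. keyBytes is expected to be a full HFile key (the key
   * portion of a KeyValue), and successive keys must be in comparator order.
   *
   *   writer.append(keyBytes, Bytes.toBytes("v1"));              // no tags
   *   writer.append(keyBytes2, Bytes.toBytes("v2"), tagBytes);   // tagBytes becomes the cell's tags section
   */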

  /**
   * Add key/value to file. Keys must be added in an order that agrees with the Comparator passed
   * on construction.
   * @param memstoreTS MVCC/memstore timestamp, written after the cell when MVCC is enabled
   * @param key byte array containing the key
   * @param koffset offset of the key in its array
   * @param klength length of the key
   * @param value byte array containing the value
   * @param voffset offset of the value in its array
   * @param vlength length of the value
   * @param tag byte array containing the serialized tags
   * @param tagsOffset offset of the tags in their array
   * @param tagsLength length of the tags
   */
  private void append(final long memstoreTS, final byte[] key, final int koffset,
      final int klength, final byte[] value, final int voffset, final int vlength,
      final byte[] tag, final int tagsOffset, final int tagsLength) throws IOException {
    boolean dupKey = checkKey(key, koffset, klength);
    checkValue(value, voffset, vlength);
    if (!dupKey) {
      checkBlockBoundary();
    }

    if (!fsBlockWriter.isWriting()) {
      newBlock();
    }

    // Write the lengths of the key and value, then the key and value bytes themselves.
    // When tags are enabled the tags length and bytes follow, and when MVCC is enabled the
    // memstoreTS is written last as a vlong.
    {
      DataOutputStream out = fsBlockWriter.getUserDataStream();
      out.writeInt(klength);
      totalKeyLength += klength;
      out.writeInt(vlength);
      totalValueLength += vlength;
      out.write(key, koffset, klength);
      out.write(value, voffset, vlength);
      // Write the tags section, if this file includes tags.
      if (hFileContext.isIncludesTags()) {
        out.writeShort(tagsLength);
        if (tagsLength > 0) {
          out.write(tag, tagsOffset, tagsLength);
          // Track the maximum tags length for the file info written at close time.
          if (tagsLength > maxTagsLength) {
            maxTagsLength = tagsLength;
          }
        }
      }
      if (this.hFileContext.isIncludesMvcc()) {
        WritableUtils.writeVLong(out, memstoreTS);
      }
    }

    // Are we the first key in this block?
    if (firstKeyInBlock == null) {
      // Copy the key; it becomes this block's first key.
      firstKeyInBlock = new byte[klength];
      System.arraycopy(key, koffset, firstKeyInBlock, 0, klength);
    }

    lastKeyBuffer = key;
    lastKeyOffset = koffset;
    lastKeyLength = klength;
    entryCount++;
  }
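
  /*
   * The cell layout produced by the append above can be read back in the same order. A minimal
   * reader-side sketch; the DataInputStream "in" over the unencoded block contents and the two
   * boolean flags are assumptions, not part of this class:
   *
   *   int klength = in.readInt();
   *   int vlength = in.readInt();
   *   byte[] key = new byte[klength];
   *   in.readFully(key);
   *   byte[] value = new byte[vlength];
   *   in.readFully(value);
   *   if (includesTags) {
   *     int tagsLength = in.readUnsignedShort();
   *     byte[] tags = new byte[tagsLength];
   *     in.readFully(tags);
   *   }
   *   if (includesMvcc) {
   *     long memstoreTS = WritableUtils.readVLong(in);
   *   }
   */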

  @Override
  protected void finishFileInfo() throws IOException {
    super.finishFileInfo();
    if (hFileContext.getDataBlockEncoding() == DataBlockEncoding.PREFIX_TREE) {
      // Prefix Tree encoding always writes a tags section, even when no cell carries tags,
      // so record MAX_TAGS_LEN unconditionally.
      fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false);
    } else if (hFileContext.isIncludesTags()) {
      // Record MAX_TAGS_LEN and whether tags are compressed only when this file includes tags.
      fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false);
      boolean tagsCompressed = (hFileContext.getDataBlockEncoding() != DataBlockEncoding.NONE)
          && hFileContext.isCompressTags();
      fileInfo.append(FileInfo.TAGS_COMPRESSED, Bytes.toBytes(tagsCompressed), false);
    }
  }
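
  /*
   * Reader-side sketch for the file info entries recorded above. Assumptions: "reader" is an
   * HFile.Reader whose loadFileInfo() returns the file info map, and the file was written with
   * tags enabled.
   *
   *   Map<byte[], byte[]> info = reader.loadFileInfo();
   *   int maxTagsLen = Bytes.toInt(info.get(FileInfo.MAX_TAGS_LEN));
   *   boolean tagsCompressed = Bytes.toBoolean(info.get(FileInfo.TAGS_COMPRESSED));
   */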

  @Override
  protected int getMajorVersion() {
    return 3;
  }

  @Override
  protected int getMinorVersion() {
    return HFileReaderV3.MAX_MINOR_VERSION;
  }

  @Override
  protected void finishClose(FixedFileTrailer trailer) throws IOException {
    // Write out encryption metadata before finalizing if we have a valid crypto context.
    Encryption.Context cryptoContext = hFileContext.getEncryptionContext();
    if (cryptoContext != Encryption.Context.NONE) {
      // Wrap the context's data key and store it in the trailer; the wrapped form carries what is
      // needed to unwrap it again at read time.
      trailer.setEncryptionKey(EncryptionUtil.wrapKey(cryptoContext.getConf(),
        cryptoContext.getConf().get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY,
          User.getCurrent().getShortName()),
        cryptoContext.getKey()));
    }
    // Now we can finish the close.
    super.finishClose(trailer);
  }
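
  /*
   * A minimal configuration sketch for the key wrapping above. The alias literal is an assumed
   * example; when the property is unset, the current user's short name is used as the master key
   * alias, as in the code above.
   *
   *   Configuration conf = HBaseConfiguration.create();
   *   conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "example-master-key-alias");
   */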

}