/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
import org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.BufferGrabbingByteArrayOutputStream;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
@Category(SmallTests.class)
public class TestHFileDataBlockEncoder {
  private HFileDataBlockEncoder blockEncoder;
  private RedundantKVGenerator generator = new RedundantKVGenerator();
  private boolean includesMemstoreTS;

  /**
   * Create a test instance for the given data block encoder and
   * memstore-timestamp setting.
   */
  public TestHFileDataBlockEncoder(HFileDataBlockEncoder blockEncoder,
      boolean includesMemstoreTS) {
    this.blockEncoder = blockEncoder;
    this.includesMemstoreTS = includesMemstoreTS;
    System.err.println("Encoding: " + blockEncoder.getDataBlockEncoding()
        + ", includesMemstoreTS: " + includesMemstoreTS);
  }

  /**
   * Test putting blocks into the cache and taking them back out with
   * different encoding options.
   */
  @Test
  public void testEncodingWithCache() throws IOException {
    testEncodingWithCacheInternals(false);
    testEncodingWithCacheInternals(true);
  }

  private void testEncodingWithCacheInternals(boolean useTag) throws IOException {
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTag);
    HFileBlock block = getSampleHFileBlock(kvs, useTag);
    HFileBlock cacheBlock = createBlockOnDisk(kvs, block, useTag);

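    // Cache the encoded block in an LRU block cache, then read it back and verify it.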
    LruBlockCache blockCache =
        new LruBlockCache(8 * 1024 * 1024, 32 * 1024);
    BlockCacheKey cacheKey = new BlockCacheKey("test", 0);
    blockCache.cacheBlock(cacheKey, cacheBlock);

    HeapSize heapSize = blockCache.getBlock(cacheKey, false, false, true);
    assertTrue(heapSize instanceof HFileBlock);

    HFileBlock returnedBlock = (HFileBlock) heapSize;

    if (blockEncoder.getDataBlockEncoding() ==
        DataBlockEncoding.NONE) {
      assertEquals(block.getBufferWithHeader(),
          returnedBlock.getBufferWithHeader());
    } else {
      if (BlockType.ENCODED_DATA != returnedBlock.getBlockType()) {
        System.out.println(blockEncoder);
      }
      assertEquals(BlockType.ENCODED_DATA, returnedBlock.getBlockType());
    }
  }

  /**
   * Test that a block cached with HBase checksums disabled reports the
   * checksum-less header size.
   */
  @Test
  public void testHeaderSizeInCacheWithoutChecksum() throws Exception {
    testHeaderSizeInCacheWithoutChecksumInternals(false);
    testHeaderSizeInCacheWithoutChecksumInternals(true);
  }

  private void testHeaderSizeInCacheWithoutChecksumInternals(boolean useTags) throws IOException {
    int headerSize = HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
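    // Generate some KVs and build a DATA block whose buffer reserves the checksum-less header.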
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTags);
    ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(kvs, includesMemstoreTS);
    int size = keyValues.limit();
    ByteBuffer buf = ByteBuffer.allocate(size + headerSize);
    buf.position(headerSize);
    keyValues.rewind();
    buf.put(keyValues);
    HFileContext hfileContext = new HFileContextBuilder().withHBaseCheckSum(false)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTags)
        .withBlockSize(0)
        .withChecksumType(ChecksumType.NULL)
        .build();
    HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
        HFileBlock.FILL_HEADER, 0,
        0, hfileContext);
    HFileBlock cacheBlock = createBlockOnDisk(kvs, block, useTags);
    assertEquals(headerSize, cacheBlock.getDummyHeaderForVersion().length);
  }

  /**
   * Test writing encoded blocks to disk.
   */
  @Test
  public void testEncoding() throws IOException {
    testEncodingInternals(false);
    testEncodingInternals(true);
  }

  private void testEncodingInternals(boolean useTag) throws IOException {
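    // Encode a block of generated KVs and check the resulting block type and encoding id.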
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTag);
    HFileBlock block = getSampleHFileBlock(kvs, useTag);
    HFileBlock blockOnDisk = createBlockOnDisk(kvs, block, useTag);

    if (blockEncoder.getDataBlockEncoding() !=
        DataBlockEncoding.NONE) {
      assertEquals(BlockType.ENCODED_DATA, blockOnDisk.getBlockType());
      assertEquals(blockEncoder.getDataBlockEncoding().getId(),
          blockOnDisk.getDataBlockEncodingId());
    } else {
      assertEquals(BlockType.DATA, blockOnDisk.getBlockType());
    }
  }

  private HFileBlock getSampleHFileBlock(List<KeyValue> kvs, boolean useTag) {
    ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(kvs, includesMemstoreTS);
    int size = keyValues.limit();
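    // Allocate a buffer with room for the block header in front of the serialized KVs.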
    ByteBuffer buf = ByteBuffer.allocate(size + HConstants.HFILEBLOCK_HEADER_SIZE);
    buf.position(HConstants.HFILEBLOCK_HEADER_SIZE);
    keyValues.rewind();
    buf.put(keyValues);
    HFileContext meta = new HFileContextBuilder()
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTag)
        .withHBaseCheckSum(true)
        .withCompression(Algorithm.NONE)
        .withBlockSize(0)
        .withChecksumType(ChecksumType.NULL)
        .build();
    HFileBlock b = new HFileBlock(BlockType.DATA, size, size, -1, buf,
        HFileBlock.FILL_HEADER, 0,
        0, meta);
    return b;
  }

  private HFileBlock createBlockOnDisk(List<KeyValue> kvs, HFileBlock block, boolean useTags)
      throws IOException {
    int size;
    HFileBlockEncodingContext context = new HFileBlockDefaultEncodingContext(
        blockEncoder.getDataBlockEncoding(), HConstants.HFILEBLOCK_DUMMY_HEADER,
        block.getHFileContext());

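    // Write the dummy header first, then stream each KeyValue through the block encoder.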
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(block.getDummyHeaderForVersion());
    DataOutputStream dos = new DataOutputStream(baos);
    blockEncoder.startBlockEncoding(context, dos);
    for (KeyValue kv : kvs) {
      blockEncoder.encode(kv, context, dos);
    }
    BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream();
    baos.writeTo(stream);
    blockEncoder.endBlockEncoding(context, dos, stream.getBuffer(), BlockType.DATA);
    byte[] encodedBytes = baos.toByteArray();
    size = encodedBytes.length - block.getDummyHeaderForVersion().length;
    return new HFileBlock(context.getBlockType(), size, size, -1, ByteBuffer.wrap(encodedBytes),
        HFileBlock.FILL_HEADER, 0, block.getOnDiskDataSizeWithHeader(), block.getHFileContext());
  }

  /**
   * @return all combinations of data block encoding and includesMemstoreTS
   */
  @Parameters
  public static Collection<Object[]> getAllConfigurations() {
    List<Object[]> configurations =
        new ArrayList<Object[]>();

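    // Pair every available encoding with both includesMemstoreTS settings; NONE uses the no-op encoder.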
    for (DataBlockEncoding diskAlgo : DataBlockEncoding.values()) {
      for (boolean includesMemstoreTS : new boolean[] { false, true }) {
        HFileDataBlockEncoder dbe = (diskAlgo == DataBlockEncoding.NONE) ?
            NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(diskAlgo);
        configurations.add(new Object[] { dbe, Boolean.valueOf(includesMemstoreTS) });
      }
    }

    return configurations;
  }
}