/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.mapreduce;
19
20 import static org.junit.Assert.assertEquals;
21
22 import java.util.HashMap;
23 import java.util.Map;
24
25 import org.apache.commons.logging.Log;
26 import org.apache.commons.logging.LogFactory;
27 import org.apache.hadoop.fs.FileStatus;
28 import org.apache.hadoop.fs.FileSystem;
29 import org.apache.hadoop.fs.Path;
30 import org.apache.hadoop.hbase.HBaseTestingUtility;
31 import org.apache.hadoop.hbase.TableName;
32 import org.apache.hadoop.hbase.client.HTable;
33 import org.apache.hadoop.hbase.client.Put;
34 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
35 import org.apache.hadoop.hbase.testclassification.LargeTests;
36 import org.apache.hadoop.hbase.util.Bytes;
37 import org.apache.hadoop.io.MapFile;
38 import org.junit.AfterClass;
39 import org.junit.Assert;
40 import org.junit.BeforeClass;
41 import org.junit.Test;
42 import org.junit.experimental.categories.Category;
43
44 import com.google.common.collect.ImmutableMap;
45 import com.google.common.collect.Maps;
46
47
48
49
50 @Category(LargeTests.class)
51 public class TestHashTable {
52
53 private static final Log LOG = LogFactory.getLog(TestHashTable.class);
54
55 private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
56
57 @BeforeClass
58 public static void beforeClass() throws Exception {
59 TEST_UTIL.startMiniCluster(3);
60 TEST_UTIL.startMiniMapReduceCluster();
61 }
62
63 @AfterClass
64 public static void afterClass() throws Exception {
65 TEST_UTIL.shutdownMiniMapReduceCluster();
66 TEST_UTIL.shutdownMiniCluster();
67 }
68
69 @Test
70 public void testHashTable() throws Exception {
71 final String tableName = "testHashTable";
72 final byte[] family = Bytes.toBytes("family");
73 final byte[] column1 = Bytes.toBytes("c1");
74 final byte[] column2 = Bytes.toBytes("c2");
75 final byte[] column3 = Bytes.toBytes("c3");
76
77 int numRows = 100;
78 int numRegions = 10;
79 int numHashFiles = 3;
80
81 byte[][] splitRows = new byte[numRegions-1][];
82 for (int i = 1; i < numRegions; i++) {
83 splitRows[i-1] = Bytes.toBytes(numRows * i / numRegions);
84 }
85
86 long timestamp = 1430764183454L;
87
88 HTable t1 = TEST_UTIL.createTable(TableName.valueOf(tableName), family, splitRows);
89 for (int i = 0; i < numRows; i++) {
90 Put p = new Put(Bytes.toBytes(i), timestamp);
91 p.addColumn(family, column1, column1);
92 p.addColumn(family, column2, column2);
93 p.addColumn(family, column3, column3);
94 t1.put(p);
95 }
96 t1.close();
97
98 HashTable hashTable = new HashTable(TEST_UTIL.getConfiguration());
99
100 Path testDir = TEST_UTIL.getDataTestDirOnTestFS(tableName);
101
102 long batchSize = 300;
103 int code = hashTable.run(new String[] {
104 "--batchsize=" + batchSize,
105 "--numhashfiles=" + numHashFiles,
106 "--scanbatch=2",
107 tableName,
108 testDir.toString()});
109 assertEquals("test job failed", 0, code);
110
111 FileSystem fs = TEST_UTIL.getTestFileSystem();
112
113 HashTable.TableHash tableHash = HashTable.TableHash.read(fs.getConf(), testDir);
114 assertEquals(tableName, tableHash.tableName);
115 assertEquals(batchSize, tableHash.batchSize);
116 assertEquals(numHashFiles, tableHash.numHashFiles);
117 assertEquals(numHashFiles - 1, tableHash.partitions.size());
118 for (ImmutableBytesWritable bytes : tableHash.partitions) {
119 LOG.debug("partition: " + Bytes.toInt(bytes.get()));
120 }
121
122 ImmutableMap<Integer, ImmutableBytesWritable> expectedHashes
123 = ImmutableMap.<Integer, ImmutableBytesWritable>builder()
124 .put(-1, new ImmutableBytesWritable(Bytes.fromHex("714cb10a9e3b5569852980edd8c6ca2f")))
125 .put(5, new ImmutableBytesWritable(Bytes.fromHex("28d961d9252ce8f8d44a07b38d3e1d96")))
126 .put(10, new ImmutableBytesWritable(Bytes.fromHex("f6bbc4a224d8fd929b783a92599eaffa")))
127 .put(15, new ImmutableBytesWritable(Bytes.fromHex("522deb5d97f73a414ecc11457be46881")))
128 .put(20, new ImmutableBytesWritable(Bytes.fromHex("b026f2611aaa46f7110116d807545352")))
129 .put(25, new ImmutableBytesWritable(Bytes.fromHex("39ffc1a3094aa12a2e90ffd9cef2ce93")))
130 .put(30, new ImmutableBytesWritable(Bytes.fromHex("f6b4d75727ce9a30ac29e4f08f601666")))
131 .put(35, new ImmutableBytesWritable(Bytes.fromHex("422e2d2f1eb79a8f02171a705a42c090")))
132 .put(40, new ImmutableBytesWritable(Bytes.fromHex("559ad61c900fffefea0a15abf8a97bc3")))
133 .put(45, new ImmutableBytesWritable(Bytes.fromHex("23019084513eca41cee436b2a29611cb")))
134 .put(50, new ImmutableBytesWritable(Bytes.fromHex("b40467d222ddb4949b142fe145ee9edc")))
135 .put(55, new ImmutableBytesWritable(Bytes.fromHex("372bf89fcd8ca4b7ab3c1add9d07f7e4")))
136 .put(60, new ImmutableBytesWritable(Bytes.fromHex("69ae0585e6255de27dce974e332b8f8b")))
137 .put(65, new ImmutableBytesWritable(Bytes.fromHex("8029610044297aad0abdbecd485d8e59")))
138 .put(70, new ImmutableBytesWritable(Bytes.fromHex("de5f784f7f78987b6e57ecfd81c8646f")))
139 .put(75, new ImmutableBytesWritable(Bytes.fromHex("1cd757cc4e1715c8c3b1c24447a1ec56")))
140 .put(80, new ImmutableBytesWritable(Bytes.fromHex("f9a53aacfeb6142b08066615e7038095")))
141 .put(85, new ImmutableBytesWritable(Bytes.fromHex("89b872b7e639df32d3276b33928c0c91")))
142 .put(90, new ImmutableBytesWritable(Bytes.fromHex("45eeac0646d46a474ea0484175faed38")))
143 .put(95, new ImmutableBytesWritable(Bytes.fromHex("f57c447e32a08f4bf1abb2892839ac56")))
144 .build();
145
146 Map<Integer, ImmutableBytesWritable> actualHashes
147 = new HashMap<Integer, ImmutableBytesWritable>();
148 Path dataDir = new Path(testDir, HashTable.HASH_DATA_DIR);
149 for (int i = 0; i < numHashFiles; i++) {
150 Path hashPath = new Path(dataDir, HashTable.TableHash.getDataFileName(i));
151
152 MapFile.Reader reader = new MapFile.Reader(hashPath, fs.getConf());
153 ImmutableBytesWritable key = new ImmutableBytesWritable();
154 ImmutableBytesWritable hash = new ImmutableBytesWritable();
155 while(reader.next(key, hash)) {
156 String keyString = Bytes.toHex(key.get(), key.getOffset(), key.getLength());
157 LOG.debug("Key: " + (keyString.isEmpty() ? "-1" : Integer.parseInt(keyString, 16))
158 + " Hash: " + Bytes.toHex(hash.get(), hash.getOffset(), hash.getLength()));
159
160 int intKey = -1;
161 if (key.getLength() > 0) {
162 intKey = Bytes.toInt(key.get(), key.getOffset(), key.getLength());
163 }
164 if (actualHashes.containsKey(intKey)) {
165 Assert.fail("duplicate key in data files: " + intKey);
166 }
167 actualHashes.put(intKey, new ImmutableBytesWritable(hash.copyBytes()));
168 }
169 reader.close();
170 }
171
172 FileStatus[] files = fs.listStatus(testDir);
173 for (FileStatus file : files) {
174 LOG.debug("Output file: " + file.getPath());
175 }
176
177 files = fs.listStatus(dataDir);
178 for (FileStatus file : files) {
179 LOG.debug("Data file: " + file.getPath());
180 }
181
182 if (!expectedHashes.equals(actualHashes)) {
183 LOG.error("Diff: " + Maps.difference(expectedHashes, actualHashes));
184 }
185 Assert.assertEquals(expectedHashes, actualHashes);
186
187 TEST_UTIL.deleteTable(tableName);
188 TEST_UTIL.cleanupDataTestDirOnTestFS();
189 }
190
191
192 }