/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputFilesFilter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(LargeTests.class)
public class TestImportTsv implements Configurable {

  protected static final Log LOG = LogFactory.getLog(TestImportTsv.class);
  protected static final String NAME = TestImportTsv.class.getSimpleName();
  protected static HBaseTestingUtility util = new HBaseTestingUtility();

  /**
   * Delete the tmp directory after running doMROnTableTest. Boolean, default is true.
   */
  protected static final String DELETE_AFTER_LOAD_CONF = NAME + ".deleteAfterLoad";

  /**
   * Force the use of a combiner in doMROnTableTest. Boolean, default is true.
   */
  protected static final String FORCE_COMBINER_CONF = NAME + ".forceCombiner";

  private final String FAMILY = "FAM";

  public Configuration getConf() {
    return util.getConfiguration();
  }

  public void setConf(Configuration conf) {
    throw new IllegalArgumentException("setConf not supported");
  }

  @BeforeClass
  public static void provisionCluster() throws Exception {
    util.startMiniCluster();
    util.startMiniMapReduceCluster();
  }

  @AfterClass
  public static void releaseCluster() throws Exception {
    util.shutdownMiniMapReduceCluster();
    util.shutdownMiniCluster();
  }

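  /**
   * Runs a plain ImportTsv job against a pre-created table and validates the
   * imported rows.
   */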
  @Test
  public void testMROnTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        table
    };

    util.createTable(TableName.valueOf(table), FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 1);
    util.deleteTable(table);
  }

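  /**
   * Like {@link #testMROnTable()}, but supplies the cell timestamp through the
   * HBASE_TS_KEY column and uses a comma separator.
   */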
  @Test
  public void testMROnTableWithTimestamp() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY
            + "=HBASE_ROW_KEY,HBASE_TS_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        table
    };
    String data = "KEY,1234,VALUE1,VALUE2\n";

    util.createTable(TableName.valueOf(table), FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1);
    util.deleteTable(table);
  }

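  /**
   * Runs ImportTsv with a custom mapper class (TsvImporterCustomTestMapper) and
   * validates the table contents that mapper is expected to produce.
   */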
  @Test
  public void testMROnTableWithCustomMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapper",
        table
    };

    util.createTable(TableName.valueOf(table), FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

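  /**
   * Uses ImportTsv.BULK_OUTPUT_CONF_KEY against a table that does not exist yet;
   * ImportTsv is expected to create the table and write HFiles instead of Puts.
   */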
  @Test
  public void testBulkOutputWithoutAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        table
    };

    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

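  /**
   * Same as above, but the target table is created before the bulk-output job runs.
   */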
  @Test
  public void testBulkOutputWithAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        table
    };

    util.createTable(TableName.valueOf(table), FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

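  /**
   * Bulk-output variant with ImportTsv.NO_STRICT_COL_FAMILY set to true.
   */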
  @Test
  public void testBulkOutputWithAnExistingTableNoStrictTrue() throws Exception {
    String table = "test-" + UUID.randomUUID();

    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        "-D" + ImportTsv.NO_STRICT_COL_FAMILY + "=true",
        table
    };
    util.createTable(TableName.valueOf(table), FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

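  /**
   * Verifies that configuring TsvImporterTextMapper wires up TextSortReducer and
   * Text map output values on the submitted job; the job itself is not run.
   */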
  @Test
  public void testJobConfigurationsWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String INPUT_FILE = "InputFile1.csv";

    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY
            + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table,
        INPUT_FILE
    };
    GenericOptionsParser opts = new GenericOptionsParser(util.getConfiguration(), args);
    args = opts.getRemainingArgs();
    Job job = ImportTsv.createSubmittableJob(util.getConfiguration(), args);
    assertEquals(TsvImporterTextMapper.class, job.getMapperClass());
    assertEquals(TextSortReducer.class, job.getReducerClass());
    assertEquals(Text.class, job.getMapOutputValueClass());
  }

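  /**
   * Runs a bulk-output job with TsvImporterTextMapper and validates the generated
   * HFiles (no KV count check).
   */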
  @Test
  public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
    String FAMILY = "FAM";
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");

    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY
            + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table
    };
    String data = "KEY\u001bVALUE4\u001bVALUE8\n";
    doMROnTableTest(util, FAMILY, data, args, 4);
  }

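  /**
   * With CREATE_TABLE_CONF_KEY set to "no" and a missing table, job creation must
   * fail with TableNotFoundException.
   */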
  @Test(expected = TableNotFoundException.class)
  public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
    String table = "test-" + UUID.randomUUID();
    String[] args = new String[] { table, "/inputFile" };

    Configuration conf = new Configuration(util.getConfiguration());
    conf.set(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A");
    conf.set(ImportTsv.BULK_OUTPUT_CONF_KEY, "/output");
    conf.set(ImportTsv.CREATE_TABLE_CONF_KEY, "no");
    ImportTsv.createSubmittableJob(conf, args);
  }

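  /**
   * Without bulk output and without an existing table, job creation must fail with
   * TableNotFoundException.
   */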
  @Test(expected = TableNotFoundException.class)
  public void testMRWithoutAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();
    String[] args = new String[] { table, "/inputFile" };

    Configuration conf = new Configuration(util.getConfiguration());
    ImportTsv.createSubmittableJob(conf, args);
  }

  /**
   * If the input contains malformed rows, only those rows should be ignored and the
   * remaining valid rows still imported.
   */
  @Test
  public void testTsvImporterTextMapperWithInvalidData() throws Exception {
    String table = "test-" + UUID.randomUUID();
    String FAMILY = "FAM";
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");

    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,HBASE_TS_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table
    };

    // One of the three input lines ("KEY") is malformed; only the two valid rows
    // should produce KeyValues (2 rows x 2 columns = 4 expected KVs).
    String data = "KEY,1234,VALUE1,VALUE2\nKEY\nKEY,1235,VALUE1,VALUE2\n";
    doMROnTableTest(util, FAMILY, data, args, 1, 4);
    util.deleteTable(table);
  }

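  /**
   * Convenience overloads of doMROnTableTest that default valueMultiplier to 1 and
   * skip the KV count check (expectedKVCount = -1).
   */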
  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family,
      String data, String[] args) throws Exception {
    return doMROnTableTest(util, family, data, args, 1);
  }

  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
      String[] args, int valueMultiplier) throws Exception {
    return doMROnTableTest(util, family, data, args, valueMultiplier, -1);
  }

  /**
   * Run an ImportTsv job and perform basic validation of the results: if the
   * arguments include ImportTsv.BULK_OUTPUT_CONF_KEY the generated HFiles are
   * validated, otherwise the rows of the target table are validated.
   *
   * @param util the test cluster utility
   * @param family column family expected in the output
   * @param data TSV input data, or null to use a default single row
   * @param args arguments to pass BEFORE the input file path is appended
   * @param valueMultiplier multiplier used when validating expected cell values
   * @param expectedKVCount expected number of KeyValues in the generated HFiles,
   *          or -1 to skip the KV count check
   * @return the ImportTsv Tool instance that ran the job
   */
  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
      String[] args, int valueMultiplier, int expectedKVCount) throws Exception {
    String table = args[args.length - 1];
    Configuration conf = new Configuration(util.getConfiguration());

    // Populate the input file with the test data.
    FileSystem fs = FileSystem.get(conf);
    Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
    FSDataOutputStream op = fs.create(inputPath, true);
    if (data == null) {
      data = "KEY\u001bVALUE1\u001bVALUE2\n";
    }
    op.write(Bytes.toBytes(data));
    op.close();
    LOG.debug(String.format("Wrote test data to file: %s", inputPath));

    if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
      LOG.debug("Forcing combiner.");
      conf.setInt("mapreduce.map.combine.minspills", 1);
    }

    // Append the input file path and run the import.
    List<String> argv = new ArrayList<String>(Arrays.asList(args));
    argv.add(inputPath.toString());
    Tool tool = new ImportTsv();
    LOG.debug("Running ImportTsv with arguments: " + argv);
    assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(new String[argv.size()])));

    // Perform basic validation. If the args included ImportTsv.BULK_OUTPUT_CONF_KEY,
    // validate the generated HFiles; otherwise validate the data in the online table.
    boolean createdHFiles = false;
    String outputPath = null;
    for (String arg : argv) {
      if (arg.contains(ImportTsv.BULK_OUTPUT_CONF_KEY)) {
        createdHFiles = true;
        // Split the '-Dkey=value' argument on '=' and keep the output path.
        outputPath = arg.split("=")[1];
        break;
      }
    }

    if (createdHFiles) {
      validateHFiles(fs, outputPath, family, expectedKVCount);
    } else {
      validateTable(conf, TableName.valueOf(table), family, valueMultiplier);
    }

    if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
      LOG.debug("Deleting test subdirectory");
      util.cleanupDataTestDirOnTestFS(table);
    }
    return tool;
  }

  /**
   * Confirm ImportTsv via data in the online table.
   */
  private static void validateTable(Configuration conf, TableName tableName,
      String family, int valueMultiplier) throws IOException {

    LOG.debug("Validating table.");
    Table table = new HTable(conf, tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
      try {
        Scan scan = new Scan();
        // Scan the entire family.
        scan.addFamily(Bytes.toBytes(family));
        ResultScanner resScanner = table.getScanner(scan);
        for (Result res : resScanner) {
          assertTrue(res.size() == 2);
          List<Cell> kvs = res.listCells();
          assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
          assertTrue(CellUtil.matchingValue(kvs.get(1), Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
        }
        verified = true;
        break;
      } catch (NullPointerException e) {
        // A row may not be fully visible yet (listCells() returned null or a cell was
        // missing). Wait a bit and retry the scan.
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // continue
      }
    }
    table.close();
    assertTrue(verified);
  }

  /**
   * Confirm ImportTsv via the HFiles written to the filesystem.
   */
  private static void validateHFiles(FileSystem fs, String outputPath, String family,
      int expectedKVCount) throws IOException {

    LOG.debug("Validating HFiles.");
    Set<String> configFamilies = new HashSet<String>();
    configFamilies.add(family);
    Set<String> foundFamilies = new HashSet<String>();
    int actualKVCount = 0;
    for (FileStatus cfStatus : fs.listStatus(new Path(outputPath), new OutputFilesFilter())) {
      // Each directory under the output path is named for a column family.
      String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);
      String cf = elements[elements.length - 1];
      foundFamilies.add(cf);
      assertTrue(
        String.format(
          "HFile output contains a column family (%s) not present in input families (%s)",
          cf, configFamilies),
        configFamilies.contains(cf));
      for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) {
        assertTrue(
          String.format("HFile %s appears to contain no data.", hfile.getPath()),
          hfile.getLen() > 0);
        // Count the KVs only when a specific count is expected.
        if (expectedKVCount > -1) {
          actualKVCount += getKVCountFromHfile(fs, hfile.getPath());
        }
      }
    }
    if (expectedKVCount > -1) {
      assertTrue(String.format(
        "KV count in output hfile=<%d> doesn't match with expected KV count=<%d>", actualKVCount,
        expectedKVCount), actualKVCount == expectedKVCount);
    }
  }

  /**
   * Returns the total number of KeyValues in the given HFile.
   * @param fs the FileSystem
   * @param p path to the HFile
   * @return KV count in the given HFile
   * @throws IOException if the HFile cannot be read
   */
  private static int getKVCountFromHfile(FileSystem fs, Path p) throws IOException {
    Configuration conf = util.getConfiguration();
    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, false);
    scanner.seekTo();
    int count = 0;
    do {
      count++;
    } while (scanner.next());
    reader.close();
    return count;
  }
}