/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

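/**
 * Verifies that ImportTsv, run with a custom mapper
 * (TsvImporterCustomTestMapperForOprAttr) and an HBASE_ATTRIBUTES_KEY column,
 * attaches the parsed operation attribute to each Put. A region observer
 * registered on the mini cluster bypasses any Put that does not carry the
 * expected attribute, so rows only appear in the table when the attribute key
 * matches TEST_ATR_KEY.
 */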
@Category(LargeTests.class)
public class TestImportTSVWithOperationAttributes implements Configurable {

  protected static final Log LOG = LogFactory.getLog(TestImportTSVWithOperationAttributes.class);
  protected static final String NAME = TestImportTsv.class.getSimpleName();
  protected static HBaseTestingUtility util = new HBaseTestingUtility();

  /**
   * Delete the tmp directory after running doMROnTableTest. Boolean. Default is
   * true.
   */
  protected static final String DELETE_AFTER_LOAD_CONF = NAME + ".deleteAfterLoad";

  /**
   * Force use of combiner in doMROnTableTest. Boolean. Default is true.
   */
  protected static final String FORCE_COMBINER_CONF = NAME + ".forceCombiner";

  private static Configuration conf;

  private static final String TEST_ATR_KEY = "test";

  private final String FAMILY = "FAM";

  public Configuration getConf() {
    return util.getConfiguration();
  }

  public void setConf(Configuration conf) {
    throw new IllegalArgumentException("setConf not supported");
  }

  @BeforeClass
  public static void provisionCluster() throws Exception {
    conf = util.getConfiguration();
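    // Install the test observer on both the master and every region so that
    // prePut() can inspect the operation attributes of each Put from the import.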
    conf.set("hbase.coprocessor.master.classes", OperationAttributesTestController.class.getName());
    conf.set("hbase.coprocessor.region.classes", OperationAttributesTestController.class.getName());
    util.startMiniCluster();
    Admin admin = new HBaseAdmin(util.getConfiguration());
    util.startMiniMapReduceCluster();
  }

  @AfterClass
  public static void releaseCluster() throws Exception {
    util.shutdownMiniMapReduceCluster();
    util.shutdownMiniCluster();
  }

  @Test
  public void testMROnTable() throws Exception {
    String tableName = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapperForOprAttr",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
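    // One input row: row key, two column values, and an operation attribute
    // ("test=>myvalue") whose key matches TEST_ATR_KEY, so the observer lets
    // the Put through.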
    String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest=>myvalue\n";
    util.createTable(TableName.valueOf(tableName), FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1, true);
    util.deleteTable(tableName);
  }

  @Test
  public void testMROnTableWithInvalidOperationAttr() throws Exception {
    String tableName = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapperForOprAttr",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
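    // The attribute key "test1" does not match TEST_ATR_KEY, so the observer
    // bypasses every Put and the table is expected to stay empty.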
    String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest1=>myvalue\n";
    util.createTable(TableName.valueOf(tableName), FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1, false);
    util.deleteTable(tableName);
  }

  /**
   * Run an ImportTsv job and perform basic validation on the results. Returns
   * the ImportTsv <code>Tool</code> instance so that other tests can inspect it
   * for further validation as necessary. This method is static to ensure it
   * does not rely on the instance's util/conf facilities.
   *
   * @param args
   *          Any arguments to pass BEFORE the inputFile path is appended.
   * @param dataAvailable
   *          Whether the loaded rows are expected to be present in the table.
   * @return The Tool instance used to run the test.
   */
  private static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
      String[] args, int valueMultiplier, boolean dataAvailable) throws Exception {
    String table = args[args.length - 1];
    Configuration conf = new Configuration(util.getConfiguration());

    // populate input file
    FileSystem fs = FileSystem.get(conf);
    Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
    FSDataOutputStream op = fs.create(inputPath, true);
    op.write(Bytes.toBytes(data));
    op.close();
    LOG.debug(String.format("Wrote test data to file: %s", inputPath));

    if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
      LOG.debug("Forcing combiner.");
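      // Lowering minspills to 1 encourages MapReduce to apply the combiner,
      // so the combiner code path of the import job is exercised.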
      conf.setInt("mapreduce.map.combine.minspills", 1);
    }

    // run the import
    List<String> argv = new ArrayList<String>(Arrays.asList(args));
    argv.add(inputPath.toString());
    Tool tool = new ImportTsv();
    LOG.debug("Running ImportTsv with arguments: " + argv);
    assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(new String[argv.size()])));

    validateTable(conf, TableName.valueOf(table), family, valueMultiplier, dataAvailable);

    if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
      LOG.debug("Deleting test subdirectory");
      util.cleanupDataTestDirOnTestFS(table);
    }
    return tool;
  }

  /**
   * Confirm the ImportTsv load by scanning the data in the online table.
   *
   * @param dataAvailable
   *          Whether rows are expected to be present in the table.
   */
  private static void validateTable(Configuration conf, TableName tableName, String family,
      int valueMultiplier, boolean dataAvailable) throws IOException {

    LOG.debug("Validating table.");
    Table table = new HTable(conf, tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
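    // Retry the scan a few times, pausing in between, in case the loaded data
    // is not yet visible to the scanner.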
    for (int i = 0; i < numRetries; i++) {
      try {
        Scan scan = new Scan();
        // Scan entire family.
        scan.addFamily(Bytes.toBytes(family));
        if (dataAvailable) {
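          // Every loaded row should carry exactly two cells, FAM:A and FAM:B,
          // populated from the input line.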
          ResultScanner resScanner = table.getScanner(scan);
          for (Result res : resScanner) {
            LOG.debug("Getting results " + res.size());
            assertEquals(2, res.size());
            List<Cell> kvs = res.listCells();
            assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
            assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
            assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
            assertTrue(CellUtil.matchingValue(kvs.get(1),
                Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
            // Only one row is expected, so simply letting the loop run is fine.
            verified = true;
          }
        } else {
          ResultScanner resScanner = table.getScanner(scan);
          Result[] next = resScanner.next(2);
          assertEquals(0, next.length);
          verified = true;
        }

        break;
      } catch (NullPointerException e) {
        // If here, a cell was empty. Presume it's because updates came in
        // after the scanner had been opened. Wait a while and retry.
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // continue
      }
    }
    table.close();
    assertTrue(verified);
  }

  public static class OperationAttributesTestController extends BaseRegionObserver {

    @Override
    public void prePut(ObserverContext<RegionCoprocessorEnvironment> e, Put put, WALEdit edit,
        Durability durability) throws IOException {
      Region region = e.getEnvironment().getRegion();
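      // Only user-table Puts are inspected; a Put that does not carry the
      // TEST_ATR_KEY attribute is bypassed and never reaches the table.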
      if (!region.getRegionInfo().isMetaTable()
          && !region.getRegionInfo().getTable().isSystemTable()) {
        if (put.getAttribute(TEST_ATR_KEY) != null) {
          LOG.debug("allow any put to happen " + region.getRegionInfo().getRegionNameAsString());
        } else {
          e.bypass();
        }
      }
    }
  }
}