/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.spark.example.hbasecontext;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.spark.JavaHBaseContext;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

/**
 * This is a simple example of deleting records from HBase
 * with the bulkDelete function.
 */
public final class JavaHBaseBulkDeleteExample {

  // Utility class with a static entry point; never instantiated.
  private JavaHBaseBulkDeleteExample() {}

  public static void main(String[] args) {
    if (args.length < 1) {
      System.err.println("Usage: JavaHBaseBulkDeleteExample {tableName}");
      return;
    }

    String tableName = args[0];

    SparkConf sparkConf = new SparkConf().setAppName("JavaHBaseBulkDeleteExample " + tableName);
    JavaSparkContext jsc = new JavaSparkContext(sparkConf);
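
    // No master URL is set on the SparkConf above; it is expected to come
    // from spark-submit. For a quick local test one could instead write
    // (an illustrative assumption, not part of the original example):
    //   SparkConf sparkConf = new SparkConf()
    //       .setAppName("JavaHBaseBulkDeleteExample " + tableName)
    //       .setMaster("local[2]");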

    try {
      // Row keys of the five records to delete.
      List<byte[]> list = new ArrayList<>();
      list.add(Bytes.toBytes("1"));
      list.add(Bytes.toBytes("2"));
      list.add(Bytes.toBytes("3"));
      list.add(Bytes.toBytes("4"));
      list.add(Bytes.toBytes("5"));

      // Distribute the row keys as an RDD so the deletes run in parallel.
      JavaRDD<byte[]> rdd = jsc.parallelize(list);

      // Picks up hbase-site.xml from the classpath to locate the cluster.
      Configuration conf = HBaseConfiguration.create();
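
      // If hbase-site.xml is not on the classpath, the ZooKeeper quorum
      // could be set on the configuration directly instead (a sketch with
      // hypothetical hosts, not part of the original example):
      //   conf.set("hbase.zookeeper.quorum", "zk1,zk2,zk3");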

      JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);

      // Map each row key to a Delete and apply the deletes in batches of 4.
      hbaseContext.bulkDelete(rdd,
          TableName.valueOf(tableName), new DeleteFunction(), 4);
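
      // The same call written with a method reference instead of the named
      // DeleteFunction class; a sketch, relying on Spark's Function being a
      // functional interface with a Delete(byte[]) constructor as target:
      //   hbaseContext.bulkDelete(rdd, TableName.valueOf(tableName),
      //       Delete::new, 4);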
    } finally {
      // Always release the Spark context, even if the bulk delete fails.
      jsc.stop();
    }
  }

  /**
   * Maps a raw row key to the {@link Delete} that removes that row.
   * Must be serializable, since Spark ships it to the executors.
   */
  public static class DeleteFunction implements Function<byte[], Delete> {
    private static final long serialVersionUID = 1L;

    @Override
    public Delete call(byte[] v) throws Exception {
      return new Delete(v);
    }
  }
}
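
// Launch sketch (not from the original source): only the main class name is
// real; the jar path, master, and table name are illustrative assumptions.
//
//   spark-submit \
//     --class org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkDeleteExample \
//     --master yarn \
//     /path/to/hbase-spark-examples.jar \
//     myTable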