/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import java.util.UUID;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

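/**
 * Test that an HBase mini cluster can run on top of a MiniDFSCluster that was
 * started by a separate testing utility.
 */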
@Category(MediumTests.class)
public class TestHBaseOnOtherDfsCluster {

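  /**
   * Overlay an HBase cluster on an independently started DFS cluster and
   * verify that both utilities end up pointed at the same filesystem.
   */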
  @Test
  public void testOverlayOnOtherCluster() throws Exception {
    // start a mini DFS cluster by itself
    HBaseTestingUtility util1 = new HBaseTestingUtility();
    MiniDFSCluster dfs = util1.startMiniDFSCluster(1);

    // start an HBase mini cluster on top of the existing DFS cluster,
    // rather than letting it spin up its own
    HBaseTestingUtility util2 = new HBaseTestingUtility();
    // point the second utility at the already-running DFS before starting HBase
    util2.setDFSCluster(dfs, false);
    util2.startMiniCluster();

    // ensure that both utilities are pointed at the same filesystem
    FileSystem fs = dfs.getFileSystem();
    FileSystem targetFs = util2.getDFSCluster().getFileSystem();
    assertFsSameUri(fs, targetFs);

    // the filesystems resolved from each utility's configuration should match too
    fs = FileSystem.get(util1.getConfiguration());
    targetFs = FileSystem.get(util2.getConfiguration());
    assertFsSameUri(fs, targetFs);

    // a file created through one filesystem handle must be visible through the other
    Path randomFile = new Path("/" + UUID.randomUUID());
    assertTrue(targetFs.createNewFile(randomFile));
    assertTrue(fs.exists(randomFile));

    // do a simple create/write to ensure the HBase cluster works as expected
    byte[] family = Bytes.toBytes("testfamily");
    byte[] tablename = Bytes.toBytes("testtable");
    HTable table = util2.createTable(tablename, family);
    Put p = new Put(new byte[] { 1, 2, 3 });
    p.add(family, null, new byte[] { 1 });
    table.put(p);
    table.flushCommits();
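
    // sanity check: read the row back through the same table to confirm the
    // write is visible via the cluster overlaid on the external DFS
    Get get = new Get(new byte[] { 1, 2, 3 });
    assertTrue(!table.get(get).isEmpty());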

    // shut down HBase first, then the DFS cluster it was riding on
    util2.shutdownMiniCluster();
    util1.shutdownMiniDFSCluster();
  }

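  /**
   * Assert that both filesystem instances point at the same root URI.
   */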
  private void assertFsSameUri(FileSystem sourceFs, FileSystem targetFs) {
    Path source = new Path(sourceFs.getUri());
    Path target = new Path(targetFs.getUri());
    assertEquals(source, target);
  }
}