/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * Test that an HBase cluster can run on top of an existing MiniDFSCluster.
 */
@Category(MediumTests.class)
public class TestHBaseOnOtherDfsCluster {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestHBaseOnOtherDfsCluster.class);

  @Rule
  public TestName name = new TestName();

  @Test
  public void testOverlayOnOtherCluster() throws Exception {
    // just run HDFS
    HBaseTestingUtility util1 = new HBaseTestingUtility();
    MiniDFSCluster dfs = util1.startMiniDFSCluster(1);

    // run HBase on that HDFS
    HBaseTestingUtility util2 = new HBaseTestingUtility();
    // point util2 at the already running DFS cluster
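    // (note: per HBaseTestingUtility#setDFSCluster, the second argument appears to control
    // whether an already-set DFS cluster must be shut down first; false skips that check)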
    util2.setDFSCluster(dfs, false);
    util2.startMiniCluster();

    // ensure that they are pointed at the same place
    FileSystem fs = dfs.getFileSystem();
    FileSystem targetFs = util2.getDFSCluster().getFileSystem();
    assertFsSameUri(fs, targetFs);

    fs = FileSystem.get(util1.getConfiguration());
    targetFs = FileSystem.get(util2.getConfiguration());
    assertFsSameUri(fs, targetFs);

    Path randomFile = new Path("/" + util1.getRandomUUID());
    assertTrue(targetFs.createNewFile(randomFile));
    assertTrue(fs.exists(randomFile));

    // do a simple create/write to ensure the cluster works as expected
    byte[] family = Bytes.toBytes("testfamily");
    final TableName tablename = TableName.valueOf(name.getMethodName());
    Table table = util2.createTable(tablename, family);
    Put p = new Put(new byte[] { 1, 2, 3 });
    p.addColumn(family, null, new byte[] { 1 });
    table.put(p);

    // shut down and make sure both clusters stop cleanly
    util2.shutdownMiniCluster();
    util1.shutdownMiniDFSCluster();
  }

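  /**
   * Assert that the two FileSystem instances point at the same URI, i.e. the same underlying
   * HDFS instance.
   */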
  private void assertFsSameUri(FileSystem sourceFs, FileSystem targetFs) {
    Path source = new Path(sourceFs.getUri());
    Path target = new Path(targetFs.getUri());
    assertEquals(source, target);
  }
}