/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.backup;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
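
/**
 * Verifies that {@link BackupHFileCleaner} reports an HFile as deletable only while the backup
 * system table holds no bulk-load reference to it.
 */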
@Category({ MasterTests.class, SmallTests.class })
public class TestBackupHFileCleaner {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestBackupHFileCleaner.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestBackupHFileCleaner.class);
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static Configuration conf = TEST_UTIL.getConfiguration();
  private static TableName tableName = TableName.valueOf("backup.hfile.cleaner");
  private static String famName = "fam";
  static FileSystem fs = null;
  Path root;

  /**
   * @throws Exception if starting the mini cluster or getting the filesystem fails
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
    TEST_UTIL.startMiniZKCluster();
    TEST_UTIL.startMiniCluster(1);
    fs = FileSystem.get(conf);
  }

  /**
   * @throws Exception if closing the filesystem or shutting down the mini cluster fails
   */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    if (fs != null) {
      fs.close();
    }
    TEST_UTIL.shutdownMiniCluster();
  }

  @Before
  public void setup() throws IOException {
    root = TEST_UTIL.getDataTestDirOnTestFS();
  }

  @After
  public void cleanup() {
    try {
      fs.delete(root, true);
    } catch (IOException e) {
      LOG.warn("Failed to delete files recursively from path " + root);
    }
  }

  @Test
  public void testGetDeletableFiles() throws IOException {
    // 1. Create a file
    Path file = new Path(root, "testIsFileDeletableWithNoHFileRefs");
    fs.createNewFile(file);
    // 2. Assert file is successfully created
    assertTrue("Test file not created!", fs.exists(file));
    BackupHFileCleaner cleaner = new BackupHFileCleaner();
    cleaner.setConf(conf);
    cleaner.setCheckForFullyBackedUpTables(false);
    // 3. Assert that file as is should be deletable
    List<FileStatus> stats = new ArrayList<>();
    FileStatus stat = fs.getFileStatus(file);
    stats.add(stat);
    Iterable<FileStatus> deletable = cleaner.getDeletableFiles(stats);
    deletable = cleaner.getDeletableFiles(stats);
    boolean found = false;
    for (FileStatus stat1 : deletable) {
      if (stat.equals(stat1)) {
        found = true;
      }
    }
    assertTrue("Cleaner should allow to delete this file as there is no hfile reference "
      + "for it.", found);

    // 4. Add the file as bulk load
    List<Path> list = new ArrayList<>(1);
    list.add(file);
    try (Connection conn = ConnectionFactory.createConnection(conf);
      BackupSystemTable sysTbl = new BackupSystemTable(conn)) {
      List<TableName> sTableList = new ArrayList<>();
      sTableList.add(tableName);
      Map<byte[], List<Path>>[] maps = new Map[1];
      maps[0] = new HashMap<>();
      maps[0].put(Bytes.toBytes(famName), list);
      sysTbl.writeBulkLoadedFiles(sTableList, maps, "1");
    }

    // 5. Assert file should not be deletable
    deletable = cleaner.getDeletableFiles(stats);
    deletable = cleaner.getDeletableFiles(stats);
    found = false;
    for (FileStatus stat1 : deletable) {
      if (stat.equals(stat1)) {
        found = true;
      }
    }
    assertFalse("Cleaner should not allow to delete this file as there is a hfile reference "
      + "for it.", found);
  }
}