/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.master.cleaner;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(MediumTests.class)
public class TestHFileCleaner {
  private static final Log LOG = LogFactory.getLog(TestHFileCleaner.class);

  private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();

  @BeforeClass
  public static void setupCluster() throws Exception {
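    // use a mini DFS cluster; these tests rely on fs.setTimes(), which the local
    // filesystem does not handle reliably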
    UTIL.startMiniDFSCluster(1);
  }

  @AfterClass
  public static void shutdownCluster() throws IOException {
    UTIL.shutdownMiniDFSCluster();
  }

  @Test
  public void testTTLCleaner() throws IOException, InterruptedException {
    FileSystem fs = UTIL.getDFSCluster().getFileSystem();
    Path root = UTIL.getDataTestDirOnTestFS();
    Path file = new Path(root, "file");
    fs.createNewFile(file);
    long createTime = System.currentTimeMillis();
    assertTrue("Test file not created!", fs.exists(file));
    TimeToLiveHFileCleaner cleaner = new TimeToLiveHFileCleaner();
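    // back-date the file so its age exceeds the 100 ms TTL configured below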
    fs.setTimes(file, createTime - 100, -1);
    Configuration conf = UTIL.getConfiguration();
    conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, 100);
    cleaner.setConf(conf);
    assertTrue("File not set deletable - check mod time:" + getFileStats(file, fs)
        + " with create time:" + createTime, cleaner.isFileDeletable(fs.getFileStatus(file)));
  }

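  /**
   * @param file the file to describe
   * @param fs filesystem holding the file
   * @return a loggable string containing the file's modification and access times
   */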
  private String getFileStats(Path file, FileSystem fs) throws IOException {
    FileStatus status = fs.getFileStatus(file);
    return "File" + file + ", mtime:" + status.getModificationTime() + ", atime:"
        + status.getAccessTime();
  }

  @Test(timeout = 60 * 1000)
  public void testHFileCleaning() throws Exception {
    final EnvironmentEdge originalEdge = EnvironmentEdgeManager.getDelegate();
    String prefix = "someHFileThatWouldBeAUUID";
    Configuration conf = UTIL.getConfiguration();
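    // configure only the TimeToLiveHFileCleaner plugin, with a 2 second TTL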
    long ttl = 2000;
    conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS,
        "org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner");
    conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, ttl);
    Server server = new DummyServer();
    Path archivedHfileDir =
        new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
    FileSystem fs = FileSystem.get(conf);
    HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);

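    // lay out the archive dir: one file with an invalid hfile name, 31 files older than
    // the TTL, and one file still within the TTL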
    final long createTime = System.currentTimeMillis();
    fs.delete(archivedHfileDir, true);
    fs.mkdirs(archivedHfileDir);
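    // a file whose name is not a valid hfile name; as the final count shows, the cleaner
    // removes it even though its modification time is recent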
    fs.createNewFile(new Path(archivedHfileDir, "dfd-dfd"));

    LOG.debug("Now is: " + createTime);
    for (int i = 1; i < 32; i++) {
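      // files that look like archived hfiles, each back-dated past the TTL so the cleaner
      // should remove all of them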
      Path fileName = new Path(archivedHfileDir, (prefix + "." + (createTime + i)));
      fs.createNewFile(fileName);
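      // push the modification time past the TTL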
      fs.setTimes(fileName, createTime - ttl - 1, -1);
      LOG.debug("Creating " + getFileStats(fileName, fs));
    }

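    // one file whose modification time stays within the TTL; it should survive the chore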
    Path saved = new Path(archivedHfileDir, prefix + ".00000000000");
    fs.createNewFile(saved);
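    // back-date it only half the TTL, so it is not yet expired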
    fs.setTimes(saved, createTime - ttl / 2, -1);
    LOG.debug("Creating " + getFileStats(saved, fs));
    for (FileStatus stat : fs.listStatus(archivedHfileDir)) {
      LOG.debug(stat.getPath().toString());
    }

    assertEquals(33, fs.listStatus(archivedHfileDir).length);

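    // pin "now" to createTime so the TTL checks run against a fixed clock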
    EnvironmentEdge setTime = new EnvironmentEdge() {
      @Override
      public long currentTimeMillis() {
        return createTime;
      }
    };
    EnvironmentEdgeManager.injectEdge(setTime);

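    // run one pass of the cleaner chore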
    cleaner.chore();

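    // everything except the still-live file should be gone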
    assertEquals(1, fs.listStatus(archivedHfileDir).length);

    for (FileStatus file : fs.listStatus(archivedHfileDir)) {
      LOG.debug("Kept hfiles: " + file.getPath().getName());
    }

    cleaner.interrupt();
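    // restore the original time source so other tests are unaffected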
    EnvironmentEdgeManager.injectEdge(originalEdge);
  }

  @Test
  public void testRemovesEmptyDirectories() throws Exception {
    Configuration conf = UTIL.getConfiguration();
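    // no cleaner plugins are configured, so every file under the archive directory is deletable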
    conf.setStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
    Server server = new DummyServer();
    Path archivedHfileDir =
        new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);

    FileSystem fs = UTIL.getDFSCluster().getFileSystem();
    HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);

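    // build a table/region/family directory chain containing a single file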
    Path table = new Path(archivedHfileDir, "table");
    Path region = new Path(table, "regionsomthing");
    Path family = new Path(region, "fam");
    Path file = new Path(family, "file12345");
    fs.mkdirs(family);
    if (!fs.exists(family)) throw new RuntimeException("Couldn't create test family:" + family);
    fs.create(file).close();
    if (!fs.exists(file)) throw new RuntimeException("Test file didn't get created:" + file);

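    // a single chore pass should delete the file and then prune the now-empty parent directories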
    cleaner.chore();

    assertFalse("family directory not removed for empty directory", fs.exists(family));
    assertFalse("region directory not removed for empty directory", fs.exists(region));
    assertFalse("table directory not removed for empty directory", fs.exists(table));
    assertTrue("archive directory should not be removed", fs.exists(archivedHfileDir));
  }

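  /** Minimal {@link Server} implementation, just enough to construct the cleaner chores in these tests. */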
  static class DummyServer implements Server {

    @Override
    public Configuration getConfiguration() {
      return UTIL.getConfiguration();
    }

    @Override
    public ZooKeeperWatcher getZooKeeper() {
      try {
        return new ZooKeeperWatcher(getConfiguration(), "dummy server", this);
      } catch (IOException e) {
        e.printStackTrace();
      }
      return null;
    }

    @Override
    public CatalogTracker getCatalogTracker() {
      return null;
    }

    @Override
    public ServerName getServerName() {
      return ServerName.valueOf("regionserver,60020,000000");
    }

    @Override
    public void abort(String why, Throwable e) {
    }

    @Override
    public boolean isAborted() {
      return false;
    }

    @Override
    public void stop(String why) {
    }

    @Override
    public boolean isStopped() {
      return false;
    }
  }
}