/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.master.cleaner;
19
20 import static org.junit.Assert.assertEquals;
21 import static org.junit.Assert.assertFalse;
22 import static org.junit.Assert.assertTrue;
23
24 import java.io.IOException;
25
26 import org.apache.commons.logging.Log;
27 import org.apache.commons.logging.LogFactory;
28 import org.apache.hadoop.conf.Configuration;
29 import org.apache.hadoop.fs.FileStatus;
30 import org.apache.hadoop.fs.FileSystem;
31 import org.apache.hadoop.fs.Path;
32 import org.apache.hadoop.hbase.ChoreService;
33 import org.apache.hadoop.hbase.CoordinatedStateManager;
34 import org.apache.hadoop.hbase.HBaseTestingUtility;
35 import org.apache.hadoop.hbase.HConstants;
36 import org.apache.hadoop.hbase.testclassification.MediumTests;
37 import org.apache.hadoop.hbase.Server;
38 import org.apache.hadoop.hbase.ServerName;
39 import org.apache.hadoop.hbase.client.ClusterConnection;
40 import org.apache.hadoop.hbase.util.EnvironmentEdge;
41 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
42 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
43 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
44 import org.junit.AfterClass;
45 import org.junit.BeforeClass;
46 import org.junit.Test;
47 import org.junit.experimental.categories.Category;
48
49 @Category(MediumTests.class)
50 public class TestHFileCleaner {
51 private static final Log LOG = LogFactory.getLog(TestHFileCleaner.class);
52
53 private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
54
55 @BeforeClass
56 public static void setupCluster() throws Exception {
57
58 UTIL.startMiniDFSCluster(1);
59 }
60
61 @AfterClass
62 public static void shutdownCluster() throws IOException {
63 UTIL.shutdownMiniDFSCluster();
64 }
65
66 @Test
67 public void testTTLCleaner() throws IOException, InterruptedException {
68 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
69 Path root = UTIL.getDataTestDirOnTestFS();
70 Path file = new Path(root, "file");
71 fs.createNewFile(file);
72 long createTime = System.currentTimeMillis();
73 assertTrue("Test file not created!", fs.exists(file));
74 TimeToLiveHFileCleaner cleaner = new TimeToLiveHFileCleaner();
75
76 fs.setTimes(file, createTime - 100, -1);
77 Configuration conf = UTIL.getConfiguration();
78 conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, 100);
79 cleaner.setConf(conf);
80 assertTrue("File not set deletable - check mod time:" + getFileStats(file, fs)
81 + " with create time:" + createTime, cleaner.isFileDeletable(fs.getFileStatus(file)));
82 }
83
84
85
86
87
88 private String getFileStats(Path file, FileSystem fs) throws IOException {
89 FileStatus status = fs.getFileStatus(file);
90 return "File" + file + ", mtime:" + status.getModificationTime() + ", atime:"
91 + status.getAccessTime();
92 }
93
94 @Test(timeout = 60 *1000)
95 public void testHFileCleaning() throws Exception {
96 final EnvironmentEdge originalEdge = EnvironmentEdgeManager.getDelegate();
97 String prefix = "someHFileThatWouldBeAUUID";
98 Configuration conf = UTIL.getConfiguration();
99
100 long ttl = 2000;
101 conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS,
102 "org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner");
103 conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, ttl);
104 Server server = new DummyServer();
105 Path archivedHfileDir = new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
106 FileSystem fs = FileSystem.get(conf);
107 HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);
108
109
110 final long createTime = System.currentTimeMillis();
111 fs.delete(archivedHfileDir, true);
112 fs.mkdirs(archivedHfileDir);
113
114 fs.createNewFile(new Path(archivedHfileDir, "dfd-dfd"));
115
116
117 LOG.debug("Now is: " + createTime);
118 for (int i = 1; i < 32; i++) {
119
120
121 Path fileName = new Path(archivedHfileDir, (prefix + "." + (createTime + i)));
122 fs.createNewFile(fileName);
123
124 fs.setTimes(fileName, createTime - ttl - 1, -1);
125 LOG.debug("Creating " + getFileStats(fileName, fs));
126 }
127
128
129
130 Path saved = new Path(archivedHfileDir, prefix + ".00000000000");
131 fs.createNewFile(saved);
132
133 fs.setTimes(saved, createTime - ttl / 2, -1);
134 LOG.debug("Creating " + getFileStats(saved, fs));
135 for (FileStatus stat : fs.listStatus(archivedHfileDir)) {
136 LOG.debug(stat.getPath().toString());
137 }
138
139 assertEquals(33, fs.listStatus(archivedHfileDir).length);
140
141
142 EnvironmentEdge setTime = new EnvironmentEdge() {
143 @Override
144 public long currentTime() {
145 return createTime;
146 }
147 };
148 EnvironmentEdgeManager.injectEdge(setTime);
149
150
151 cleaner.chore();
152
153
154 assertEquals(1, fs.listStatus(archivedHfileDir).length);
155
156 for (FileStatus file : fs.listStatus(archivedHfileDir)) {
157 LOG.debug("Kept hfiles: " + file.getPath().getName());
158 }
159
160
161 EnvironmentEdgeManager.injectEdge(originalEdge);
162 }
163
164 @Test
165 public void testRemovesEmptyDirectories() throws Exception {
166 Configuration conf = UTIL.getConfiguration();
167
168 conf.setStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
169 Server server = new DummyServer();
170 Path archivedHfileDir = new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
171
172
173 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
174 HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);
175
176
177 Path table = new Path(archivedHfileDir, "table");
178 Path region = new Path(table, "regionsomthing");
179 Path family = new Path(region, "fam");
180 Path file = new Path(family, "file12345");
181 fs.mkdirs(family);
182 if (!fs.exists(family)) throw new RuntimeException("Couldn't create test family:" + family);
183 fs.create(file).close();
184 if (!fs.exists(file)) throw new RuntimeException("Test file didn't get created:" + file);
185
186
187 cleaner.chore();
188
189
190 assertFalse("family directory not removed for empty directory", fs.exists(family));
191 assertFalse("region directory not removed for empty directory", fs.exists(region));
192 assertFalse("table directory not removed for empty directory", fs.exists(table));
193 assertTrue("archive directory", fs.exists(archivedHfileDir));
194 }
195
196 static class DummyServer implements Server {
197
198 @Override
199 public Configuration getConfiguration() {
200 return UTIL.getConfiguration();
201 }
202
203 @Override
204 public ZooKeeperWatcher getZooKeeper() {
205 try {
206 return new ZooKeeperWatcher(getConfiguration(), "dummy server", this);
207 } catch (IOException e) {
208 e.printStackTrace();
209 }
210 return null;
211 }
212
213 @Override
214 public CoordinatedStateManager getCoordinatedStateManager() {
215 return null;
216 }
217
218 @Override
219 public ClusterConnection getConnection() {
220 return null;
221 }
222
223 @Override
224 public MetaTableLocator getMetaTableLocator() {
225 return null;
226 }
227
228 @Override
229 public ServerName getServerName() {
230 return ServerName.valueOf("regionserver,60020,000000");
231 }
232
233 @Override
234 public void abort(String why, Throwable e) {
235 }
236
237 @Override
238 public boolean isAborted() {
239 return false;
240 }
241
242 @Override
243 public void stop(String why) {
244 }
245
246 @Override
247 public boolean isStopped() {
248 return false;
249 }
250
251 @Override
252 public ChoreService getChoreService() {
253 return null;
254 }
255 }
256 }