/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.replication;

import java.io.IOException;
import java.util.UUID;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceInterface;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceManager;
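/**
 * Source that does nothing at all, used to test {@link ReplicationSourceManager}.
 * It only records the manager it was initialized with, the peer cluster znode and
 * the last log that was enqueued.
 */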
public class ReplicationSourceDummy implements ReplicationSourceInterface {

  ReplicationSourceManager manager;
  String peerClusterId;
  Path currentPath;

  @Override
  public void init(Configuration conf, FileSystem fs, ReplicationSourceManager manager,
      ReplicationQueues rq, ReplicationPeers rp, Stoppable stopper, String peerClusterId,
      UUID clusterId) throws IOException {
    // Keep only what the tests inspect; the other arguments are ignored by this dummy.
    this.manager = manager;
    this.peerClusterId = peerClusterId;
  }

  @Override
  public void enqueueLog(Path log) {
    this.currentPath = log;
  }

  @Override
  public Path getCurrentPath() {
    return this.currentPath;
  }

  @Override
  public void startup() {
    // Nothing to start for the dummy source.
  }

  @Override
  public void terminate(String reason) {
    // Nothing to terminate for the dummy source.
  }

  @Override
  public void terminate(String reason, Exception e) {
    // Nothing to terminate for the dummy source.
  }

  @Override
  public String getPeerClusterZnode() {
    return peerClusterId;
  }

  @Override
  public String getPeerClusterId() {
    // A recovered queue znode is named "<peerId>-<dead server name>", so keep only the
    // part before the first dash; otherwise the znode name is the peer id itself.
    String[] parts = peerClusterId.split("-", 2);
    return parts.length != 1 ? parts[0] : peerClusterId;
  }

  @Override
  public String getStats() {
    return "";
  }
}