/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package org.apache.hadoop.hbase.replication;

import java.io.IOException;
import java.util.UUID;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceInterface;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceManager;

/**
 * Replication source that does nothing; useful for unit testing ReplicationSourceManager
 * without starting a real source.
 */
public class ReplicationSourceDummy implements ReplicationSourceInterface {

  ReplicationSourceManager manager;
  String peerClusterId;
  Path currentPath;

  @Override
  public void init(Configuration conf, FileSystem fs, ReplicationSourceManager manager,
      ReplicationQueues rq, ReplicationPeers rp, Stoppable stopper, String peerClusterId,
      UUID clusterId, ReplicationEndpoint replicationEndpoint, MetricsSource metrics)
      throws IOException {
    // Only remember what tests need to assert on; every other argument is ignored.
    this.manager = manager;
    this.peerClusterId = peerClusterId;
  }

  @Override
  public void enqueueLog(Path log) {
    // Remember the last log that was queued so tests can inspect it via getCurrentPath().
    this.currentPath = log;
  }

  @Override
  public Path getCurrentPath() {
    return this.currentPath;
  }

  @Override
  public void startup() {
    // no-op for the dummy source
  }

  @Override
  public void terminate(String reason) {
    // no-op for the dummy source
  }

  @Override
  public void terminate(String reason, Exception e) {
    // no-op for the dummy source
  }

  @Override
  public String getPeerClusterZnode() {
    return peerClusterId;
  }

  @Override
  public String getPeerClusterId() {
    // The znode name may carry a "-<suffix>"; the peer id is the part before the first dash.
    String[] parts = peerClusterId.split("-", 2);
    return parts.length != 1 ? parts[0] : peerClusterId;
  }

  @Override
  public String getStats() {
    return "";
  }
}
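
// A minimal usage sketch (an assumption for illustration, not part of this file):
// tests generally swap this dummy in for the real source by pointing the
// replication source implementation setting (assumed here to be
// "replication.replicationsource.implementation") at this class before the
// manager starts, e.g.
//
//   Configuration conf = HBaseConfiguration.create();
//   conf.set("replication.replicationsource.implementation",
//       ReplicationSourceDummy.class.getCanonicalName());
//
// With that setting in place, ReplicationSourceManager instantiates
// ReplicationSourceDummy instead of a production source, and a test can verify
// queueing behaviour through getCurrentPath() after calling enqueueLog().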