/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignsResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BypassProcedureRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BypassProcedureResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.FixMetaRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.HbckService.BlockingInterface;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RegionSpecifierAndState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunHbckChoreRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunHbckChoreResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ScheduleSCPsForUnknownServersRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ScheduleSCPsForUnknownServersResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ScheduleServerCrashProcedureResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignsResponse;

/**
 * Use {@link Connection#getHbck()} to obtain an instance of {@link Hbck} instead of constructing
 * an HBaseHbck directly.
 *
 * <p>Connection should be an <i>unmanaged</i> connection obtained via
 * {@link ConnectionFactory#createConnection(Configuration)}.</p>
 *
 * <p>NOTE: The methods in here can do damage to a cluster if applied in the wrong sequence or at
 * the wrong time. Use with caution. For experts only. These methods are only for the extreme case
 * where the cluster has been damaged or has achieved an inconsistent state because of some
 * unforeseen circumstance or bug and requires manual intervention.</p>
 *
 * <p>An instance of this class is lightweight and not thread-safe. A new instance should be
 * created by each thread. Pooling or caching of the instance is not recommended.</p>
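 *
 * <p>A minimal usage sketch (assumes {@code conf} is a {@link Configuration} pointing at the
 * target cluster):</p>
 * <pre>
 * try (Connection connection = ConnectionFactory.createConnection(conf);
 *     Hbck hbck = connection.getHbck()) {
 *   hbck.runHbckChore();
 * }
 * </pre>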
 *
 * @see ConnectionFactory
 * @see Hbck
 */
@InterfaceAudience.Private
public class HBaseHbck implements Hbck {
  private static final Logger LOG = LoggerFactory.getLogger(HBaseHbck.class);

  private boolean aborted;
  private final BlockingInterface hbck;

  private RpcControllerFactory rpcControllerFactory;

  HBaseHbck(BlockingInterface hbck, RpcControllerFactory rpcControllerFactory) {
    this.hbck = hbck;
    this.rpcControllerFactory = rpcControllerFactory;
  }

  @Override
  public void close() throws IOException {
    // currently does nothing
  }

  @Override
  public void abort(String why, Throwable e) {
    this.aborted = true;
    // Currently does nothing beyond rethrowing the passed reason and cause.
    throw new RuntimeException(why, e);
  }

  @Override
  public boolean isAborted() {
    return this.aborted;
  }
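
  // Sets the given table's state in hbase:meta via the master's HbckService and returns the
  // table state carried back in the response.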
  @Override
  public TableState setTableStateInMeta(TableState state) throws IOException {
    try {
      GetTableStateResponse response = hbck.setTableStateInMeta(
        rpcControllerFactory.newController(),
        RequestConverter.buildSetTableStateInMetaRequest(state));
      return TableState.convert(state.getTableName(), response.getTableState());
    } catch (ServiceException se) {
      LOG.debug("table={}, state={}", state.getTableName(), state.getState(), se);
      throw new IOException(se);
    }
  }
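
  // Sets region states in hbase:meta for the given map of region names (or encoded names) to
  // target states, and returns the states reported back by the master.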
  @Override
  public Map<String, RegionState.State> setRegionStateInMeta(
    Map<String, RegionState.State> nameOrEncodedName2State) throws IOException {
    try {
      if (LOG.isDebugEnabled()) {
        nameOrEncodedName2State.forEach((k, v) -> LOG.debug("region={}, state={}", k, v));
      }
      MasterProtos.SetRegionStateInMetaResponse response =
        hbck.setRegionStateInMeta(rpcControllerFactory.newController(),
          RequestConverter.buildSetRegionStateInMetaRequest(nameOrEncodedName2State));
      Map<String, RegionState.State> result = new HashMap<>();
      for (RegionSpecifierAndState nameAndState : response.getStatesList()) {
        result.put(nameAndState.getRegionSpecifier().getValue().toStringUtf8(),
          RegionState.State.convert(nameAndState.getState()));
      }
      return result;
    } catch (ServiceException se) {
      throw new IOException(se);
    }
  }
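
  // Schedules assign procedures for the given encoded region names and returns the procedure ids.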
  @Override
  public List<Long> assigns(List<String> encodedRegionNames, boolean override)
    throws IOException {
    try {
      AssignsResponse response = this.hbck.assigns(rpcControllerFactory.newController(),
        RequestConverter.toAssignRegionsRequest(encodedRegionNames, override));
      return response.getPidList();
    } catch (ServiceException se) {
      LOG.debug(toCommaDelimitedString(encodedRegionNames), se);
      throw new IOException(se);
    }
  }
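
  // Schedules unassign procedures for the given encoded region names and returns the procedure
  // ids.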
  @Override
  public List<Long> unassigns(List<String> encodedRegionNames, boolean override)
    throws IOException {
    try {
      UnassignsResponse response = this.hbck.unassigns(rpcControllerFactory.newController(),
        RequestConverter.toUnassignRegionsRequest(encodedRegionNames, override));
      return response.getPidList();
    } catch (ServiceException se) {
      LOG.debug(toCommaDelimitedString(encodedRegionNames), se);
      throw new IOException(se);
    }
  }

  private static String toCommaDelimitedString(List<String> list) {
    return list.stream().collect(Collectors.joining(", "));
  }
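
  // Asks the master to bypass the given procedures, returning one boolean per procedure id
  // indicating whether it was bypassed.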
  @Override
  public List<Boolean> bypassProcedure(List<Long> pids, long waitTime, boolean override,
    boolean recursive) throws IOException {
    BypassProcedureResponse response = ProtobufUtil.call(
      new Callable<BypassProcedureResponse>() {
        @Override
        public BypassProcedureResponse call() throws Exception {
          try {
            return hbck.bypassProcedure(rpcControllerFactory.newController(),
              BypassProcedureRequest.newBuilder().addAllProcId(pids).setWaitTime(waitTime)
                .setOverride(override).setRecursive(recursive).build());
          } catch (Throwable t) {
            LOG.error(pids.stream().map(i -> i.toString()).collect(Collectors.joining(", ")), t);
            throw t;
          }
        }
      });
    return response.getBypassedList();
  }
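
  // Schedules ServerCrashProcedures for the given servers and returns the procedure ids.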
  @Override
  public List<Long> scheduleServerCrashProcedures(List<ServerName> serverNames)
    throws IOException {
    try {
      ScheduleServerCrashProcedureResponse response =
        this.hbck.scheduleServerCrashProcedure(rpcControllerFactory.newController(),
          RequestConverter.toScheduleServerCrashProcedureRequest(serverNames));
      return response.getPidList();
    } catch (ServiceException se) {
      LOG.debug(toCommaDelimitedString(
        serverNames.stream().map(serverName -> ProtobufUtil.toServerName(serverName).toString())
          .collect(Collectors.toList())),
        se);
      throw new IOException(se);
    }
  }
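
  // Schedules ServerCrashProcedures for "unknown" servers and returns the procedure ids.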
  @Override
  public List<Long> scheduleSCPsForUnknownServers() throws IOException {
    try {
      ScheduleSCPsForUnknownServersResponse response =
        this.hbck.scheduleSCPsForUnknownServers(rpcControllerFactory.newController(),
          ScheduleSCPsForUnknownServersRequest.newBuilder().build());
      return response.getPidList();
    } catch (ServiceException se) {
      LOG.debug("Failed to run ServerCrashProcedures for unknown servers", se);
      throw new IOException(se);
    }
  }
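
  // Requests a run of the master's HBCK chore; returns whether the chore ran.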
  @Override
  public boolean runHbckChore() throws IOException {
    try {
      RunHbckChoreResponse response = this.hbck.runHbckChore(rpcControllerFactory.newController(),
        RunHbckChoreRequest.newBuilder().build());
      return response.getRan();
    } catch (ServiceException se) {
      LOG.debug("Failed to run HBCK chore", se);
      throw new IOException(se);
    }
  }
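
  // Invokes the master's fixMeta operation to repair hbase:meta.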
  @Override
  public void fixMeta() throws IOException {
    try {
      this.hbck.fixMeta(rpcControllerFactory.newController(), FixMetaRequest.newBuilder().build());
    } catch (ServiceException se) {
      throw new IOException(se);
    }
  }