You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2017/05/23 07:36:35 UTC
[23/50] [abbrv] hbase git commit: HBASE-14614 Procedure v2 - Core
Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager,
one that describes Assignment using a State Machine built on top of
ProcedureV2 facility.
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
new file mode 100644
index 0000000..8d5ff3c
--- /dev/null
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
@@ -0,0 +1,375 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.procedure2;
+
+import java.io.IOException;
+import java.lang.Thread.UncaughtExceptionHandler;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.DelayQueue;
+import java.util.concurrent.Future;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.procedure2.util.DelayedUtil;
+import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainerWithTimestamp;
+import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedWithTimeout;
+import org.apache.hadoop.hbase.procedure2.util.StringUtils;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Threads;
+
+import com.google.common.collect.ArrayListMultimap;
+
+/**
+ * A procedure dispatcher that aggregates and sends after elapsed time or after we hit
+ * count threshold. Creates its own threadpool to run RPCs with timeout.
+ * <ul>
+ * <li>Each server queue has a dispatch buffer</li>
+ * <li>Once the dispatch buffer reaches a threshold-size/time we send</li>
+ * </ul>
+ * <p>Call {@link #start()} and then {@link #submitTask(Callable)}. When done,
+ * call {@link #stop()}.
+ */
+@InterfaceAudience.Private
+public abstract class RemoteProcedureDispatcher<TEnv, TRemote extends Comparable<TRemote>> {
+ private static final Log LOG = LogFactory.getLog(RemoteProcedureDispatcher.class);
+
+ public static final String THREAD_POOL_SIZE_CONF_KEY =
+ "hbase.procedure.remote.dispatcher.threadpool.size";
+ private static final int DEFAULT_THREAD_POOL_SIZE = 128;
+
+ public static final String DISPATCH_DELAY_CONF_KEY =
+ "hbase.procedure.remote.dispatcher.delay.msec";
+ private static final int DEFAULT_DISPATCH_DELAY = 150;
+
+ public static final String DISPATCH_MAX_QUEUE_SIZE_CONF_KEY =
+ "hbase.procedure.remote.dispatcher.max.queue.size";
+ private static final int DEFAULT_MAX_QUEUE_SIZE = 32;
+
+ private final AtomicBoolean running = new AtomicBoolean(false);
+ private final ConcurrentHashMap<TRemote, BufferNode> nodeMap =
+ new ConcurrentHashMap<TRemote, BufferNode>();
+
+ private final int operationDelay;
+ private final int queueMaxSize;
+ private final int corePoolSize;
+
+ private TimeoutExecutorThread timeoutExecutor;
+ private ThreadPoolExecutor threadPool;
+
+ protected RemoteProcedureDispatcher(Configuration conf) {
+ this.corePoolSize = conf.getInt(THREAD_POOL_SIZE_CONF_KEY, DEFAULT_THREAD_POOL_SIZE);
+ this.operationDelay = conf.getInt(DISPATCH_DELAY_CONF_KEY, DEFAULT_DISPATCH_DELAY);
+ this.queueMaxSize = conf.getInt(DISPATCH_MAX_QUEUE_SIZE_CONF_KEY, DEFAULT_MAX_QUEUE_SIZE);
+ }
+
+ public boolean start() {
+ if (running.getAndSet(true)) {
+ LOG.warn("Already running");
+ return false;
+ }
+
+ LOG.info("Starting procedure remote dispatcher; threads=" + this.corePoolSize +
+ ", queueMaxSize=" + this.queueMaxSize + ", operationDelay=" + this.operationDelay);
+
+ // Create the timeout executor
+ timeoutExecutor = new TimeoutExecutorThread();
+ timeoutExecutor.start();
+
+ // Create the thread pool that will execute RPCs
+ threadPool = Threads.getBoundedCachedThreadPool(corePoolSize, 60L, TimeUnit.SECONDS,
+ Threads.newDaemonThreadFactory(this.getClass().getSimpleName(),
+ getUncaughtExceptionHandler()));
+ return true;
+ }
+
+ public boolean stop() {
+ if (!running.getAndSet(false)) {
+ return false;
+ }
+
+ LOG.info("Stopping procedure remote dispatcher");
+
+ // send stop signals
+ timeoutExecutor.sendStopSignal();
+ threadPool.shutdownNow();
+ return true;
+ }
+
+ public void join() {
+ assert !running.get() : "expected not running";
+
+ // wait the timeout executor
+ timeoutExecutor.awaitTermination();
+ timeoutExecutor = null;
+
+ // wait for the thread pool to terminate
+ threadPool.shutdownNow();
+ try {
+ while (!threadPool.awaitTermination(60, TimeUnit.SECONDS)) {
+ LOG.warn("Waiting for thread-pool to terminate");
+ }
+ } catch (InterruptedException e) {
+ LOG.warn("Interrupted while waiting for thread-pool termination", e);
+ }
+ }
+
+ protected UncaughtExceptionHandler getUncaughtExceptionHandler() {
+ return new UncaughtExceptionHandler() {
+ @Override
+ public void uncaughtException(Thread t, Throwable e) {
+ LOG.warn("Failed to execute remote procedures " + t.getName(), e);
+ }
+ };
+ }
+
+ // ============================================================================================
+ // Node Helpers
+ // ============================================================================================
+ /**
+ * Add a node that will be able to execute remote procedures
+ * @param key the node identifier
+ */
+ public void addNode(final TRemote key) {
+ assert key != null: "Tried to add a node with a null key";
+ final BufferNode newNode = new BufferNode(key);
+ nodeMap.putIfAbsent(key, newNode);
+ }
+
+ /**
+ * Add a remote rpc. Be sure to check result for successful add.
+ * @param key the node identifier
+ * @return True if we successfully added the operation.
+ */
+ public boolean addOperationToNode(final TRemote key, RemoteProcedure rp) {
+ assert key != null : "found null key for node";
+ BufferNode node = nodeMap.get(key);
+ if (node == null) {
+ return false;
+ }
+ node.add(rp);
+ // Node could have been removed by #removeNode; use containsKey (contains() is a legacy alias for containsValue and scans all values).
+ return nodeMap.containsKey(key);
+ }
+
+ /**
+ * Remove a remote node
+ * @param key the node identifier
+ */
+ public boolean removeNode(final TRemote key) {
+ final BufferNode node = nodeMap.remove(key);
+ if (node == null) return false;
+ node.abortOperationsInQueue();
+ return true;
+ }
+
+ // ============================================================================================
+ // Task Helpers
+ // ============================================================================================
+ protected Future<Void> submitTask(Callable<Void> task) {
+ return threadPool.submit(task);
+ }
+
+ protected Future<Void> submitTask(Callable<Void> task, long delay, TimeUnit unit) {
+ // Parameterize the FutureTask; the raw type triggered an unchecked-conversion warning.
+ final FutureTask<Void> futureTask = new FutureTask<>(task);
+ timeoutExecutor.add(new DelayedTask(futureTask, delay, unit));
+ return futureTask;
+ }
+
+ protected abstract void remoteDispatch(TRemote key, Set<RemoteProcedure> operations);
+ protected abstract void abortPendingOperations(TRemote key, Set<RemoteProcedure> operations);
+
+ /**
+ * Data structure with reference to remote operation.
+ */
+ public static abstract class RemoteOperation {
+ private final RemoteProcedure remoteProcedure;
+
+ protected RemoteOperation(final RemoteProcedure remoteProcedure) {
+ this.remoteProcedure = remoteProcedure;
+ }
+
+ public RemoteProcedure getRemoteProcedure() {
+ return remoteProcedure;
+ }
+ }
+
+ /**
+ * Remote procedure reference.
+ * @param <TEnv>
+ * @param <TRemote>
+ */
+ public interface RemoteProcedure<TEnv, TRemote> {
+ RemoteOperation remoteCallBuild(TEnv env, TRemote remote);
+ void remoteCallCompleted(TEnv env, TRemote remote, RemoteOperation response);
+ void remoteCallFailed(TEnv env, TRemote remote, IOException exception);
+ }
+
+ /**
+ * Account of what procedures are running on remote node.
+ * @param <TEnv>
+ * @param <TRemote>
+ */
+ public interface RemoteNode<TEnv, TRemote> {
+ TRemote getKey();
+ void add(RemoteProcedure<TEnv, TRemote> operation);
+ void dispatch();
+ }
+
+ protected ArrayListMultimap<Class<?>, RemoteOperation> buildAndGroupRequestByType(final TEnv env,
+ final TRemote remote, final Set<RemoteProcedure> operations) {
+ final ArrayListMultimap<Class<?>, RemoteOperation> requestByType = ArrayListMultimap.create();
+ for (RemoteProcedure proc: operations) {
+ RemoteOperation operation = proc.remoteCallBuild(env, remote);
+ requestByType.put(operation.getClass(), operation);
+ }
+ return requestByType;
+ }
+
+ protected <T extends RemoteOperation> List<T> fetchType(
+ final ArrayListMultimap<Class<?>, RemoteOperation> requestByType, final Class<T> type) {
+ return (List<T>)requestByType.removeAll(type);
+ }
+
+ // ============================================================================================
+ // Timeout Helpers
+ // ============================================================================================
+ private final class TimeoutExecutorThread extends Thread {
+ private final DelayQueue<DelayedWithTimeout> queue = new DelayQueue<DelayedWithTimeout>();
+
+ public TimeoutExecutorThread() {
+ super("ProcedureDispatcherTimeoutThread");
+ }
+
+ @Override
+ public void run() {
+ while (running.get()) {
+ final DelayedWithTimeout task = DelayedUtil.takeWithoutInterrupt(queue);
+ if (task == null || task == DelayedUtil.DELAYED_POISON) {
+ // the executor may be shutting down, and the task is just the shutdown request
+ continue;
+ }
+ if (task instanceof DelayedTask) {
+ threadPool.execute(((DelayedTask)task).getObject());
+ } else {
+ ((BufferNode)task).dispatch();
+ }
+ }
+ }
+
+ public void add(final DelayedWithTimeout delayed) {
+ queue.add(delayed);
+ }
+
+ public void remove(final DelayedWithTimeout delayed) {
+ queue.remove(delayed);
+ }
+
+ public void sendStopSignal() {
+ queue.add(DelayedUtil.DELAYED_POISON);
+ }
+
+ public void awaitTermination() {
+ try {
+ final long startTime = EnvironmentEdgeManager.currentTime();
+ for (int i = 0; isAlive(); ++i) {
+ sendStopSignal();
+ join(250);
+ if (i > 0 && (i % 8) == 0) {
+ LOG.warn("Waiting termination of thread " + getName() + ", " +
+ StringUtils.humanTimeDiff(EnvironmentEdgeManager.currentTime() - startTime));
+ }
+ }
+ } catch (InterruptedException e) {
+ LOG.warn(getName() + " join wait got interrupted", e);
+ }
+ }
+ }
+
+ // ============================================================================================
+ // Internals Helpers
+ // ============================================================================================
+
+ /**
+ * Node that contains a set of RemoteProcedures
+ */
+ protected final class BufferNode extends DelayedContainerWithTimestamp<TRemote>
+ implements RemoteNode<TEnv, TRemote> {
+ private Set<RemoteProcedure> operations;
+
+ protected BufferNode(final TRemote key) {
+ super(key, 0);
+ }
+
+ public TRemote getKey() {
+ return getObject();
+ }
+
+ public synchronized void add(final RemoteProcedure operation) {
+ if (this.operations == null) {
+ this.operations = new HashSet<>();
+ setTimeout(EnvironmentEdgeManager.currentTime() + operationDelay);
+ timeoutExecutor.add(this);
+ }
+ this.operations.add(operation);
+ if (this.operations.size() > queueMaxSize) {
+ timeoutExecutor.remove(this);
+ dispatch();
+ }
+ }
+
+ public synchronized void dispatch() {
+ if (operations != null) {
+ remoteDispatch(getKey(), operations);
+ this.operations = null;
+ }
+ }
+
+ public synchronized void abortOperationsInQueue() {
+ if (operations != null) {
+ abortPendingOperations(getKey(), operations);
+ this.operations = null;
+ }
+ }
+
+ @Override
+ public String toString() {
+ return super.toString() + ", operations=" + this.operations;
+ }
+ }
+
+ /**
+ * Delayed object that holds a FutureTask.
+ * Used to submit something later to the thread-pool.
+ */
+ private static final class DelayedTask extends DelayedContainerWithTimestamp<FutureTask<Void>> {
+ public DelayedTask(final FutureTask<Void> task, final long delay, final TimeUnit unit) {
+ super(task, EnvironmentEdgeManager.currentTime() + unit.toMillis(delay));
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java
index 1a84070..64bb278 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java
@@ -27,12 +27,13 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData;
/**
- * A SequentialProcedure describes one step in a procedure chain.
+ * A SequentialProcedure describes one step in a procedure chain:
+ * <pre>
* -> Step 1 -> Step 2 -> Step 3
- *
+ * </pre>
* The main difference from a base Procedure is that the execute() of a
- * SequentialProcedure will be called only once, there will be no second
- * execute() call once the child are finished. which means once the child
+ * SequentialProcedure will be called only once; there will be no second
+ * execute() call once the children are finished, which means once the children
* of a SequentialProcedure are completed the SequentialProcedure is completed too.
*/
@InterfaceAudience.Private
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
index 0590a93..437cc6c 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
@@ -21,9 +21,10 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.util.concurrent.atomic.AtomicBoolean;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -56,7 +57,7 @@ public abstract class StateMachineProcedure<TEnvironment, TState>
private int stateCount = 0;
private int[] states = null;
- private ArrayList<Procedure> subProcList = null;
+ private List<Procedure<?>> subProcList = null;
protected enum Flow {
HAS_MORE_STATE,
@@ -125,12 +126,15 @@ public abstract class StateMachineProcedure<TEnvironment, TState>
* Add a child procedure to execute
* @param subProcedure the child procedure
*/
- protected void addChildProcedure(Procedure... subProcedure) {
+ protected void addChildProcedure(Procedure<?>... subProcedure) {
+ if (subProcedure == null) return;
+ final int len = subProcedure.length;
+ if (len == 0) return;
if (subProcList == null) {
- subProcList = new ArrayList<>(subProcedure.length);
+ subProcList = new ArrayList<>(len);
}
- for (int i = 0; i < subProcedure.length; ++i) {
- Procedure proc = subProcedure[i];
+ for (int i = 0; i < len; ++i) {
+ Procedure<?> proc = subProcedure[i];
if (!proc.hasOwner()) proc.setOwner(getOwner());
subProcList.add(proc);
}
@@ -144,21 +148,17 @@ public abstract class StateMachineProcedure<TEnvironment, TState>
failIfAborted();
if (!hasMoreState() || isFailed()) return null;
-
TState state = getCurrentState();
if (stateCount == 0) {
setNextState(getStateId(state));
}
-
stateFlow = executeFromState(env, state);
if (!hasMoreState()) setNextState(EOF_STATE);
-
- if (subProcList != null && subProcList.size() != 0) {
+ if (subProcList != null && !subProcList.isEmpty()) {
Procedure[] subProcedures = subProcList.toArray(new Procedure[subProcList.size()]);
subProcList = null;
return subProcedures;
}
-
return (isWaiting() || isFailed() || !hasMoreState()) ? null : new Procedure[] {this};
} finally {
updateTimestamp();
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/NoopProcedureStore.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/NoopProcedureStore.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/NoopProcedureStore.java
index c03e326..9e53f42 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/NoopProcedureStore.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/NoopProcedureStore.java
@@ -52,8 +52,8 @@ public class NoopProcedureStore extends ProcedureStoreBase {
}
@Override
- public void setRunningProcedureCount(final int count) {
- // no-op
+ public int setRunningProcedureCount(final int count) {
+ return count;
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.java
index 385cedb..a690c81 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.java
@@ -153,8 +153,9 @@ public interface ProcedureStore {
/**
* Set the number of procedure running.
* This can be used, for example, by the store to know how long to wait before a sync.
+ * @return how many procedures are running (may not be same as <code>count</code>).
*/
- void setRunningProcedureCount(int count);
+ int setRunningProcedureCount(int count);
/**
* Acquire the lease for the procedure store.
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index c672045..0a05e6e 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -83,11 +83,11 @@ public class ProcedureWALFormatReader {
//
// Fast Start: INIT/INSERT record and StackIDs
// ---------------------------------------------
- // We have two special record, INIT and INSERT that tracks the first time
- // the procedure was added to the WAL. We can use that information to be able
- // to start procedures before reaching the end of the WAL, or before reading all the WALs.
- // but in some cases the WAL with that record can be already gone.
- // In alternative we can use the stackIds on each procedure,
+ // We have two special records, INIT and INSERT, that track the first time
+ // the procedure was added to the WAL. We can use this information to be able
+ // to start procedures before reaching the end of the WAL, or before reading all WALs.
+ // But in some cases, the WAL with that record can be already gone.
+ // As an alternative, we can use the stackIds on each procedure,
// to identify when a procedure is ready to start.
// If there are gaps in the sum of the stackIds we need to read more WALs.
//
@@ -107,16 +107,16 @@ public class ProcedureWALFormatReader {
* Global tracker that will be used by the WALProcedureStore after load.
* If the last WAL was closed cleanly we already have a full tracker ready to be used.
* If the last WAL was truncated (e.g. master killed) the tracker will be empty
- * and the 'partial' flag will be set. In this case on WAL replay we are going
+ * and the 'partial' flag will be set. In this case, on WAL replay we are going
* to rebuild the tracker.
*/
private final ProcedureStoreTracker tracker;
- // private final boolean hasFastStartSupport;
+ // TODO: private final boolean hasFastStartSupport;
/**
* If tracker for a log file is partial (see {@link ProcedureStoreTracker#partial}), we
* re-build the list of procedures updated in that WAL because we need it for log cleaning
- * purpose. If all procedures updated in a WAL are found to be obsolete, it can be safely deleted.
+ * purposes. If all procedures updated in a WAL are found to be obsolete, it can be safely deleted.
* (see {@link WALProcedureStore#removeInactiveLogs()}).
* However, we don't need deleted part of a WAL's tracker for this purpose, so we don't bother
* re-building it.
@@ -137,7 +137,7 @@ public class ProcedureWALFormatReader {
public void read(final ProcedureWALFile log) throws IOException {
localTracker = log.getTracker().isPartial() ? log.getTracker() : null;
if (localTracker != null) {
- LOG.info("Rebuilding tracker for log - " + log);
+ LOG.info("Rebuilding tracker for " + log);
}
FSDataInputStream stream = log.getStream();
@@ -146,7 +146,7 @@ public class ProcedureWALFormatReader {
while (hasMore) {
ProcedureWALEntry entry = ProcedureWALFormat.readEntry(stream);
if (entry == null) {
- LOG.warn("nothing left to decode. exiting with missing EOF");
+ LOG.warn("Nothing left to decode. Exiting with missing EOF, log=" + log);
break;
}
switch (entry.getType()) {
@@ -171,7 +171,7 @@ public class ProcedureWALFormatReader {
}
}
} catch (InvalidProtocolBufferException e) {
- LOG.error("got an exception while reading the procedure WAL: " + log, e);
+ LOG.error("While reading procedure from " + log, e);
loader.markCorruptedWAL(log, e);
}
@@ -211,7 +211,7 @@ public class ProcedureWALFormatReader {
maxProcId = Math.max(maxProcId, proc.getProcId());
if (isRequired(proc.getProcId())) {
if (LOG.isTraceEnabled()) {
- LOG.trace("read " + entry.getType() + " entry " + proc.getProcId());
+ LOG.trace("Read " + entry.getType() + " entry " + proc.getProcId());
}
localProcedureMap.add(proc);
if (tracker.isPartial()) {
@@ -296,7 +296,7 @@ public class ProcedureWALFormatReader {
// replayOrderHead = C <-> B <-> E <-> D <-> A <-> G
//
// We also have a lazy grouping by "root procedure", and a list of
- // unlinked procedure. If after reading all the WALs we have unlinked
+ // unlinked procedures. If after reading all the WALs we have unlinked
// procedures it means that we had a missing WAL or a corruption.
// rootHead = A <-> D <-> G
// B E
@@ -639,17 +639,17 @@ public class ProcedureWALFormatReader {
* "ready" means that we all the information that we need in-memory.
*
* Example-1:
- * We have two WALs, we start reading fronm the newest (wal-2)
+ * We have two WALs, we start reading from the newest (wal-2)
* wal-2 | C B |
* wal-1 | A B C |
*
* If C and B don't depend on A (A is not the parent), we can start them
- * before reading wal-1. If B is the only one with parent A we can start C
- * and read one more WAL before being able to start B.
+ * before reading wal-1. If B is the only one with parent A we can start C.
+ * We have to read one more WAL before being able to start B.
*
* How do we know with the only information in B that we are not ready.
* - easy case, the parent is missing from the global map
- * - more complex case we look at the Stack IDs
+ * - more complex case we look at the Stack IDs.
*
* The Stack-IDs are added to the procedure order as incremental index
* tracking how many times that procedure was executed, which is equivalent
@@ -664,7 +664,7 @@ public class ProcedureWALFormatReader {
* executed before.
* To identify when a Procedure is ready we do the sum of the stackIds of
* the procedure and the parent. if the stackIdSum is equals to the
- * sum of {1..maxStackId} then everything we need is avaiable.
+ * sum of {1..maxStackId} then everything we need is available.
*
* Example-2
* wal-2 | A | A stackIds = [0, 2]
@@ -676,7 +676,7 @@ public class ProcedureWALFormatReader {
assert !rootEntry.hasParent() : "expected root procedure, got " + rootEntry;
if (rootEntry.isFinished()) {
- // if the root procedure is finished, sub-procedures should be gone
+ // If the root procedure is finished, sub-procedures should be gone
if (rootEntry.childHead != null) {
LOG.error("unexpected active children for root-procedure: " + rootEntry);
for (Entry p = rootEntry.childHead; p != null; p = p.linkNext) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
index 4712c30..7eeb2df 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
@@ -292,9 +292,9 @@ public class WALProcedureStore extends ProcedureStoreBase {
}
@Override
- public void setRunningProcedureCount(final int count) {
- LOG.debug("Set running procedure count=" + count + ", slots=" + slots.length);
+ public int setRunningProcedureCount(final int count) {
this.runningProcCount = count > 0 ? Math.min(count, slots.length) : slots.length;
+ return this.runningProcCount;
}
public ProcedureStoreTracker getStoreTracker() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/DelayedUtil.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/DelayedUtil.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/DelayedUtil.java
index cde37bd..faf8e7e 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/DelayedUtil.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/DelayedUtil.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+// FIX namings. TODO.
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class DelayedUtil {
@@ -148,6 +149,9 @@ public final class DelayedUtil {
}
}
+ /**
+ * Has a timeout.
+ */
public static class DelayedContainerWithTimestamp<T> extends DelayedContainer<T> {
private long timeout;
@@ -165,4 +169,4 @@ public final class DelayedUtil {
this.timeout = timeout;
}
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hbase/blob/f56592fd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
index 06a4e01..e83a7ac 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
@@ -1024,7 +1024,7 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder>
getGlobalPermissionFieldBuilder() {
if (globalPermissionBuilder_ == null) {
globalPermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -1142,7 +1142,7 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder>
getNamespacePermissionFieldBuilder() {
if (namespacePermissionBuilder_ == null) {
namespacePermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -1260,7 +1260,7 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder>
getTablePermissionFieldBuilder() {
if (tablePermissionBuilder_ == null) {
tablePermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -2074,7 +2074,7 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -4130,7 +4130,7 @@ public final class AccessControlProtos {
* <code>required .hbase.pb.Permission permission = 3;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionFieldBuilder() {
if (permissionBuilder_ == null) {
permissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -4198,7 +4198,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UsersAndPermissions.UserPermissions user_permissions = 1;</code>
*/
- java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions>
+ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions>
getUserPermissionsList();
/**
* <code>repeated .hbase.pb.UsersAndPermissions.UserPermissions user_permissions = 1;</code>
@@ -4211,7 +4211,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UsersAndPermissions.UserPermissions user_permissions = 1;</code>
*/
- java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissionsOrBuilder>
+ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissionsOrBuilder>
getUserPermissionsOrBuilderList();
/**
* <code>repeated .hbase.pb.UsersAndPermissions.UserPermissions user_permissions = 1;</code>
@@ -4319,7 +4319,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permissions = 2;</code>
*/
- java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission>
+ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission>
getPermissionsList();
/**
* <code>repeated .hbase.pb.Permission permissions = 2;</code>
@@ -4332,7 +4332,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permissions = 2;</code>
*/
- java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionsOrBuilderList();
/**
* <code>repeated .hbase.pb.Permission permissions = 2;</code>
@@ -4452,7 +4452,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permissions = 2;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionsOrBuilderList() {
return permissions_;
}
@@ -4790,7 +4790,7 @@ public final class AccessControlProtos {
permissionsBuilder_ = null;
permissions_ = other.permissions_;
bitField0_ = (bitField0_ & ~0x00000002);
- permissionsBuilder_ =
+ permissionsBuilder_ =
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getPermissionsFieldBuilder() : null;
} else {
@@ -5064,7 +5064,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permissions = 2;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionsOrBuilderList() {
if (permissionsBuilder_ != null) {
return permissionsBuilder_.getMessageOrBuilderList();
@@ -5090,12 +5090,12 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permissions = 2;</code>
*/
- public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder>
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder>
getPermissionsBuilderList() {
return getPermissionsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionsFieldBuilder() {
if (permissionsBuilder_ == null) {
permissionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
@@ -5168,7 +5168,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UsersAndPermissions.UserPermissions user_permissions = 1;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissionsOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissionsOrBuilder>
getUserPermissionsOrBuilderList() {
return userPermissions_;
}
@@ -5480,7 +5480,7 @@ public final class AccessControlProtos {
userPermissionsBuilder_ = null;
userPermissions_ = other.userPermissions_;
bitField0_ = (bitField0_ & ~0x00000001);
- userPermissionsBuilder_ =
+ userPermissionsBuilder_ =
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getUserPermissionsFieldBuilder() : null;
} else {
@@ -5716,7 +5716,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UsersAndPermissions.UserPermissions user_permissions = 1;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissionsOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissionsOrBuilder>
getUserPermissionsOrBuilderList() {
if (userPermissionsBuilder_ != null) {
return userPermissionsBuilder_.getMessageOrBuilderList();
@@ -5742,12 +5742,12 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UsersAndPermissions.UserPermissions user_permissions = 1;</code>
*/
- public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions.Builder>
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions.Builder>
getUserPermissionsBuilderList() {
return getUserPermissionsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissionsOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissionsOrBuilder>
getUserPermissionsFieldBuilder() {
if (userPermissionsBuilder_ == null) {
userPermissionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
@@ -6396,7 +6396,7 @@ public final class AccessControlProtos {
* <code>required .hbase.pb.UserPermission user_permission = 1;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
getUserPermissionFieldBuilder() {
if (userPermissionBuilder_ == null) {
userPermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -7393,7 +7393,7 @@ public final class AccessControlProtos {
* <code>required .hbase.pb.UserPermission user_permission = 1;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
getUserPermissionFieldBuilder() {
if (userPermissionBuilder_ == null) {
userPermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -8507,7 +8507,7 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -8610,7 +8610,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UserPermission user_permission = 1;</code>
*/
- java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission>
+ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission>
getUserPermissionList();
/**
* <code>repeated .hbase.pb.UserPermission user_permission = 1;</code>
@@ -8623,7 +8623,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UserPermission user_permission = 1;</code>
*/
- java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
getUserPermissionOrBuilderList();
/**
* <code>repeated .hbase.pb.UserPermission user_permission = 1;</code>
@@ -8721,7 +8721,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UserPermission user_permission = 1;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
getUserPermissionOrBuilderList() {
return userPermission_;
}
@@ -9028,7 +9028,7 @@ public final class AccessControlProtos {
userPermissionBuilder_ = null;
userPermission_ = other.userPermission_;
bitField0_ = (bitField0_ & ~0x00000001);
- userPermissionBuilder_ =
+ userPermissionBuilder_ =
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getUserPermissionFieldBuilder() : null;
} else {
@@ -9264,7 +9264,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UserPermission user_permission = 1;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
getUserPermissionOrBuilderList() {
if (userPermissionBuilder_ != null) {
return userPermissionBuilder_.getMessageOrBuilderList();
@@ -9290,12 +9290,12 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.UserPermission user_permission = 1;</code>
*/
- public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission.Builder>
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission.Builder>
getUserPermissionBuilderList() {
return getUserPermissionFieldBuilder().getBuilderList();
}
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
getUserPermissionFieldBuilder() {
if (userPermissionBuilder_ == null) {
userPermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
@@ -9364,7 +9364,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permission = 1;</code>
*/
- java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission>
+ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission>
getPermissionList();
/**
* <code>repeated .hbase.pb.Permission permission = 1;</code>
@@ -9377,7 +9377,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permission = 1;</code>
*/
- java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionOrBuilderList();
/**
* <code>repeated .hbase.pb.Permission permission = 1;</code>
@@ -9475,7 +9475,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permission = 1;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionOrBuilderList() {
return permission_;
}
@@ -9782,7 +9782,7 @@ public final class AccessControlProtos {
permissionBuilder_ = null;
permission_ = other.permission_;
bitField0_ = (bitField0_ & ~0x00000001);
- permissionBuilder_ =
+ permissionBuilder_ =
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getPermissionFieldBuilder() : null;
} else {
@@ -10018,7 +10018,7 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permission = 1;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionOrBuilderList() {
if (permissionBuilder_ != null) {
return permissionBuilder_.getMessageOrBuilderList();
@@ -10044,12 +10044,12 @@ public final class AccessControlProtos {
/**
* <code>repeated .hbase.pb.Permission permission = 1;</code>
*/
- public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder>
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder>
getPermissionBuilderList() {
return getPermissionFieldBuilder().getBuilderList();
}
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
- org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
getPermissionFieldBuilder() {
if (permissionBuilder_ == null) {
permissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
@@ -10931,77 +10931,77 @@ public final class AccessControlProtos {
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Permission_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_Permission_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_TablePermission_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_TablePermission_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_NamespacePermission_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_NamespacePermission_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GlobalPermission_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GlobalPermission_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_UserPermission_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_UserPermission_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_UsersAndPermissions_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_UsersAndPermissions_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_UsersAndPermissions_UserPermissions_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_UsersAndPermissions_UserPermissions_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GrantRequest_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GrantRequest_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GrantResponse_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GrantResponse_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_RevokeRequest_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_RevokeRequest_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_RevokeResponse_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_RevokeResponse_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetUserPermissionsRequest_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetUserPermissionsRequest_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetUserPermissionsResponse_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetUserPermissionsResponse_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CheckPermissionsRequest_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_CheckPermissionsRequest_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CheckPermissionsResponse_descriptor;
- private static final
+ private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_CheckPermissionsResponse_fieldAccessorTable;