You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2014/08/30 02:14:40 UTC
[3/4] HBASE-11822 Convert EnvironmentEdge#getCurrentTimeMillis to
getCurrentTime
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
index eed98e5..310cd07 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
@@ -375,7 +375,7 @@ class MemStoreFlusher implements FlushRequester {
if (!region.getRegionInfo().isMetaRegion() &&
isTooManyStoreFiles(region)) {
if (fqe.isMaximumWait(this.blockingWaitTime)) {
- LOG.info("Waited " + (EnvironmentEdgeManager.currentTimeMillis() - fqe.createTime) +
+ LOG.info("Waited " + (EnvironmentEdgeManager.currentTime() - fqe.createTime) +
"ms on a compaction to clean up 'too many store files'; waited " +
"long enough... proceeding with flush of " +
region.getRegionNameAsString());
@@ -504,7 +504,7 @@ class MemStoreFlusher implements FlushRequester {
if (Trace.isTracing()) {
scope.getSpan().addTimelineAnnotation("Force Flush. We're above high water mark.");
}
- long start = EnvironmentEdgeManager.currentTimeMillis();
+ long start = EnvironmentEdgeManager.currentTime();
synchronized (this.blockSignal) {
boolean blocked = false;
long startTime = 0;
@@ -512,7 +512,7 @@ class MemStoreFlusher implements FlushRequester {
try {
while (isAboveHighWaterMark() && !server.isStopped()) {
if (!blocked) {
- startTime = EnvironmentEdgeManager.currentTimeMillis();
+ startTime = EnvironmentEdgeManager.currentTime();
LOG.info("Blocking updates on " + server.toString() +
": the global memstore size " +
StringUtils.humanReadableInt(server.getRegionServerAccounting().getGlobalMemstoreSize()) +
@@ -529,7 +529,7 @@ class MemStoreFlusher implements FlushRequester {
LOG.warn("Interrupted while waiting");
interrupted = true;
}
- long took = EnvironmentEdgeManager.currentTimeMillis() - start;
+ long took = EnvironmentEdgeManager.currentTime() - start;
LOG.warn("Memstore is above high water mark and block " + took + "ms");
}
} finally {
@@ -539,7 +539,7 @@ class MemStoreFlusher implements FlushRequester {
}
if(blocked){
- final long totalTime = EnvironmentEdgeManager.currentTimeMillis() - startTime;
+ final long totalTime = EnvironmentEdgeManager.currentTime() - startTime;
if(totalTime > 0){
this.updatesBlockedMsHighWater.add(totalTime);
}
@@ -643,7 +643,7 @@ class MemStoreFlusher implements FlushRequester {
FlushRegionEntry(final HRegion r) {
this.region = r;
- this.createTime = EnvironmentEdgeManager.currentTimeMillis();
+ this.createTime = EnvironmentEdgeManager.currentTime();
this.whenToExpire = this.createTime;
}
@@ -652,7 +652,7 @@ class MemStoreFlusher implements FlushRequester {
* @return True if we have been delayed > <code>maximumWait</code> milliseconds.
*/
public boolean isMaximumWait(final long maximumWait) {
- return (EnvironmentEdgeManager.currentTimeMillis() - this.createTime) > maximumWait;
+ return (EnvironmentEdgeManager.currentTime() - this.createTime) > maximumWait;
}
/**
@@ -665,19 +665,19 @@ class MemStoreFlusher implements FlushRequester {
/**
* @param when When to expire, when to come up out of the queue.
- * Specify in milliseconds. This method adds EnvironmentEdgeManager.currentTimeMillis()
+ * Specify in milliseconds. This method adds EnvironmentEdgeManager.currentTime()
* to whatever you pass.
* @return This.
*/
public FlushRegionEntry requeue(final long when) {
- this.whenToExpire = EnvironmentEdgeManager.currentTimeMillis() + when;
+ this.whenToExpire = EnvironmentEdgeManager.currentTime() + when;
this.requeueCount++;
return this;
}
@Override
public long getDelay(TimeUnit unit) {
- return unit.convert(this.whenToExpire - EnvironmentEdgeManager.currentTimeMillis(),
+ return unit.convert(this.whenToExpire - EnvironmentEdgeManager.currentTime(),
TimeUnit.MILLISECONDS);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
index 5da1ea1..ff39a1e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
@@ -460,7 +460,7 @@ class MetricsRegionServerWrapperImpl
//Compute the number of requests per second
- long currentTime = EnvironmentEdgeManager.currentTimeMillis();
+ long currentTime = EnvironmentEdgeManager.currentTime();
// assume that it took PERIOD seconds to start the executor.
// this is a guess but it's a pretty good one.
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 023040d..9087a30 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -394,7 +394,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
*/
private Result append(final HRegion region, final MutationProto m,
final CellScanner cellScanner, long nonceGroup) throws IOException {
- long before = EnvironmentEdgeManager.currentTimeMillis();
+ long before = EnvironmentEdgeManager.currentTime();
Append append = ProtobufUtil.toAppend(m, cellScanner);
Result r = null;
if (region.getCoprocessorHost() != null) {
@@ -415,7 +415,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
}
if (regionServer.metricsRegionServer != null) {
regionServer.metricsRegionServer.updateAppend(
- EnvironmentEdgeManager.currentTimeMillis() - before);
+ EnvironmentEdgeManager.currentTime() - before);
}
return r;
}
@@ -430,7 +430,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
*/
private Result increment(final HRegion region, final MutationProto mutation,
final CellScanner cells, long nonceGroup) throws IOException {
- long before = EnvironmentEdgeManager.currentTimeMillis();
+ long before = EnvironmentEdgeManager.currentTime();
Increment increment = ProtobufUtil.toIncrement(mutation, cells);
Result r = null;
if (region.getCoprocessorHost() != null) {
@@ -451,7 +451,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
}
if (regionServer.metricsRegionServer != null) {
regionServer.metricsRegionServer.updateIncrement(
- EnvironmentEdgeManager.currentTimeMillis() - before);
+ EnvironmentEdgeManager.currentTime() - before);
}
return r;
}
@@ -569,7 +569,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
private void doBatchOp(final RegionActionResult.Builder builder, final HRegion region,
final List<ClientProtos.Action> mutations, final CellScanner cells) {
Mutation[] mArray = new Mutation[mutations.size()];
- long before = EnvironmentEdgeManager.currentTimeMillis();
+ long before = EnvironmentEdgeManager.currentTime();
boolean batchContainsPuts = false, batchContainsDelete = false;
try {
int i = 0;
@@ -622,7 +622,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
}
}
if (regionServer.metricsRegionServer != null) {
- long after = EnvironmentEdgeManager.currentTimeMillis();
+ long after = EnvironmentEdgeManager.currentTime();
if (batchContainsPuts) {
regionServer.metricsRegionServer.updatePut(after - before);
}
@@ -645,7 +645,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
private OperationStatus [] doReplayBatchOp(final HRegion region,
final List<HLogSplitter.MutationReplay> mutations, long replaySeqId) throws IOException {
- long before = EnvironmentEdgeManager.currentTimeMillis();
+ long before = EnvironmentEdgeManager.currentTime();
boolean batchContainsPuts = false, batchContainsDelete = false;
try {
for (Iterator<HLogSplitter.MutationReplay> it = mutations.iterator(); it.hasNext();) {
@@ -677,7 +677,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
new HLogSplitter.MutationReplay[mutations.size()]), replaySeqId);
} finally {
if (regionServer.metricsRegionServer != null) {
- long after = EnvironmentEdgeManager.currentTimeMillis();
+ long after = EnvironmentEdgeManager.currentTime();
if (batchContainsPuts) {
regionServer.metricsRegionServer.updatePut(after - before);
}
@@ -1352,7 +1352,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
@QosPriority(priority = HConstants.REPLAY_QOS)
public ReplicateWALEntryResponse replay(final RpcController controller,
final ReplicateWALEntryRequest request) throws ServiceException {
- long before = EnvironmentEdgeManager.currentTimeMillis();
+ long before = EnvironmentEdgeManager.currentTime();
CellScanner cells = ((PayloadCarryingRpcController) controller).cellScanner();
try {
checkOpen();
@@ -1429,7 +1429,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
} finally {
if (regionServer.metricsRegionServer != null) {
regionServer.metricsRegionServer.updateReplay(
- EnvironmentEdgeManager.currentTimeMillis() - before);
+ EnvironmentEdgeManager.currentTime() - before);
}
}
}
@@ -1622,7 +1622,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
@Override
public GetResponse get(final RpcController controller,
final GetRequest request) throws ServiceException {
- long before = EnvironmentEdgeManager.currentTimeMillis();
+ long before = EnvironmentEdgeManager.currentTime();
try {
checkOpen();
requestCount.increment();
@@ -1672,7 +1672,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
} finally {
if (regionServer.metricsRegionServer != null) {
regionServer.metricsRegionServer.updateGet(
- EnvironmentEdgeManager.currentTimeMillis() - before);
+ EnvironmentEdgeManager.currentTime() - before);
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
index a826675..9dd6c0f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
@@ -64,7 +64,7 @@ class RegionMergeRequest implements Runnable {
return;
}
try {
- final long startTime = EnvironmentEdgeManager.currentTimeMillis();
+ final long startTime = EnvironmentEdgeManager.currentTime();
RegionMergeTransaction mt = new RegionMergeTransaction(region_a,
region_b, forcible);
@@ -116,7 +116,7 @@ class RegionMergeRequest implements Runnable {
+ region_a + ", region_b=" + region_b + ",merged region="
+ mt.getMergedRegionInfo().getRegionNameAsString()
+ ". Region merge took "
- + StringUtils.formatTimeDiff(EnvironmentEdgeManager.currentTimeMillis(), startTime));
+ + StringUtils.formatTimeDiff(EnvironmentEdgeManager.currentTime(), startTime));
} catch (IOException ex) {
ex = ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex;
LOG.error("Merge failed " + this, ex);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransaction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransaction.java
index cb28c9a..fd5fff5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransaction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransaction.java
@@ -442,7 +442,7 @@ public class RegionMergeTransaction {
*/
public static HRegionInfo getMergedRegionInfo(final HRegionInfo a,
final HRegionInfo b) {
- long rid = EnvironmentEdgeManager.currentTimeMillis();
+ long rid = EnvironmentEdgeManager.currentTime();
// Regionid is timestamp. Merged region's id can't be less than that of
// merging regions else will insert at wrong location in hbase:meta
if (rid < a.getRegionId() || rid < b.getRegionId()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
index 205c7f8..f2e8bfb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
@@ -337,7 +337,7 @@ public class ScanQueryMatcher {
if ((!isUserScan)
&& timeToPurgeDeletes > 0
- && (EnvironmentEdgeManager.currentTimeMillis() - timestamp)
+ && (EnvironmentEdgeManager.currentTime() - timestamp)
<= timeToPurgeDeletes) {
return MatchCode.INCLUDE;
} else if (retainDeletesInOutput || mvccVersion > maxReadPointToTrackVersions) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
index faf9138..421f54e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
@@ -95,7 +95,7 @@ public class ServerNonceManager {
}
public void reportActivity() {
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
this.data = (this.data & ALL_FLAG_BITS) | (now << 3);
}
@@ -237,7 +237,7 @@ public class ServerNonceManager {
public void reportOperationFromWal(long group, long nonce, long writeTime) {
if (nonce == HConstants.NO_NONCE) return;
// Give the write time some slack in case the clocks are not synchronized.
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
if (now > writeTime + (deleteNonceGracePeriod * 1.5)) return;
OperationContext newResult = new OperationContext();
newResult.setState(OperationContext.DONT_PROCEED);
@@ -267,7 +267,7 @@ public class ServerNonceManager {
}
private void cleanUpOldNonces() {
- long cutoff = EnvironmentEdgeManager.currentTimeMillis() - deleteNonceGracePeriod;
+ long cutoff = EnvironmentEdgeManager.currentTime() - deleteNonceGracePeriod;
for (Map.Entry<NonceKey, OperationContext> entry : nonces.entrySet()) {
OperationContext oc = entry.getValue();
if (!oc.isExpired(cutoff)) continue;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransaction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransaction.java
index 30b55dd..2eb01f5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransaction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransaction.java
@@ -176,7 +176,7 @@ public class SplitTransaction {
* @return Daughter region id (timestamp) to use.
*/
private static long getDaughterRegionIdTimestamp(final HRegionInfo hri) {
- long rid = EnvironmentEdgeManager.currentTimeMillis();
+ long rid = EnvironmentEdgeManager.currentTime();
// Regionid is timestamp. Can't be less than that of parent else will insert
// at wrong location in hbase:meta (See HBASE-710).
if (rid < hri.getRegionId()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 1ef3e91..7160b30 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -122,7 +122,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
explicitColumnQuery = numCol > 0;
this.scan = scan;
this.columns = columns;
- oldestUnexpiredTS = EnvironmentEdgeManager.currentTimeMillis() - ttl;
+ oldestUnexpiredTS = EnvironmentEdgeManager.currentTime() - ttl;
this.minVersions = minVersions;
if (store != null && ((HStore)store).getHRegion() != null
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
index 1665713..92a86e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
@@ -79,7 +79,7 @@ public class StorefileRefresherChore extends Chore {
continue;
}
String encodedName = r.getRegionInfo().getEncodedName();
- long time = EnvironmentEdgeManager.currentTimeMillis();
+ long time = EnvironmentEdgeManager.currentTime();
if (!lastRefreshTimes.containsKey(encodedName)) {
lastRefreshTimes.put(encodedName, time);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
index 5c9a233..0fc64d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
@@ -63,7 +63,7 @@ public class CompactionRequest implements Comparable<CompactionRequest> {
* This ctor should be used by coprocessors that want to subclass CompactionRequest.
*/
public CompactionRequest() {
- this.selectionTime = EnvironmentEdgeManager.currentTimeMillis();
+ this.selectionTime = EnvironmentEdgeManager.currentTime();
this.timeInNanos = System.nanoTime();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
index 22697e9..9edb317 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
@@ -303,7 +303,7 @@ public class StripeCompactionPolicy extends CompactionPolicy {
if (cfTtl == Long.MAX_VALUE) {
return null; // minversion might be set, cannot delete old files
}
- long timestampCutoff = EnvironmentEdgeManager.currentTimeMillis() - cfTtl;
+ long timestampCutoff = EnvironmentEdgeManager.currentTime() - cfTtl;
// Merge the longest sequence of stripes where all files have expired, if any.
int start = -1, bestStart = -1, length = 0, bestLength = 0;
ArrayList<ImmutableList<StoreFile>> stripes = si.getStripes();
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index 8474836..99c6254 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -1919,7 +1919,7 @@ class FSHLog implements HLog, Syncable {
// TODO: WORK ON MAKING THIS APPEND FASTER. DOING WAY TOO MUCH WORK WITH CPs, PBing, etc.
atHeadOfRingBufferEventHandlerAppend();
- long start = EnvironmentEdgeManager.currentTimeMillis();
+ long start = EnvironmentEdgeManager.currentTime();
byte [] encodedRegionName = entry.getKey().getEncodedRegionName();
long regionSequenceId = HLog.NO_SEQUENCE_ID;
try {
@@ -1962,7 +1962,7 @@ class FSHLog implements HLog, Syncable {
coprocessorHost.postWALWrite(entry.getHRegionInfo(), entry.getKey(), entry.getEdit());
// Update metrics.
- postAppend(entry, EnvironmentEdgeManager.currentTimeMillis() - start);
+ postAppend(entry, EnvironmentEdgeManager.currentTime() - start);
} catch (Exception e) {
LOG.fatal("Could not append. Requesting close of hlog", e);
requestLogRoll();
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java
index 276e16c..41d0910 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java
@@ -103,7 +103,7 @@ public class HLogFactory {
// A hlog file could be under recovery, so it may take several
// tries to get it open. Instead of claiming it is corrupted, retry
// to open it up to 5 minutes by default.
- long startWaiting = EnvironmentEdgeManager.currentTimeMillis();
+ long startWaiting = EnvironmentEdgeManager.currentTime();
long openTimeout = conf.getInt("hbase.hlog.open.timeout", 300000) + startWaiting;
int nbAttempt = 0;
while (true) {
@@ -138,9 +138,9 @@ public class HLogFactory {
if (reporter != null && !reporter.progress()) {
throw new InterruptedIOException("Operation is cancelled");
}
- if (nbAttempt > 2 && openTimeout < EnvironmentEdgeManager.currentTimeMillis()) {
+ if (nbAttempt > 2 && openTimeout < EnvironmentEdgeManager.currentTime()) {
LOG.error("Can't open after " + nbAttempt + " attempts and "
- + (EnvironmentEdgeManager.currentTimeMillis() - startWaiting)
+ + (EnvironmentEdgeManager.currentTime() - startWaiting)
+ "ms " + " for " + path);
} else {
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
index cc10cb2..60eda98 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
@@ -209,7 +209,7 @@ public class HLogKey implements WritableComparable<HLogKey>, SequenceNumber {
*/
public HLogKey(final byte [] encodedRegionName, final TableName tablename, long logSeqNum,
long nonceGroup, long nonce) {
- init(encodedRegionName, tablename, logSeqNum, EnvironmentEdgeManager.currentTimeMillis(),
+ init(encodedRegionName, tablename, logSeqNum, EnvironmentEdgeManager.currentTime(),
EMPTY_UUIDS, nonceGroup, nonce);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
index 67b936f..8833524 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
@@ -1693,14 +1693,14 @@ public class HLogSplitter {
private HRegionLocation waitUntilRegionOnline(HRegionLocation loc, byte[] row,
final long timeout, AtomicBoolean isRecovering)
throws IOException {
- final long endTime = EnvironmentEdgeManager.currentTimeMillis() + timeout;
+ final long endTime = EnvironmentEdgeManager.currentTime() + timeout;
final long pause = conf.getLong(HConstants.HBASE_CLIENT_PAUSE,
HConstants.DEFAULT_HBASE_CLIENT_PAUSE);
boolean reloadLocation = false;
TableName tableName = loc.getRegionInfo().getTable();
int tries = 0;
Throwable cause = null;
- while (endTime > EnvironmentEdgeManager.currentTimeMillis()) {
+ while (endTime > EnvironmentEdgeManager.currentTime()) {
try {
// Try and get regioninfo from the hosting server.
HConnection hconn = getConnectionByTableName(tableName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
index 4b38027..9ba3353 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
@@ -268,7 +268,7 @@ public class WALEdit implements Writable, HeapSize {
public static WALEdit createFlushWALEdit(HRegionInfo hri, FlushDescriptor f) {
KeyValue kv = new KeyValue(getRowForRegion(hri), METAFAMILY, FLUSH,
- EnvironmentEdgeManager.currentTimeMillis(), f.toByteArray());
+ EnvironmentEdgeManager.currentTime(), f.toByteArray());
return new WALEdit().add(kv);
}
@@ -282,7 +282,7 @@ public class WALEdit implements Writable, HeapSize {
public static WALEdit createRegionEventWALEdit(HRegionInfo hri,
RegionEventDescriptor regionEventDesc) {
KeyValue kv = new KeyValue(getRowForRegion(hri), METAFAMILY, REGION_EVENT,
- EnvironmentEdgeManager.currentTimeMillis(), regionEventDesc.toByteArray());
+ EnvironmentEdgeManager.currentTime(), regionEventDesc.toByteArray());
return new WALEdit().add(kv);
}
@@ -301,7 +301,7 @@ public class WALEdit implements Writable, HeapSize {
public static WALEdit createCompaction(final HRegionInfo hri, final CompactionDescriptor c) {
byte [] pbbytes = c.toByteArray();
KeyValue kv = new KeyValue(getRowForRegion(hri), METAFAMILY, COMPACTION,
- EnvironmentEdgeManager.currentTimeMillis(), pbbytes);
+ EnvironmentEdgeManager.currentTime(), pbbytes);
return new WALEdit().add(kv); //replication scope null so that this won't be replicated
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java
index 97e9b86..58bbe83 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java
@@ -120,7 +120,7 @@ public class WALEditsReplaySink {
regionEntries.add(entry);
}
- long startTime = EnvironmentEdgeManager.currentTimeMillis();
+ long startTime = EnvironmentEdgeManager.currentTime();
// replaying edits by region
for (Map.Entry<HRegionInfo, List<HLog.Entry>> _entry : entriesByRegion.entrySet()) {
@@ -139,7 +139,7 @@ public class WALEditsReplaySink {
}
}
- long endTime = EnvironmentEdgeManager.currentTimeMillis() - startTime;
+ long endTime = EnvironmentEdgeManager.currentTime() - startTime;
LOG.debug("number of rows:" + entries.size() + " are sent by batch! spent " + endTime
+ "(ms)!");
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
index 94dec7c..eadaead 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
@@ -83,7 +83,7 @@ public class MetricsSource {
* @param timestamp write time of the edit
*/
public void setAgeOfLastShippedOp(long timestamp) {
- long age = EnvironmentEdgeManager.currentTimeMillis() - timestamp;
+ long age = EnvironmentEdgeManager.currentTime() - timestamp;
rms.setGauge(ageOfLastShippedOpKey, age);
rms.setGauge(SOURCE_AGE_OF_LAST_SHIPPED_OP, age);
this.lastTimestamp = timestamp;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationThrottler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationThrottler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationThrottler.java
index 0aba8a6..93c2ee8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationThrottler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationThrottler.java
@@ -42,7 +42,7 @@ public class ReplicationThrottler {
this.enabled = this.bandwidth > 0;
if (this.enabled) {
this.cyclePushSize = 0;
- this.cycleStartTick = EnvironmentEdgeManager.currentTimeMillis();
+ this.cycleStartTick = EnvironmentEdgeManager.currentTime();
}
}
@@ -67,7 +67,7 @@ public class ReplicationThrottler {
}
long sleepTicks = 0;
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
// 1. if cyclePushSize exceeds bandwidth, we need to sleep some
// following cycles to amortize, this case can occur when a single push
// exceeds the bandwidth
@@ -115,7 +115,7 @@ public class ReplicationThrottler {
*/
public void resetStartTick() {
if (this.enabled) {
- this.cycleStartTick = EnvironmentEdgeManager.currentTimeMillis();
+ this.cycleStartTick = EnvironmentEdgeManager.currentTime();
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 53d3f35..bda02b3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -657,7 +657,7 @@ public class AccessController extends BaseMasterAndRegionObserver
// any cells found there inclusively.
long latestTs = Math.max(opTs, latestCellTs);
if (latestTs == 0 || latestTs == HConstants.LATEST_TIMESTAMP) {
- latestTs = EnvironmentEdgeManager.currentTimeMillis();
+ latestTs = EnvironmentEdgeManager.currentTime();
}
get.setTimeRange(0, latestTs + 1);
// In case of Put operation we set to read all versions. This was done to consider the case
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
index 2892c51..5cdddb8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
@@ -126,7 +126,7 @@ public class AuthenticationTokenSecretManager
@Override
protected byte[] createPassword(AuthenticationTokenIdentifier identifier) {
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
AuthenticationKey secretKey = currentKey;
identifier.setKeyId(secretKey.getKeyId());
identifier.setIssueDate(now);
@@ -139,7 +139,7 @@ public class AuthenticationTokenSecretManager
@Override
public byte[] retrievePassword(AuthenticationTokenIdentifier identifier)
throws InvalidToken {
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
if (identifier.getExpirationDate() < now) {
throw new InvalidToken("Token has expired");
}
@@ -223,7 +223,7 @@ public class AuthenticationTokenSecretManager
return;
}
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
Iterator<AuthenticationKey> iter = allKeys.values().iterator();
while (iter.hasNext()) {
AuthenticationKey key = iter.next();
@@ -247,7 +247,7 @@ public class AuthenticationTokenSecretManager
return;
}
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
AuthenticationKey prev = currentKey;
AuthenticationKey newKey = new AuthenticationKey(++idSeq,
Long.MAX_VALUE, // don't allow to expire until it's replaced by a new key
@@ -314,7 +314,7 @@ public class AuthenticationTokenSecretManager
isMaster = true;
while (!stopped) {
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
// clear any expired
removeExpiredKeys();
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
index 203f6de..03ba8b8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
@@ -231,9 +231,9 @@ public class SnapshotDescriptionUtils {
// set the creation time, if one hasn't been set
long time = snapshot.getCreationTime();
if (time == SnapshotDescriptionUtils.NO_SNAPSHOT_START_TIME_SPECIFIED) {
- time = EnvironmentEdgeManager.currentTimeMillis();
+ time = EnvironmentEdgeManager.currentTime();
LOG.debug("Creation time not specified, setting to:" + time + " (current time:"
- + EnvironmentEdgeManager.currentTimeMillis() + ").");
+ + EnvironmentEdgeManager.currentTime() + ").");
SnapshotDescription.Builder builder = snapshot.toBuilder();
builder.setCreationTime(time);
snapshot = builder.build();
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index 3df9e8b..da76251 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -179,7 +179,7 @@ public class ConnectionCache {
private boolean closed;
ConnectionInfo(HConnection conn, String user) {
- lastAccessTime = EnvironmentEdgeManager.currentTimeMillis();
+ lastAccessTime = EnvironmentEdgeManager.currentTime();
connection = conn;
closed = false;
userName = user;
@@ -194,13 +194,13 @@ public class ConnectionCache {
connections.remove(userName);
return false;
}
- lastAccessTime = EnvironmentEdgeManager.currentTimeMillis();
+ lastAccessTime = EnvironmentEdgeManager.currentTime();
return true;
}
synchronized boolean timedOut(int maxIdleTime) {
long timeoutTime = lastAccessTime + maxIdleTime;
- if (EnvironmentEdgeManager.currentTimeMillis() > timeoutTime) {
+ if (EnvironmentEdgeManager.currentTime() > timeoutTime) {
connections.remove(userName);
closed = true;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
index acdbd08..cb8f751 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
@@ -171,7 +171,7 @@ public class FSHDFSUtils extends FSUtils {
final Configuration conf, final CancelableProgressable reporter)
throws IOException {
LOG.info("Recovering lease on dfs file " + p);
- long startWaiting = EnvironmentEdgeManager.currentTimeMillis();
+ long startWaiting = EnvironmentEdgeManager.currentTime();
// Default is 15 minutes. It's huge, but the idea is that if we have a major issue, HDFS
// usually needs 10 minutes before marking the nodes as dead. So we're putting ourselves
// beyond that limit 'to be safe'.
@@ -200,8 +200,8 @@ public class FSHDFSUtils extends FSUtils {
} else {
// Cycle here until subsequentPause elapses. While spinning, check isFileClosed if
// available (should be in hadoop 2.0.5... not in hadoop 1 though.
- long localStartWaiting = EnvironmentEdgeManager.currentTimeMillis();
- while ((EnvironmentEdgeManager.currentTimeMillis() - localStartWaiting) <
+ long localStartWaiting = EnvironmentEdgeManager.currentTime();
+ while ((EnvironmentEdgeManager.currentTime() - localStartWaiting) <
subsequentPause) {
Thread.sleep(conf.getInt("hbase.lease.recovery.pause", 1000));
if (findIsFileClosedMeth) {
@@ -232,7 +232,7 @@ public class FSHDFSUtils extends FSUtils {
boolean checkIfTimedout(final Configuration conf, final long recoveryTimeout,
final int nbAttempt, final Path p, final long startWaiting) {
- if (recoveryTimeout < EnvironmentEdgeManager.currentTimeMillis()) {
+ if (recoveryTimeout < EnvironmentEdgeManager.currentTime()) {
LOG.warn("Cannot recoverLease after trying for " +
conf.getInt("hbase.lease.recovery.timeout", 900000) +
"ms (hbase.lease.recovery.timeout); continuing, but may be DATALOSS!!!; " +
@@ -279,7 +279,7 @@ public class FSHDFSUtils extends FSUtils {
*/
private String getLogMessageDetail(final int nbAttempt, final Path p, final long startWaiting) {
return "attempt=" + nbAttempt + " on file=" + p + " after " +
- (EnvironmentEdgeManager.currentTimeMillis() - startWaiting) + "ms";
+ (EnvironmentEdgeManager.currentTime() - startWaiting) + "ms";
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 580ca05..6f80ec6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -1729,7 +1729,7 @@ public abstract class FSUtils {
public static boolean renameAndSetModifyTime(final FileSystem fs, final Path src, final Path dest)
throws IOException {
// set the modify time for TimeToLive Cleaner
- fs.setTimes(src, EnvironmentEdgeManager.currentTimeMillis(), -1);
+ fs.setTimes(src, EnvironmentEdgeManager.currentTime(), -1);
return fs.rename(src, dest);
}
@@ -1808,7 +1808,7 @@ public abstract class FSUtils {
throws IOException {
FileSystem fs = FileSystem.get(conf);
Path rootPath = FSUtils.getRootDir(conf);
- long startTime = EnvironmentEdgeManager.currentTimeMillis();
+ long startTime = EnvironmentEdgeManager.currentTime();
Path queryPath;
// The table files are in ${hbase.rootdir}/data/<namespace>/<table>/*
if (null == desiredTable) {
@@ -1899,7 +1899,7 @@ public abstract class FSUtils {
}
}
- long overhead = EnvironmentEdgeManager.currentTimeMillis() - startTime;
+ long overhead = EnvironmentEdgeManager.currentTime() - startTime;
String overheadMsg = "Scan DFS for locality info takes " + overhead + " ms";
LOG.info(overheadMsg);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ManualEnvironmentEdge.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ManualEnvironmentEdge.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ManualEnvironmentEdge.java
index e33bf0c..dcb072e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ManualEnvironmentEdge.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ManualEnvironmentEdge.java
@@ -39,7 +39,7 @@ public class ManualEnvironmentEdge implements EnvironmentEdge {
}
@Override
- public long currentTimeMillis() {
+ public long currentTime() {
return this.value;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableLockChecker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableLockChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableLockChecker.java
index a7afc10..3e069d9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableLockChecker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableLockChecker.java
@@ -50,7 +50,7 @@ public class TableLockChecker {
public void checkTableLocks() throws IOException {
TableLockManager tableLockManager
= TableLockManager.createTableLockManager(zkWatcher.getConfiguration(), zkWatcher, null);
- final long expireDate = EnvironmentEdgeManager.currentTimeMillis() - expireTimeout;
+ final long expireDate = EnvironmentEdgeManager.currentTime() - expireTimeout;
MetadataHandler handler = new MetadataHandler() {
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/lock/ZKInterProcessLockBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/lock/ZKInterProcessLockBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/lock/ZKInterProcessLockBase.java
index c666c14..7e49df5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/lock/ZKInterProcessLockBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/lock/ZKInterProcessLockBase.java
@@ -167,7 +167,7 @@ public abstract class ZKInterProcessLockBase implements InterProcessLock {
throws IOException, InterruptedException {
boolean hasTimeout = timeoutMs != -1;
long waitUntilMs =
- hasTimeout ?EnvironmentEdgeManager.currentTimeMillis() + timeoutMs : -1;
+ hasTimeout ? EnvironmentEdgeManager.currentTime() + timeoutMs : -1;
String createdZNode;
try {
createdZNode = createLockZNode();
@@ -196,7 +196,7 @@ public abstract class ZKInterProcessLockBase implements InterProcessLock {
if (ZKUtil.setWatchIfNodeExists(zkWatcher, zkPathToWatch)) {
// Wait for the watcher to fire
if (hasTimeout) {
- long remainingMs = waitUntilMs - EnvironmentEdgeManager.currentTimeMillis();
+ long remainingMs = waitUntilMs - EnvironmentEdgeManager.currentTime();
if (remainingMs < 0 ||
!deletedLatch.await(remainingMs, TimeUnit.MILLISECONDS)) {
LOG.warn("Unable to acquire the lock in " + timeoutMs +
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index b59db4e..34bd90b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -4659,7 +4659,7 @@ public class TestFromClientSide {
HTable table = TEST_UTIL.createTable(tableName, new byte[][] { FAMILY },
conf, Integer.MAX_VALUE);
- final long ts = EnvironmentEdgeManager.currentTimeMillis();
+ final long ts = EnvironmentEdgeManager.currentTime();
Get get = new Get(ROW);
get.addColumn(FAMILY, QUALIFIER);
get.setMaxVersions();
@@ -4696,7 +4696,7 @@ public class TestFromClientSide {
final HTable table = TEST_UTIL.createTable(tableName,
new byte[][] { FAMILY }, conf, 3);
- final long ts = EnvironmentEdgeManager.currentTimeMillis();
+ final long ts = EnvironmentEdgeManager.currentTime();
final Get get = new Get(ROW);
get.addColumn(FAMILY, QUALIFIER);
get.setMaxVersions();
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
index 54232b6..0b84ec9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
@@ -1128,7 +1128,7 @@ public class TestHCM {
ManualEnvironmentEdge timeMachine = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(timeMachine);
try {
- long timeBase = timeMachine.currentTimeMillis();
+ long timeBase = timeMachine.currentTime();
long largeAmountOfTime = ANY_PAUSE * 1000;
ConnectionManager.ServerErrorTracker tracker =
new ConnectionManager.ServerErrorTracker(largeAmountOfTime, 100);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index a7b3319..11a1857 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -472,14 +472,14 @@ public class TestScannersFromClientSide {
HRegionServer rs = cluster.getRegionServer(i);
ProtobufUtil.closeRegion(
rs.getRSRpcServices(), rs.getServerName(), regionName);
- long startTime = EnvironmentEdgeManager.currentTimeMillis();
+ long startTime = EnvironmentEdgeManager.currentTime();
long timeOut = 300000;
while (true) {
if (rs.getOnlineRegion(regionName) == null) {
break;
}
assertTrue("Timed out in closing the testing region",
- EnvironmentEdgeManager.currentTimeMillis() < startTime + timeOut);
+ EnvironmentEdgeManager.currentTime() < startTime + timeOut);
Thread.sleep(500);
}
@@ -489,13 +489,13 @@ public class TestScannersFromClientSide {
states.regionOffline(hri);
states.updateRegionState(hri, State.OPENING);
ProtobufUtil.openRegion(rs.getRSRpcServices(), rs.getServerName(), hri);
- startTime = EnvironmentEdgeManager.currentTimeMillis();
+ startTime = EnvironmentEdgeManager.currentTime();
while (true) {
if (rs.getOnlineRegion(regionName) != null) {
break;
}
assertTrue("Timed out in open the testing region",
- EnvironmentEdgeManager.currentTimeMillis() < startTime + timeOut);
+ EnvironmentEdgeManager.currentTime() < startTime + timeOut);
Thread.sleep(500);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index 27b807f..b7319b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -473,12 +473,12 @@ public class TestRegionObserverInterface {
@Override
public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e,
Store store, StoreFile resultFile) {
- lastCompaction = EnvironmentEdgeManager.currentTimeMillis();
+ lastCompaction = EnvironmentEdgeManager.currentTime();
}
@Override
public void postFlush(ObserverContext<RegionCoprocessorEnvironment> e) {
- lastFlush = EnvironmentEdgeManager.currentTimeMillis();
+ lastFlush = EnvironmentEdgeManager.currentTime();
}
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index 0264d76..5706cb3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -187,7 +187,7 @@ public class TestWALObserver {
assertFalse(modifiedFamily1);
// it's where WAL write cp should occur.
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
log.append(hri, hri.getTable(), edit, now, htd, sequenceId);
// the edit shall have been change now by the coprocessor.
@@ -242,7 +242,7 @@ public class TestWALObserver {
HLog wal = createWAL(this.conf);
// Put p = creatPutWith2Families(TEST_ROW);
WALEdit edit = new WALEdit();
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
// addFamilyMapToWALEdit(p.getFamilyMap(), edit);
final int countPerFamily = 1000;
// for (HColumnDescriptor hcd: hri.getTableDesc().getFamilies()) {
@@ -379,9 +379,8 @@ public class TestWALObserver {
byte[] qualifierBytes = Bytes.toBytes(Integer.toString(j));
byte[] columnBytes = Bytes.toBytes(familyStr + ":" + Integer.toString(j));
WALEdit edit = new WALEdit();
- edit.add(new KeyValue(rowName, family, qualifierBytes, ee
- .currentTimeMillis(), columnBytes));
- wal.append(hri, tableName, edit, ee.currentTimeMillis(), htd, sequenceId);
+ edit.add(new KeyValue(rowName, family, qualifierBytes, ee.currentTime(), columnBytes));
+ wal.append(hri, tableName, edit, ee.currentTime(), htd, sequenceId);
}
}
@@ -406,5 +405,4 @@ public class TestWALObserver {
htd.addFamily(c);
return htd;
}
-
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 50a9b9f5..c7050c8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -375,7 +375,7 @@ public class TestCacheOnWrite {
.setDataBlockEncoding(encoder.getDataBlockEncoding())
);
int rowIdx = 0;
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
for (int iFile = 0; iFile < 5; ++iFile) {
for (int iRow = 0; iRow < 500; ++iRow) {
String rowStr = "" + (rowIdx * rowIdx * rowIdx) + "row" + iFile + "_" +
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index 243ff8d..af982d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -111,7 +111,7 @@ public class TestScannerSelectionUsingTTL {
HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(info.getEncodedName()),
conf, htd);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
long version = 0; //make sure each new set of Put's have a new ts
for (int iFile = 0; iFile < totalNumFiles; ++iFile) {
if (iFile == NUM_EXPIRED_FILES) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java
index 519f806..a5a707c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java
@@ -713,11 +713,11 @@ public class TestAssignmentManagerOnCluster {
HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
// Region will be opened, but it won't complete
master.assignRegion(hri);
- long end = EnvironmentEdgeManager.currentTimeMillis() + 20000;
+ long end = EnvironmentEdgeManager.currentTime() + 20000;
// Wait till postOpen is called
while (!MyRegionObserver.postOpenCalled ) {
assertFalse("Timed out waiting for postOpen to be called",
- EnvironmentEdgeManager.currentTimeMillis() > end);
+ EnvironmentEdgeManager.currentTime() > end);
Thread.sleep(300);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
index c9fde2d..299fb06 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
@@ -61,7 +61,7 @@ public class TestClusterStatusPublisher {
@Override
protected List<Pair<ServerName, Long>> getDeadServers(long since) {
List<Pair<ServerName, Long>> res = new ArrayList<Pair<ServerName, Long>>();
- switch ((int) EnvironmentEdgeManager.currentTimeMillis()) {
+ switch ((int) EnvironmentEdgeManager.currentTime()) {
case 2:
res.add(new Pair<ServerName, Long>(ServerName.valueOf("hn", 10, 10), 1L));
break;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index c51428e..5fc079f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -1016,18 +1016,18 @@ public class TestDistributedLogSplitting {
rsts.get(1).getRegionServer().abort("testing");
rsts.get(2).getRegionServer().abort("testing");
- long start = EnvironmentEdgeManager.currentTimeMillis();
+ long start = EnvironmentEdgeManager.currentTime();
while (cluster.getLiveRegionServerThreads().size() > (NUM_RS - 3)) {
- if (EnvironmentEdgeManager.currentTimeMillis() - start > 60000) {
+ if (EnvironmentEdgeManager.currentTime() - start > 60000) {
assertTrue(false);
}
Thread.sleep(200);
}
- start = EnvironmentEdgeManager.currentTimeMillis();
+ start = EnvironmentEdgeManager.currentTime();
while (HBaseTestingUtility.getAllOnlineRegions(cluster).size()
< (NUM_REGIONS_TO_CREATE + 1)) {
- if (EnvironmentEdgeManager.currentTimeMillis() - start > 60000) {
+ if (EnvironmentEdgeManager.currentTime() - start > 60000) {
assertTrue("Timedout", false);
}
Thread.sleep(200);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
index 000734c..5f56d30 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
@@ -140,7 +140,7 @@ public class TestHFileCleaner {
// set a custom edge manager to handle time checking
EnvironmentEdge setTime = new EnvironmentEdge() {
@Override
- public long currentTimeMillis() {
+ public long currentTime() {
return createTime;
}
};
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 16321b4..9d2dda2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -182,7 +182,7 @@ public class TestSnapshotFromMaster {
Mockito.when(mockHandler.getSnapshot()).thenReturn(desc);
Mockito.when(mockHandler.isFinished()).thenReturn(new Boolean(true));
Mockito.when(mockHandler.getCompletionTimestamp())
- .thenReturn(EnvironmentEdgeManager.currentTimeMillis());
+ .thenReturn(EnvironmentEdgeManager.currentTime());
master.getSnapshotManagerForTesting()
.setSnapshotHandlerForTesting(TABLE_NAME, mockHandler);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index 3743fdd..383f424 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
@@ -928,7 +928,7 @@ public class TestDefaultMemStore extends TestCase {
private class EnvironmentEdgeForMemstoreTest implements EnvironmentEdge {
long t = 1234;
@Override
- public long currentTimeMillis() {
+ public long currentTime() {
return t;
}
public void setCurrentTimeMillis(long t) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index 4f8794c..52f261a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -74,7 +74,7 @@ public class TestKeepDeletes {
* compact timestamps are tracked. Otherwise, forced major compaction will not purge
* Delete's having the same timestamp. see ScanQueryMatcher.match():
* if (retainDeletesInOutput
- * || (!isUserScan && (EnvironmentEdgeManager.currentTimeMillis() - timestamp)
+ * || (!isUserScan && (EnvironmentEdgeManager.currentTime() - timestamp)
* <= timeToPurgeDeletes) ... )
*
*/
@@ -99,7 +99,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, true);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
region.put(p);
@@ -196,7 +196,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, false);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
region.put(p);
@@ -241,7 +241,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, false);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
region.put(p);
@@ -309,7 +309,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, true);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
region.put(p);
@@ -399,7 +399,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, true);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Delete d = new Delete(T1, ts);
d.deleteColumns(c0, c0, ts);
@@ -442,7 +442,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, true);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
@@ -505,7 +505,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, true);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
p.add(c0, c1, T1);
@@ -587,7 +587,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, true);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
region.put(p);
@@ -679,7 +679,7 @@ public class TestKeepDeletes {
HConstants.FOREVER, true);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis();
+ long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
@@ -730,7 +730,7 @@ public class TestKeepDeletes {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, true);
HRegion region = hbu.createLocalHRegion(htd, null, null);
- long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000; // 2s in the past
+ long ts = EnvironmentEdgeManager.currentTime() - 2000; // 2s in the past
Put p = new Put(T1, ts);
p.add(c0, c0, T3);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
index 73a712f..cbb9018 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
@@ -69,7 +69,7 @@ public class TestMinVersions {
try {
// 2s in the past
- long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
+ long ts = EnvironmentEdgeManager.currentTime() - 2000;
Put p = new Put(T1, ts);
p.add(c0, c0, T1);
@@ -116,7 +116,7 @@ public class TestMinVersions {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, false);
HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
- long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
+ long ts = EnvironmentEdgeManager.currentTime() - 2000;
try {
Put p = new Put(T1, ts-1);
@@ -171,7 +171,7 @@ public class TestMinVersions {
HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
- long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
+ long ts = EnvironmentEdgeManager.currentTime() - 2000;
try {
Put p = new Put(T1, ts-2);
@@ -229,7 +229,7 @@ public class TestMinVersions {
HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
- long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
+ long ts = EnvironmentEdgeManager.currentTime() - 2000;
try {
// 2nd version
@@ -305,7 +305,7 @@ public class TestMinVersions {
try {
// 2s in the past
- long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
+ long ts = EnvironmentEdgeManager.currentTime() - 2000;
// 1st version
Put p = new Put(T1, ts-3);
@@ -396,7 +396,7 @@ public class TestMinVersions {
final byte [] c1 = COLUMNS[1];
// 2s in the past
- long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
+ long ts = EnvironmentEdgeManager.currentTime() - 2000;
try {
Put p = new Put(T1, ts-3);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
index 4945ad1..0465b93 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
@@ -94,7 +94,7 @@ public class TestQueryMatcher extends HBaseTestCase {
// 2,4,5
ScanQueryMatcher qm = new ScanQueryMatcher(scan, new ScanInfo(fam2,
0, 1, ttl, false, 0, rowComparator), get.getFamilyMap().get(fam2),
- EnvironmentEdgeManager.currentTimeMillis() - ttl);
+ EnvironmentEdgeManager.currentTime() - ttl);
List<KeyValue> memstore = new ArrayList<KeyValue>();
memstore.add(new KeyValue(row1, fam2, col1, 1, data));
@@ -176,7 +176,7 @@ public class TestQueryMatcher extends HBaseTestCase {
ScanQueryMatcher qm = new ScanQueryMatcher(scan, new ScanInfo(fam2,
0, 1, ttl, false, 0, rowComparator), null,
- EnvironmentEdgeManager.currentTimeMillis() - ttl);
+ EnvironmentEdgeManager.currentTime() - ttl);
List<KeyValue> memstore = new ArrayList<KeyValue>();
memstore.add(new KeyValue(row1, fam2, col1, 1, data));
@@ -227,7 +227,7 @@ public class TestQueryMatcher extends HBaseTestCase {
ScanQueryMatcher.MatchCode.DONE
};
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
ScanQueryMatcher qm = new ScanQueryMatcher(scan, new ScanInfo(fam2,
0, 1, testTTL, false, 0, rowComparator), get.getFamilyMap().get(fam2),
now - testTTL);
@@ -281,7 +281,7 @@ public class TestQueryMatcher extends HBaseTestCase {
ScanQueryMatcher.MatchCode.DONE
};
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
ScanQueryMatcher qm = new ScanQueryMatcher(scan, new ScanInfo(fam2,
0, 1, testTTL, false, 0, rowComparator), null,
now - testTTL);
@@ -336,7 +336,7 @@ public class TestQueryMatcher extends HBaseTestCase {
private void testDropDeletes(
byte[] from, byte[] to, byte[][] rows, MatchCode... expected) throws IOException {
- long now = EnvironmentEdgeManager.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
// Set time to purge deletes to negative value to avoid it ever happening.
ScanInfo scanInfo = new ScanInfo(fam2, 0, 1, ttl, false, -1L, rowComparator);
NavigableSet<byte[]> cols = get.getFamilyMap().get(fam2);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
index db083ee..c9ad8bf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
@@ -133,10 +133,10 @@ public class TestRegionMergeTransactionOnCluster {
MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
AssignmentManager am = cluster.getMaster().getAssignmentManager();
RegionStates regionStates = am.getRegionStates();
- long start = EnvironmentEdgeManager.currentTimeMillis();
+ long start = EnvironmentEdgeManager.currentTime();
while (!regionStates.isRegionInState(hri, State.MERGED)) {
assertFalse("Timed out in waiting one merged region to be in state MERGED",
- EnvironmentEdgeManager.currentTimeMillis() - start > 60000);
+ EnvironmentEdgeManager.currentTime() - start > 60000);
Thread.sleep(500);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 73219d7..3fb1a87 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -799,10 +799,10 @@ public class TestSplitTransactionOnCluster {
hri = region.getRegionInfo(); // split parent
AssignmentManager am = cluster.getMaster().getAssignmentManager();
RegionStates regionStates = am.getRegionStates();
- long start = EnvironmentEdgeManager.currentTimeMillis();
+ long start = EnvironmentEdgeManager.currentTime();
while (!regionStates.isRegionInState(hri, State.SPLIT)) {
assertFalse("Timed out in waiting split parent to be in state SPLIT",
- EnvironmentEdgeManager.currentTimeMillis() - start > 60000);
+ EnvironmentEdgeManager.currentTime() - start > 60000);
Thread.sleep(500);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 9600932..99834ed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -297,7 +297,7 @@ public class TestStore {
// store files will be (this.store.ttl / storeFileNum)
for (int i = 1; i <= storeFileNum; i++) {
LOG.info("Adding some data for the store file #" + i);
- timeStamp = EnvironmentEdgeManager.currentTimeMillis();
+ timeStamp = EnvironmentEdgeManager.currentTime();
this.store.add(new KeyValue(row, family, qf1, timeStamp, (byte[]) null));
this.store.add(new KeyValue(row, family, qf2, timeStamp, (byte[]) null));
this.store.add(new KeyValue(row, family, qf3, timeStamp, (byte[]) null));
@@ -318,7 +318,7 @@ public class TestStore {
assertEquals(storeFileNum - i, sfs.size());
// Ensure only non-expired files remain.
for (StoreFile sf : sfs) {
- assertTrue(sf.getReader().getMaxTimestamp() >= (edge.currentTimeMillis() - storeTtl));
+ assertTrue(sf.getReader().getMaxTimestamp() >= (edge.currentTime() - storeTtl));
}
// Let the next store file expired.
edge.incrementTime(sleepTime);
@@ -328,7 +328,7 @@ public class TestStore {
// Assert the last expired file is not removed.
assertEquals(1, sfs.size());
long ts = sfs.iterator().next().getReader().getMaxTimestamp();
- assertTrue(ts < (edge.currentTimeMillis() - storeTtl));
+ assertTrue(ts < (edge.currentTime() - storeTtl));
}
@Test
@@ -661,7 +661,7 @@ public class TestStore {
long oldValue = 1L;
long newValue = 3L;
this.store.add(new KeyValue(row, family, qf1,
- EnvironmentEdgeManager.currentTimeMillis(),
+ EnvironmentEdgeManager.currentTime(),
Bytes.toBytes(oldValue)));
// snapshot the store.
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index 05ac4b4..f42ff0f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -27,25 +27,18 @@ import java.util.Arrays;
import java.util.List;
import java.util.NavigableSet;
import java.util.TreeSet;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.MediumTests;
-import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
-import org.apache.hadoop.hbase.util.Threads;
import org.junit.experimental.categories.Category;
// Can't be small as it plays with EnvironmentEdgeManager
@@ -512,7 +505,7 @@ public class TestStoreScanner extends TestCase {
try {
final long now = System.currentTimeMillis();
EnvironmentEdgeManagerTestHelper.injectEdge(new EnvironmentEdge() {
- public long currentTimeMillis() {
+ public long currentTime() {
return now;
}
});
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
index 4132a5c..653dab6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
@@ -784,14 +784,14 @@ public class TestWALReplay {
// Add an edit to another family, should be skipped.
WALEdit edit = new WALEdit();
- long now = ee.currentTimeMillis();
+ long now = ee.currentTime();
edit.add(new KeyValue(rowName, Bytes.toBytes("another family"), rowName,
now, rowName));
wal.append(hri, tableName, edit, now, htd, sequenceId);
// Delete the c family to verify deletes make it over.
edit = new WALEdit();
- now = ee.currentTimeMillis();
+ now = ee.currentTime();
edit.add(new KeyValue(rowName, Bytes.toBytes("c"), null, now,
KeyValue.Type.DeleteFamily));
wal.append(hri, tableName, edit, now, htd, sequenceId);
@@ -976,8 +976,8 @@ public class TestWALReplay {
byte[] columnBytes = Bytes.toBytes(familyStr + ":" + Integer.toString(j));
WALEdit edit = new WALEdit();
edit.add(new KeyValue(rowName, family, qualifierBytes,
- ee.currentTimeMillis(), columnBytes));
- wal.append(hri, tableName, edit, ee.currentTimeMillis(), htd, sequenceId);
+ ee.currentTime(), columnBytes));
+ wal.append(hri, tableName, edit, ee.currentTime(), htd, sequenceId);
}
}
@@ -989,7 +989,7 @@ public class TestWALReplay {
for (int j = 0; j < count; j++) {
byte[] qualifier = Bytes.toBytes(qualifierPrefix + Integer.toString(j));
Put p = new Put(rowName);
- p.add(family, qualifier, ee.currentTimeMillis(), rowName);
+ p.add(family, qualifier, ee.currentTime(), rowName);
r.put(p);
puts.add(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index 3134d2a..764f01b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -116,7 +116,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
final byte[] v3 = Bytes.toBytes("v3");
htable1 = new HTable(conf1, tableName);
- long t = EnvironmentEdgeManager.currentTimeMillis();
+ long t = EnvironmentEdgeManager.currentTime();
// create three versions for "row"
Put put = new Put(row);
put.add(famName, row, t, v1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
index e3ea397..fd8c9a1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
@@ -367,7 +367,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try {
// Store read only ACL at a future time
Put p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1,
- EnvironmentEdgeManager.currentTimeMillis() + 1000000,
+ EnvironmentEdgeManager.currentTime() + 1000000,
ZERO);
p.setACL(USER_OTHER.getShortName(), new Permission(Permission.Action.READ));
t.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3bfbd062/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
index 8f8d92d..1773027 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
@@ -117,7 +117,7 @@ public class TestTokenAuthentication {
public TokenServer(Configuration conf) throws IOException {
this.conf = conf;
- this.startcode = EnvironmentEdgeManager.currentTimeMillis();
+ this.startcode = EnvironmentEdgeManager.currentTime();
// Server to handle client requests.
String hostname =
Strings.domainNamePointerToHostName(DNS.getDefaultHost("default", "default"));