Posted to commits@phoenix.apache.org by gj...@apache.org on 2020/01/21 22:08:40 UTC

[phoenix] branch master updated: PHOENIX-5677 - Replace System.currentTimeMillis with EnvironmentEdgeManager in non-test code

This is an automated email from the ASF dual-hosted git repository.

gjacoby pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
     new 5968d3b  PHOENIX-5677 - Replace System.currentTimeMillis with EnvironmentEdgeManager in non-test code
5968d3b is described below

commit 5968d3ba8348067775dc2751f06eaa8155a633d5
Author: Geoffrey Jacoby <gj...@apache.org>
AuthorDate: Mon Jan 13 19:58:05 2020 -0800

    PHOENIX-5677 - Replace System.currentTimeMillis with EnvironmentEdgeManager in non-test code
---
 .../apache/phoenix/cache/ServerCacheClient.java    |  7 +++---
 .../UngroupedAggregateRegionObserver.java          |  2 +-
 .../coprocessor/tasks/DropChildViewsTask.java      |  3 ++-
 .../org/apache/phoenix/execute/HashJoinPlan.java   |  3 ++-
 .../org/apache/phoenix/execute/MutationState.java  |  6 ++---
 .../apache/phoenix/iterate/ParallelIterators.java  |  7 ++++--
 .../phoenix/iterate/SpoolingResultIterator.java    |  5 ++--
 .../org/apache/phoenix/jdbc/PhoenixConnection.java |  5 ++--
 .../org/apache/phoenix/jdbc/PhoenixStatement.java  |  5 ++--
 .../phoenix/job/AbstractRoundRobinQueue.java       | 14 ++++++-----
 .../java/org/apache/phoenix/job/JobManager.java    | 17 ++++++++-----
 .../apache/phoenix/mapreduce/OrphanViewTool.java   |  3 ++-
 .../index/IndexScrutinyMapperForTest.java          |  4 +--
 .../phoenix/query/ConnectionQueryServicesImpl.java |  9 ++++---
 .../org/apache/phoenix/tool/PhoenixCanaryTool.java | 12 +++++----
 .../org/apache/phoenix/util/CSVCommonsLoader.java  |  4 +--
 .../org/apache/phoenix/util/PhoenixStopWatch.java  |  4 +--
 .../java/org/apache/phoenix/util/TimeKeeper.java   |  2 +-
 .../org/apache/phoenix/util/TransactionUtil.java   |  3 ++-
 .../java/org/apache/phoenix/util/UpgradeUtil.java  | 10 ++++----
 .../apache/phoenix/pherf/rules/RulesApplier.java   |  3 ++-
 .../org/apache/phoenix/pherf/util/PhoenixUtil.java |  8 +++---
 .../pherf/workload/MultiThreadedRunner.java        | 15 +++++------
 .../pherf/workload/MultithreadedDiffer.java        | 11 +++++---
 .../phoenix/pherf/workload/WriteWorkload.java      | 29 +++++++++++-----------
 25 files changed, 110 insertions(+), 81 deletions(-)
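
The substitution is mechanical, but it is what makes the clock-dependent code below testable: every call site now reads time through org.apache.phoenix.util.EnvironmentEdgeManager, so a test can install a deterministic EnvironmentEdge instead of being pinned to the wall clock. A minimal sketch of the pattern, assuming the usual Phoenix/HBase-style injectEdge()/reset() hooks (the ManualClock helper is hypothetical and not part of this commit):

    import org.apache.phoenix.util.EnvironmentEdge;
    import org.apache.phoenix.util.EnvironmentEdgeManager;

    public class ClockInjectionSketch {

        // Hypothetical test clock; assumes EnvironmentEdge is the abstract clock base
        // whose currentTime() backs EnvironmentEdgeManager.currentTimeMillis().
        static class ManualClock extends EnvironmentEdge {
            private volatile long now;
            ManualClock(long start) { this.now = start; }
            void advance(long millis) { now += millis; }
            @Override
            public long currentTime() { return now; }
        }

        public static void main(String[] args) {
            // Production path -- the call this patch standardizes on everywhere:
            long wallClock = EnvironmentEdgeManager.currentTimeMillis();

            // Test path: install a deterministic clock, move it forward, then restore it.
            ManualClock clock = new ManualClock(1_000_000L);
            EnvironmentEdgeManager.injectEdge(clock);   // assumed hook
            try {
                clock.advance(5_000L);
                long elapsed = EnvironmentEdgeManager.currentTimeMillis() - 1_000_000L; // 5000
                System.out.println("wall=" + wallClock + " simulated elapsed=" + elapsed);
            } finally {
                EnvironmentEdgeManager.reset();         // assumed hook; never leak the fake clock
            }
        }
    }

With direct System.currentTimeMillis() calls, the only way to exercise timeout and expiry branches was to actually wait; routed through the manager, tests can advance time instantly and deterministically.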

diff --git a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
index 6868714..d6483b8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
@@ -72,6 +72,7 @@ import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.util.Closeables;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.util.SQLCloseable;
 import org.apache.phoenix.util.SQLCloseables;
 import org.apache.phoenix.util.ScanUtil;
@@ -134,7 +135,7 @@ public class ServerCacheClient {
                     QueryServicesOptions.DEFAULT_MAX_SERVER_CACHE_TIME_TO_LIVE_MS);
             this.id = id;
             this.servers = new HashMap();
-            long currentTime = System.currentTimeMillis();
+            long currentTime = EnvironmentEdgeManager.currentTimeMillis();
             for(HRegionLocation loc : servers) {
                 this.servers.put(loc, currentTime);
             }
@@ -183,7 +184,7 @@ public class ServerCacheClient {
             if(this.servers.containsKey(loc)) {
                 return false;
             } else {
-                this.servers.put(loc, System.currentTimeMillis());
+                this.servers.put(loc, EnvironmentEdgeManager.currentTimeMillis());
                 return true;
             }
         }
@@ -191,7 +192,7 @@ public class ServerCacheClient {
         public boolean isExpired(HRegionLocation loc) {
             if(this.servers.containsKey(loc)) {
                 Long time = this.servers.get(loc);
-                if(System.currentTimeMillis() - time > maxServerCacheTTL)
+                if(EnvironmentEdgeManager.currentTimeMillis() - time > maxServerCacheTTL)
                     return true; // cache was send more than maxTTL ms ago, expecting that it's expired
             } else {
                 return false; // should be on server yet.
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
index f4d3ca5..b38e144 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
@@ -1238,7 +1238,7 @@ public class UngroupedAggregateRegionObserver extends BaseScannerRegionObserver
                         statsCollector.updateStatistics(region, scan);
                         LOGGER.info("UPDATE STATISTICS finished successfully for scanner: "
                                 + innerScanner + ". Number of rows scanned: " + rowCount
-                                + ". Time: " + (System.currentTimeMillis() - startTime));
+                                + ". Time: " + (EnvironmentEdgeManager.currentTimeMillis() - startTime));
                     }
                     if (compactionRunning) {
                         LOGGER.info("UPDATE STATISTICS stopped in between because major compaction was running for region "
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/tasks/DropChildViewsTask.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/tasks/DropChildViewsTask.java
index 4609cf7..8ca5dc6 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/tasks/DropChildViewsTask.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/tasks/DropChildViewsTask.java
@@ -19,6 +19,7 @@ package org.apache.phoenix.coprocessor.tasks;
 
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.coprocessor.TaskRegionObserver;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.ViewUtil;
 import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -66,7 +67,7 @@ public class DropChildViewsTask extends BaseTask {
                                 SYSTEM_CHILD_LINK_NAME_BYTES,
                                 env.getConfiguration()).getName());
                 return new TaskRegionObserver.TaskResult(TaskRegionObserver.TaskResultCode.SUCCESS, "");
-            } else if (System.currentTimeMillis() < timeMaxInterval + timestamp.getTime()) {
+            } else if (EnvironmentEdgeManager.currentTimeMillis() < timeMaxInterval + timestamp.getTime()) {
                 // skip this task as it has not been expired and its parent table has not been dropped yet
                 LOGGER.info("Skipping a child view drop task. " +
                         "The parent table has not been dropped yet : " +
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java
index e8f761a..1cb9dc8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java
@@ -80,6 +80,7 @@ import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PVarbinary;
 import org.apache.phoenix.util.CostUtil;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.util.SQLCloseables;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -561,7 +562,7 @@ public class HashJoinPlan extends DelegateQueryPlan {
                                 plan.getEstimatedSize(), hashExpressions, singleValueOnly, usePersistentCache,
                                 parent.delegate.getTableRef().getTable(), keyRangeRhsExpression,
                                 keyRangeRhsValues);
-                        long endTime = System.currentTimeMillis();
+                        long endTime = EnvironmentEdgeManager.currentTimeMillis();
                         boolean isSet = parent.firstJobEndTime.compareAndSet(0, endTime);
                         if (!isSet && (endTime
                                 - parent.firstJobEndTime.get()) > parent.maxServerCacheTimeToLive) {
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index 346daba..79c8c90 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -1056,7 +1056,7 @@ public class MutationState implements SQLCloseable {
                     GLOBAL_MUTATION_BATCH_SIZE.update(numMutations);
                     mutationSizeBytes = calculateMutationSize(mutationList);
 
-                    startTime = System.currentTimeMillis();
+                    startTime = EnvironmentEdgeManager.currentTimeMillis();
                     child.addTimelineAnnotation("Attempt " + retryCount);
                     Iterator<List<Mutation>> itrListMutation = mutationBatchList.iterator();
                     while (itrListMutation.hasNext()) {
@@ -1128,7 +1128,7 @@ public class MutationState implements SQLCloseable {
                     child.stop();
                     child.stop();
                     shouldRetry = false;
-                    mutationCommitTime = System.currentTimeMillis() - startTime;
+                    mutationCommitTime = EnvironmentEdgeManager.currentTimeMillis() - startTime;
                     GLOBAL_MUTATION_COMMIT_TIME.update(mutationCommitTime);
                     numFailedMutations = 0;
 
@@ -1140,7 +1140,7 @@ public class MutationState implements SQLCloseable {
                         estimatedSize = PhoenixKeyValueUtil.getEstimatedRowMutationSize(mutations);
                     }
                 } catch (Exception e) {
-                    mutationCommitTime = System.currentTimeMillis() - startTime;
+                    mutationCommitTime = EnvironmentEdgeManager.currentTimeMillis() - startTime;
                     long serverTimestamp = ServerUtil.parseServerTimestamp(e);
                     SQLException inferredE = ServerUtil.parseServerExceptionOrNull(e);
                     if (inferredE != null) {
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
index 3d5c96b..7b1229b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
@@ -37,6 +37,7 @@ import org.apache.phoenix.monitoring.ReadMetricQueue;
 import org.apache.phoenix.monitoring.ScanMetricsHolder;
 import org.apache.phoenix.monitoring.TaskExecutionMetricsHolder;
 import org.apache.phoenix.trace.util.Tracing;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.util.LogUtil;
 import org.apache.phoenix.util.ScanUtil;
 import org.slf4j.Logger;
@@ -121,9 +122,11 @@ public class ParallelIterators extends BaseResultIterators {
                 
                 @Override
                 public PeekingResultIterator call() throws Exception {
-                    long startTime = System.currentTimeMillis();
+                    long startTime = EnvironmentEdgeManager.currentTimeMillis();
                     if (LOGGER.isDebugEnabled()) {
-                        LOGGER.debug(LogUtil.addCustomAnnotations("Id: " + scanId + ", Time: " + (System.currentTimeMillis() - startTime) + "ms, Scan: " + scan, ScanUtil.getCustomAnnotations(scan)));
+                        LOGGER.debug(LogUtil.addCustomAnnotations("Id: " + scanId + ", Time: " +
+                            (EnvironmentEdgeManager.currentTimeMillis() - startTime) +
+                            "ms, Scan: " + scan, ScanUtil.getCustomAnnotations(scan)));
                     }
                     PeekingResultIterator iterator = iteratorFactory.newIterator(context, tableResultItr, scan, physicalTableName, ParallelIterators.this.plan);
                     if (initFirstScanOnly) {
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/SpoolingResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/SpoolingResultIterator.java
index 0823026..6995e79 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/SpoolingResultIterator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/SpoolingResultIterator.java
@@ -48,6 +48,7 @@ import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.schema.tuple.ResultTuple;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.util.ByteUtil;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.util.ResultUtil;
 import org.apache.phoenix.util.ServerUtil;
 import org.apache.phoenix.util.TupleUtil;
@@ -114,9 +115,9 @@ public class SpoolingResultIterator implements PeekingResultIterator {
         this.spoolMetrics = sMetrics;
         this.memoryMetrics = mMetrics;
         boolean success = false;
-        long startTime = System.currentTimeMillis();
+        long startTime = EnvironmentEdgeManager.currentTimeMillis();
         final MemoryChunk chunk = mm.allocate(0, thresholdBytes);
-        long waitTime = System.currentTimeMillis() - startTime;
+        long waitTime = EnvironmentEdgeManager.currentTimeMillis() - startTime;
         GLOBAL_MEMORY_WAIT_TIME.update(waitTime);
         memoryMetrics.getMemoryWaitTimeMetric().change(waitTime);
         DeferredFileOutputStream spoolTo = null;
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
index 988a7c6..d302bcf 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
@@ -109,6 +109,7 @@ import org.apache.phoenix.schema.types.PVarbinary;
 import org.apache.phoenix.trace.util.Tracing;
 import org.apache.phoenix.transaction.PhoenixTransactionContext;
 import org.apache.phoenix.util.DateUtil;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.util.JDBCUtil;
 import org.apache.phoenix.util.NumberUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
@@ -467,7 +468,7 @@ public class PhoenixConnection implements Connection, MetaDataMutated, SQLClosea
                     for (int i = 0; i < paramMetaData.getParameterCount(); i++) {
                         stmt.setObject(i + 1, binds.get(bindsOffset + i));
                     }
-                    long start = System.currentTimeMillis();
+                    long start = EnvironmentEdgeManager.currentTimeMillis();
                     boolean isQuery = stmt.execute();
                     if (isQuery) {
                         ResultSet rs = stmt.getResultSet();
@@ -542,7 +543,7 @@ public class PhoenixConnection implements Connection, MetaDataMutated, SQLClosea
                         }
                     }
                     bindsOffset += paramMetaData.getParameterCount();
-                    double elapsedDuration = ((System.currentTimeMillis() - start) / 1000.0);
+                    double elapsedDuration = ((EnvironmentEdgeManager.currentTimeMillis() - start) / 1000.0);
                     out.println("Time: " + elapsedDuration + " sec(s)\n");
                     nStatements++;
                 } finally {
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
index 6a405f4..c06055c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
@@ -180,6 +180,7 @@ import org.apache.phoenix.trace.util.Tracing;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.CursorUtil;
 import org.apache.phoenix.util.PhoenixKeyValueUtil;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.util.LogUtil;
 import org.apache.phoenix.util.PhoenixContextExecutor;
 import org.apache.phoenix.util.PhoenixRuntime;
@@ -292,7 +293,7 @@ public class PhoenixStatement implements Statement, SQLCloseable {
                 new CallRunner.CallableThrowable<PhoenixResultSet, SQLException>() {
                 @Override
                     public PhoenixResultSet call() throws SQLException {
-                    final long startTime = System.currentTimeMillis();
+                    final long startTime = EnvironmentEdgeManager.currentTimeMillis();
                     try {
                         PhoenixConnection conn = getConnection();
                         
@@ -361,7 +362,7 @@ public class PhoenixStatement implements Statement, SQLCloseable {
                         // Regardless of whether the query was successfully handled or not, 
                         // update the time spent so far. If needed, we can separate out the
                         // success times and failure times.
-                        GLOBAL_QUERY_TIME.update(System.currentTimeMillis() - startTime);
+                        GLOBAL_QUERY_TIME.update(EnvironmentEdgeManager.currentTimeMillis() - startTime);
                     }
                 }
                 }, PhoenixContextExecutor.inContext());
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/job/AbstractRoundRobinQueue.java b/phoenix-core/src/main/java/org/apache/phoenix/job/AbstractRoundRobinQueue.java
index b2504c3..fa68852 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/job/AbstractRoundRobinQueue.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/job/AbstractRoundRobinQueue.java
@@ -17,6 +17,8 @@
  */
 package org.apache.phoenix.job;
 
+import org.apache.phoenix.util.EnvironmentEdgeManager;
+
 import java.util.*;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.TimeUnit;
@@ -66,12 +68,12 @@ public abstract class AbstractRoundRobinQueue<E> extends AbstractQueue<E>
     @Override
     public boolean offer(E o, long timeout, TimeUnit unit) throws InterruptedException {
         boolean taken = false;
-        long endAt = System.currentTimeMillis() + unit.toMillis(timeout);
+        long endAt = EnvironmentEdgeManager.currentTimeMillis() + unit.toMillis(timeout);
         synchronized(lock) {
-            long waitTime = endAt - System.currentTimeMillis();
+            long waitTime = endAt - EnvironmentEdgeManager.currentTimeMillis();
             while (!(taken = offer(o)) && waitTime > 0) {
                 this.lock.wait(waitTime);
-                waitTime = endAt - System.currentTimeMillis();
+                waitTime = endAt - EnvironmentEdgeManager.currentTimeMillis();
             }
         }
         return taken;
@@ -129,12 +131,12 @@ public abstract class AbstractRoundRobinQueue<E> extends AbstractQueue<E>
 
     @Override
     public E poll(long timeout, TimeUnit unit) throws InterruptedException {
-        long endAt = System.currentTimeMillis() + unit.toMillis(timeout);
+        long endAt = EnvironmentEdgeManager.currentTimeMillis() + unit.toMillis(timeout);
         synchronized(lock) {
-            long waitTime = endAt - System.currentTimeMillis();
+            long waitTime = endAt - EnvironmentEdgeManager.currentTimeMillis();
             while (this.size == 0 && waitTime > 0) {
                 this.lock.wait(waitTime);
-                waitTime = endAt - System.currentTimeMillis();
+                waitTime = endAt - EnvironmentEdgeManager.currentTimeMillis();
             }
             return poll();
         }
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java b/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java
index 7406e46..a34efc9 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java
@@ -41,6 +41,8 @@ import javax.annotation.Nullable;
 import org.apache.phoenix.monitoring.TaskExecutionMetricsHolder;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
+
 /**
  * 
  * Thread pool executor that executes scans in parallel
@@ -169,17 +171,17 @@ public class JobManager<T> extends AbstractRoundRobinQueue<T> {
 
         public InstrumentedJobFutureTask(Runnable r, T t) {
             super(r, t);
-            this.taskSubmissionTime = System.currentTimeMillis();
+            this.taskSubmissionTime = EnvironmentEdgeManager.currentTimeMillis();
         }
 
         public InstrumentedJobFutureTask(Callable<T> c) {
             super(c);
-            this.taskSubmissionTime = System.currentTimeMillis();
+            this.taskSubmissionTime = EnvironmentEdgeManager.currentTimeMillis();
         }
         
         @Override
         public void run() {
-            this.taskExecutionStartTime = System.currentTimeMillis();
+            this.taskExecutionStartTime = EnvironmentEdgeManager.currentTimeMillis();
             super.run();
         }
         
@@ -264,7 +266,8 @@ public class JobManager<T> extends AbstractRoundRobinQueue<T> {
         @Override
         protected void beforeExecute(Thread worker, Runnable task) {
             InstrumentedJobFutureTask instrumentedTask = (InstrumentedJobFutureTask)task;
-            long queueWaitTime = System.currentTimeMillis() - instrumentedTask.getTaskSubmissionTime();
+            long queueWaitTime = EnvironmentEdgeManager.currentTimeMillis() -
+                instrumentedTask.getTaskSubmissionTime();
             GLOBAL_TASK_QUEUE_WAIT_TIME.update(queueWaitTime);
             TaskExecutionMetricsHolder metrics = getRequestMetric(task);
             if (metrics != null) {
@@ -279,8 +282,10 @@ public class JobManager<T> extends AbstractRoundRobinQueue<T> {
             try {
                 super.afterExecute(instrumentedTask, t);
             } finally {
-                long taskExecutionTime = System.currentTimeMillis() - instrumentedTask.getTaskExecutionStartTime();
-                long endToEndTaskTime = System.currentTimeMillis() - instrumentedTask.getTaskSubmissionTime();
+                long taskExecutionTime = EnvironmentEdgeManager.currentTimeMillis() -
+                    instrumentedTask.getTaskExecutionStartTime();
+                long endToEndTaskTime = EnvironmentEdgeManager.currentTimeMillis() -
+                    instrumentedTask.getTaskSubmissionTime();
                 TaskExecutionMetricsHolder metrics = getRequestMetric(task);
                 if (metrics != null) {
                     metrics.getTaskExecutionTime().change(taskExecutionTime);
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/OrphanViewTool.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/OrphanViewTool.java
index 3e999c1..dbbb003 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/OrphanViewTool.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/OrphanViewTool.java
@@ -70,6 +70,7 @@ import org.apache.phoenix.schema.MetaDataClient;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -842,7 +843,7 @@ public class OrphanViewTool extends Configured implements Tool {
                 }
             }
             Properties props = new Properties();
-            long scn = System.currentTimeMillis() - ageMs;
+            long scn = EnvironmentEdgeManager.currentTimeMillis() - ageMs;
             props.setProperty("CurrentSCN", Long.toString(scn));
             connection = ConnectionUtil.getInputConnection(configuration, props);
             PhoenixConnection phoenixConnection = connection.unwrap(PhoenixConnection.class);
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexScrutinyMapperForTest.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexScrutinyMapperForTest.java
index 99d50ee..d2a0f35 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexScrutinyMapperForTest.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexScrutinyMapperForTest.java
@@ -28,13 +28,13 @@ public class IndexScrutinyMapperForTest extends IndexScrutinyMapper {
         long delta;
 
         public ScrutinyTestClock(long delta) {
-            initialTime = System.currentTimeMillis() + delta;
+            initialTime = EnvironmentEdgeManager.currentTimeMillis() + delta;
             this.delta = delta;
         }
 
         @Override
         public long currentTime() {
-            return System.currentTimeMillis() + delta;
+            return EnvironmentEdgeManager.currentTimeMillis() + delta;
         }
     }
 
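The ScrutinyTestClock above is the consuming side of the same pattern: a pluggable clock that skews every timestamp by a fixed delta. A hedged sketch of how such a clock is typically installed in a test (again assuming the injectEdge()/reset() hooks on EnvironmentEdgeManager; this wrapper is illustrative, not part of the commit):

    // Illustrative only: run a test body with time skewed by deltaMillis, then restore the clock.
    static void runWithSkewedClock(Runnable testBody, long deltaMillis) {
        EnvironmentEdgeManager.injectEdge(
            new IndexScrutinyMapperForTest.ScrutinyTestClock(deltaMillis)); // assumes the clock is reachable as a nested class
        try {
            testBody.run();                 // everything inside sees time shifted by deltaMillis
        } finally {
            EnvironmentEdgeManager.reset(); // restore the real clock so later tests are unaffected
        }
    }
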
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 62a27d3..20308a5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -681,7 +681,8 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
             throwConnectionClosedIfNullMetaData();
             PMetaData metaData = latestMetaData;
             PTable table;
-            long endTime = System.currentTimeMillis() + DEFAULT_OUT_OF_ORDER_MUTATIONS_WAIT_TIME_MS;
+            long endTime = EnvironmentEdgeManager.currentTimeMillis() +
+                DEFAULT_OUT_OF_ORDER_MUTATIONS_WAIT_TIME_MS;
             while (true) {
                 try {
                     try {
@@ -703,7 +704,7 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
                         }
                     } catch (TableNotFoundException e) {
                     }
-                    long waitTime = endTime - System.currentTimeMillis();
+                    long waitTime = endTime - EnvironmentEdgeManager.currentTimeMillis();
                     // We waited long enough - just remove the table from the cache
                     // and the next time it's used it'll be pulled over from the server.
                     if (waitTime <= 0) {
@@ -5215,7 +5216,7 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
                         // iterate only up to whatever the current count is.
                         int numScanners = scannerQueue.size();
                         int renewed = 0;
-                        long start = System.currentTimeMillis();
+                        long start = EnvironmentEdgeManager.currentTimeMillis();
                         while (numScanners > 0) {
                             // It is guaranteed that this poll won't hang indefinitely because this is the
                             // only thread that removes items from the queue. Still adding a 1 ms timeout
@@ -5258,7 +5259,7 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
                         }
                         if (renewed > 0) {
                             LOGGER.info("Renewed leases for " + renewed + " scanner/s in "
-                                    + (System.currentTimeMillis() - start) + " ms ");
+                                    + (EnvironmentEdgeManager.currentTimeMillis() - start) + " ms ");
                         }
                         connectionsQueue.offer(connRef);
                     }
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/tool/PhoenixCanaryTool.java b/phoenix-core/src/main/java/org/apache/phoenix/tool/PhoenixCanaryTool.java
index ed35ec1..2310d9f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/tool/PhoenixCanaryTool.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/tool/PhoenixCanaryTool.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.RetryCounter;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -80,14 +81,15 @@ public class PhoenixCanaryTool extends Configured implements Tool {
 
         private void onCreate(Connection connection) {
             result.setTimestamp(getCurrentTimestamp());
-            result.setStartTime(System.currentTimeMillis());
+            result.setStartTime(EnvironmentEdgeManager.currentTimeMillis());
             this.connection = connection;
         }
 
         abstract void onExecute() throws Exception;
 
         private void onExit() {
-            result.setExecutionTime(System.currentTimeMillis() - result.getStartTime());
+            result.setExecutionTime(EnvironmentEdgeManager.currentTimeMillis() - 
+                result.getStartTime());
         }
 
         CanaryTestResult runTest(Connection connection) {
@@ -110,7 +112,7 @@ public class PhoenixCanaryTool extends Configured implements Tool {
         void onExecute() throws Exception {
             result.setTestName("upsertTable");
             // Insert data
-            timestamp = new Timestamp(System.currentTimeMillis());
+            timestamp = new Timestamp(EnvironmentEdgeManager.currentTimeMillis());
             String stmt = "UPSERT INTO " + FQ_TABLE_NAME
                     + "(mykey, mycolumn, insert_date) VALUES (?, ?, ?)";
             PreparedStatement ps = connection.prepareStatement(stmt);
@@ -315,7 +317,7 @@ public class PhoenixCanaryTool extends Configured implements Tool {
             // Dynamically load a class for sink
             sink = (Sink) ClassLoader.getSystemClassLoader().loadClass(logSinkClass).newInstance();
 
-            long startTime = System.currentTimeMillis();
+            long startTime = EnvironmentEdgeManager.currentTimeMillis();
 
             String connectionURL = (conString != null) ? conString :
                     "jdbc:phoenix:thin:serialization=PROTOBUF;url=" + hostName + ":" + port;
@@ -348,7 +350,7 @@ public class PhoenixCanaryTool extends Configured implements Tool {
                 }
             }, timeoutVal, TimeUnit.SECONDS, true);
 
-            long estimatedTime = System.currentTimeMillis() - startTime;
+            long estimatedTime = EnvironmentEdgeManager.currentTimeMillis() - startTime;
 
             appInfo.setExecutionTime(estimatedTime);
             appInfo.setSuccessful(true);
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/CSVCommonsLoader.java b/phoenix-core/src/main/java/org/apache/phoenix/util/CSVCommonsLoader.java
index 4ade283..f946758 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/CSVCommonsLoader.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/CSVCommonsLoader.java
@@ -204,7 +204,7 @@ public class CSVCommonsLoader {
         boolean wasAutoCommit = conn.getAutoCommit();
         try {
             conn.setAutoCommit(false);
-            long start = System.currentTimeMillis();
+            long start = EnvironmentEdgeManager.currentTimeMillis();
             CsvUpsertListener upsertListener = new CsvUpsertListener(conn,
                     conn.getMutateBatchSize(), isStrict);
             CsvUpsertExecutor csvUpsertExecutor = new CsvUpsertExecutor(conn,
@@ -215,7 +215,7 @@ public class CSVCommonsLoader {
             csvUpsertExecutor.close();
 
             conn.commit();
-            double elapsedDuration = ((System.currentTimeMillis() - start) / 1000.0);
+            double elapsedDuration = ((EnvironmentEdgeManager.currentTimeMillis() - start) / 1000.0);
             System.out.println("CSV Upsert complete. " + upsertListener.getTotalUpsertCount()
                     + " rows upserted");
             System.out.println("Time: " + elapsedDuration + " sec(s)\n");
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixStopWatch.java b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixStopWatch.java
index 9001b0a..291a19c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixStopWatch.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixStopWatch.java
@@ -40,7 +40,7 @@ public class PhoenixStopWatch {
      *             if the stopwatch is already running.
      */
     public PhoenixStopWatch start() {
-        long currentTime = System.currentTimeMillis();
+        long currentTime = EnvironmentEdgeManager.currentTimeMillis();
         if (isRunning) { throw new IllegalStateException("Watch is already running"); }
         startTime = currentTime;
         isRunning = true;
@@ -56,7 +56,7 @@ public class PhoenixStopWatch {
      *             if the stopwatch is already stopped.
      */
     public PhoenixStopWatch stop() {
-        long currentTime = System.currentTimeMillis();
+        long currentTime = EnvironmentEdgeManager.currentTimeMillis();
         if (!isRunning) { throw new IllegalStateException("Watch wasn't started"); }
         elapsedTimeMs = currentTime - startTime;
         startTime = 0;
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/TimeKeeper.java b/phoenix-core/src/main/java/org/apache/phoenix/util/TimeKeeper.java
index 661390d..c4fe6ea 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/TimeKeeper.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/TimeKeeper.java
@@ -21,7 +21,7 @@ public interface TimeKeeper {
     static final TimeKeeper SYSTEM = new TimeKeeper() {
         @Override
         public long getCurrentTime() {
-            return System.currentTimeMillis();
+            return EnvironmentEdgeManager.currentTimeMillis();
         }
     };
     
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/TransactionUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/TransactionUtil.java
index dee02d1..0b5f449 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/TransactionUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/TransactionUtil.java
@@ -44,7 +44,8 @@ public class TransactionUtil {
     public static final int MAX_TRANSACTIONS_PER_MILLISECOND = 1000000;
     // Constant used to empirically determine if a timestamp is a transactional or
     // non transactional timestamp (see TxUtils.MAX_NON_TX_TIMESTAMP)
-    private static final long MAX_NON_TX_TIMESTAMP = (long) (System.currentTimeMillis() * 1.1);
+    private static final long MAX_NON_TX_TIMESTAMP =
+        (long) (EnvironmentEdgeManager.currentTimeMillis() * 1.1);
     
     private TransactionUtil() {
         
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
index 830d702..eb65840 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
@@ -668,7 +668,7 @@ public class UpgradeUtil {
         Table sysTable = conn.getQueryServices().getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
         try {
             LOGGER.info("Setting SALT_BUCKETS property of SYSTEM.SEQUENCE to " + SaltingUtil.MAX_BUCKET_NUM);
-            Cell saltKV = PhoenixKeyValueUtil.newKeyValue(seqTableKey, 
+            Cell saltKV = PhoenixKeyValueUtil.newKeyValue(seqTableKey,
                     PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
                     PhoenixDatabaseMetaData.SALT_BUCKETS_BYTES,
                     MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP,
@@ -687,7 +687,7 @@ public class UpgradeUtil {
                 // This is needed as a fix for https://issues.apache.org/jira/browse/PHOENIX-1401 
                 if (oldTable.getTimeStamp() == MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_2_0) {
                     byte[] oldSeqNum = PLong.INSTANCE.toBytes(oldTable.getSequenceNumber());
-                    Cell seqNumKV = PhoenixKeyValueUtil.newKeyValue(seqTableKey, 
+                    Cell seqNumKV = PhoenixKeyValueUtil.newKeyValue(seqTableKey,
                             PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
                             PhoenixDatabaseMetaData.TABLE_SEQ_NUM_BYTES,
                             MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP,
@@ -781,7 +781,7 @@ public class UpgradeUtil {
                             if (!success) {
                                 if (!committed) { // Try to recover by setting salting back to off, as we haven't successfully committed anything
                                     // Don't use Delete here as we'd never be able to change it again at this timestamp.
-                                    Cell unsaltKV = PhoenixKeyValueUtil.newKeyValue(seqTableKey, 
+                                    Cell unsaltKV = PhoenixKeyValueUtil.newKeyValue(seqTableKey,
                                             PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
                                             PhoenixDatabaseMetaData.SALT_BUCKETS_BYTES,
                                             MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP,
@@ -1728,7 +1728,7 @@ public class UpgradeUtil {
 
     private static void upgradeDescVarLengthRowKeys(PhoenixConnection upgradeConn, PhoenixConnection globalConn, String schemaName, String tableName, boolean isTable, boolean bypassUpgrade) throws SQLException {
         TableName physicalName = TableName.valueOf(SchemaUtil.getTableName(schemaName, tableName));
-        long currentTime = System.currentTimeMillis();
+        long currentTime = EnvironmentEdgeManager.currentTimeMillis();
         String snapshotName = physicalName + "_" + currentTime;
         Admin admin = null;
         if (isTable && !bypassUpgrade) {
@@ -2439,7 +2439,7 @@ public class UpgradeUtil {
     public static final String getSysCatalogSnapshotName(long currentSystemTableTimestamp) {
         String tableString = SYSTEM_CATALOG_NAME;
         Format formatter = new SimpleDateFormat("yyyyMMddHHmmss");
-        String date = formatter.format(new Date(System.currentTimeMillis()));
+        String date = formatter.format(new Date(EnvironmentEdgeManager.currentTimeMillis()));
         String upgradingFrom = getVersion(currentSystemTableTimestamp);
         return "SNAPSHOT_" + tableString + "_" + upgradingFrom + "_TO_" + CURRENT_CLIENT_VERSION + "_" + date;
     }
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/rules/RulesApplier.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/rules/RulesApplier.java
index 8f3ec59..abfa924 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/rules/RulesApplier.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/rules/RulesApplier.java
@@ -26,6 +26,7 @@ import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.lang3.RandomUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.phoenix.pherf.configuration.*;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.DateTimeFormat;
@@ -58,7 +59,7 @@ public class RulesApplier {
 
 
     public RulesApplier(XMLConfigParser parser) {
-        this(parser, System.currentTimeMillis());
+        this(parser, EnvironmentEdgeManager.currentTimeMillis());
     }
 
     public RulesApplier(XMLConfigParser parser, long seed) {
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
index cda9b99..d654138 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
@@ -27,6 +27,7 @@ import org.apache.phoenix.pherf.result.DataLoadThreadTime;
 import org.apache.phoenix.pherf.result.DataLoadTimeSummary;
 import org.apache.phoenix.pherf.rules.RulesApplier;
 import org.apache.phoenix.pherf.util.GoogleChartGenerator.Node;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -312,13 +313,14 @@ public class PhoenixUtil {
             Connection conn = null;
             try {
             	for (Ddl ddl : ddls) {
-	                LOGGER.info("\nExecuting DDL:" + ddl + " on tenantId:" +tenantId);
-	                long startTime = System.currentTimeMillis();
+                    LOGGER.info("\nExecuting DDL:" + ddl + " on tenantId:" +tenantId);
+	                long startTime = EnvironmentEdgeManager.currentTimeMillis();
 	                executeStatement(ddl.toString(), conn = getConnection(tenantId));
 	                if (ddl.getStatement().toUpperCase().contains(ASYNC_KEYWORD)) {
 	                	waitForAsyncIndexToFinish(ddl.getTableName());
 	                }
-	                dataLoadTimeSummary.add(ddl.getTableName(), 0, (int)(System.currentTimeMillis() - startTime));
+	                dataLoadTimeSummary.add(ddl.getTableName(), 0,
+                        (int)(EnvironmentEdgeManager.currentTimeMillis() - startTime));
             	}
             } finally {
                 if (null != conn) {
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java
index ecc432b..9fcc38e 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java
@@ -30,6 +30,7 @@ import org.apache.phoenix.pherf.result.ResultManager;
 import org.apache.phoenix.pherf.result.RunTime;
 import org.apache.phoenix.pherf.result.ThreadTime;
 import org.apache.phoenix.pherf.rules.RulesApplier;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.phoenix.pherf.PherfConstants.GeneratePhoenixStats;
@@ -48,7 +49,7 @@ class MultiThreadedRunner implements Callable<Void> {
     private DataModelResult dataModelResult;
     private long numberOfExecutions;
     private long executionDurationInMs;
-    private static long lastResultWritten = System.currentTimeMillis() - 1000;
+    private static long lastResultWritten = EnvironmentEdgeManager.currentTimeMillis() - 1000;
     private final ResultManager resultManager;
     private final RulesApplier ruleApplier;
     private final Scenario scenario;
@@ -89,14 +90,14 @@ class MultiThreadedRunner implements Callable<Void> {
     public Void call() throws Exception {
         LOGGER.info("\n\nThread Starting " + threadName + " ; " + query.getStatement() + " for "
                 + numberOfExecutions + "times\n\n");
-        Long start = System.currentTimeMillis();
-        for (long i = numberOfExecutions; (i > 0 && ((System.currentTimeMillis() - start)
+        Long start = EnvironmentEdgeManager.currentTimeMillis();
+        for (long i = numberOfExecutions; (i > 0 && ((EnvironmentEdgeManager.currentTimeMillis() - start)
                 < executionDurationInMs)); i--) {
             synchronized (workloadExecutor) {
                 timedQuery();
-                if ((System.currentTimeMillis() - lastResultWritten) > 1000) {
+                if ((EnvironmentEdgeManager.currentTimeMillis() - lastResultWritten) > 1000) {
                     resultManager.write(dataModelResult, ruleApplier);
-                    lastResultWritten = System.currentTimeMillis();
+                    lastResultWritten = EnvironmentEdgeManager.currentTimeMillis();
                 }
             }
         }
@@ -127,7 +128,7 @@ class MultiThreadedRunner implements Callable<Void> {
         Connection conn = null;
         PreparedStatement statement = null;
         ResultSet rs = null;
-        Long start = System.currentTimeMillis();
+        Long start = EnvironmentEdgeManager.currentTimeMillis();
         Date startDate = Calendar.getInstance().getTime();
         String exception = null;
         long resultRowCount = 0;
@@ -170,7 +171,7 @@ class MultiThreadedRunner implements Callable<Void> {
             throw e;
         } finally {
             getThreadTime().getRunTimesInMs().add(new RunTime(exception, startDate, resultRowCount,
-                    (int) (System.currentTimeMillis() - start)));
+                    (int) (EnvironmentEdgeManager.currentTimeMillis() - start)));
 
             if (rs != null) rs.close();
             if (statement != null) statement.close();
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java
index 71de762..e3480dd 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultithreadedDiffer.java
@@ -27,6 +27,7 @@ import org.apache.phoenix.pherf.configuration.Query;
 import org.apache.phoenix.pherf.result.RunTime;
 import org.apache.phoenix.pherf.result.ThreadTime;
 import org.apache.phoenix.pherf.util.PhoenixUtil;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -50,14 +51,15 @@ class MultithreadedDiffer implements Callable<Void> {
      * @throws Exception
      */
     private void diffQuery() throws Exception {
-        Long start = System.currentTimeMillis();
+        Long start = EnvironmentEdgeManager.currentTimeMillis();
         Date startDate = Calendar.getInstance().getTime();
         String newCSV = queryVerifier.exportCSV(query);
         boolean verifyResult = queryVerifier.doDiff(query, newCSV);
         String explainPlan = pUtil.getExplainPlan(query);
         getThreadTime().getRunTimesInMs().add(new RunTime(
                         verifyResult == true ? PherfConstants.DIFF_PASS : PherfConstants.DIFF_FAIL,
-                        explainPlan, startDate, -1L, (int) (System.currentTimeMillis() - start)));
+                        explainPlan, startDate, -1L,
+            (int) (EnvironmentEdgeManager.currentTimeMillis() - start)));
     }
 
     /**
@@ -84,8 +86,9 @@ class MultithreadedDiffer implements Callable<Void> {
     public Void call() throws Exception {
         LOGGER.info("\n\nThread Starting " + t.getName() + " ; " + query.getStatement() + " for "
                 + numberOfExecutions + "times\n\n");
-        Long start = System.currentTimeMillis();
-        for (long i = numberOfExecutions; (i > 0 && ((System.currentTimeMillis() - start)
+        Long start = EnvironmentEdgeManager.currentTimeMillis();
+        for (long i = numberOfExecutions; (i > 0 &&
+            ((EnvironmentEdgeManager.currentTimeMillis() - start)
                 < executionDurationInMs)); i--) {
             try {
                 diffQuery();
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
index 6489ea9..a3e63a4 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
@@ -48,6 +48,7 @@ import org.apache.phoenix.pherf.rules.DataValue;
 import org.apache.phoenix.pherf.rules.RulesApplier;
 import org.apache.phoenix.pherf.util.PhoenixUtil;
 import org.apache.phoenix.pherf.util.RowCalculator;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -177,7 +178,6 @@ public class WriteWorkload implements Workload {
         };
     }
 
-
     private synchronized void exec(DataLoadTimeSummary dataLoadTimeSummary,
             DataLoadThreadTime dataLoadThreadTime, Scenario scenario) throws Exception {
         LOGGER.info("\nLoading " + scenario.getRowCount() + " rows for " + scenario.getTableName());
@@ -187,7 +187,7 @@ public class WriteWorkload implements Workload {
 
         // Write data
         List<Future<Info>> writeBatches = getBatches(dataLoadThreadTime, scenario);
-        waitForBatches(dataLoadTimeSummary, scenario, System.currentTimeMillis(), writeBatches);
+        waitForBatches(dataLoadTimeSummary, scenario, EnvironmentEdgeManager.currentTimeMillis(), writeBatches);
 
         // Update Phoenix Statistics
         if (this.generateStatistics == GeneratePhoenixStats.YES) {
@@ -243,7 +243,7 @@ public class WriteWorkload implements Workload {
             LOGGER.info("Executor (" + this.hashCode() + ") writes complete with row count ("
                     + writeInfo.getRowCount() + ") in Ms (" + writeInfo.getDuration() + ")");
         }
-        long testDuration = System.currentTimeMillis() - start;
+        long testDuration = EnvironmentEdgeManager.currentTimeMillis() - start;
         LOGGER.info("Writes completed with total row count (" + sumRows
                 + ") with total elapsed time of (" + testDuration
                 + ") ms and total CPU execution time of (" + sumDuration + ") ms");
@@ -263,19 +263,20 @@ public class WriteWorkload implements Workload {
                 PreparedStatement stmt = null;
                 try {
                     connection = pUtil.getConnection(scenario.getTenantId());
-                    long logStartTime = System.currentTimeMillis();
-                    long maxDuration = WriteWorkload.this.writeParams == null ? Long.MAX_VALUE :
-                            WriteWorkload.this.writeParams.getExecutionDurationInMs();
+                    long logStartTime = EnvironmentEdgeManager.currentTimeMillis();
+                    long maxDuration = (WriteWorkload.this.writeParams == null) ? Long.MAX_VALUE :
+                        WriteWorkload.this.writeParams.getExecutionDurationInMs();
+
                     int logPerNRows = PherfConstants.LOG_PER_NROWS;
                     String customizedLogPerNRows = connection.getClientInfo().
                             getProperty(PherfConstants.LOG_PER_NROWS_NAME);
                     if (customizedLogPerNRows!= null) {
                         logPerNRows = Integer.valueOf(customizedLogPerNRows);
                     }
-                    last = start = System.currentTimeMillis();
+                    last = start = EnvironmentEdgeManager.currentTimeMillis();
                     String sql = buildSql(columns, tableName);
                     stmt = connection.prepareStatement(sql);
-                    for (long i = rowCount; (i > 0) && ((System.currentTimeMillis() - logStartTime)
+                    for (long i = rowCount; (i > 0) && ((EnvironmentEdgeManager.currentTimeMillis() - logStartTime)
                             < maxDuration); i--) {
                         stmt = buildStatement(scenario, columns, stmt, simpleDateFormat);
                         if (useBatchApi) {
@@ -298,7 +299,7 @@ public class WriteWorkload implements Workload {
                                 }
                             }
                             connection.commit();
-                            duration = System.currentTimeMillis() - last;
+                            duration = EnvironmentEdgeManager.currentTimeMillis() - last;
                             LOGGER.info("Writer (" + Thread.currentThread().getName()
                                     + ") committed Batch. Total " + getBatchSize()
                                     + " rows for this thread (" + this.hashCode() + ") in ("
@@ -307,14 +308,14 @@ public class WriteWorkload implements Workload {
                             if (i % logPerNRows == 0 && i != 0) {
                                 dataLoadThreadTime.add(tableName,
                                     Thread.currentThread().getName(), i,
-                                    System.currentTimeMillis() - logStartTime);
+                                    EnvironmentEdgeManager.currentTimeMillis() - logStartTime);
                             }
 
-                            logStartTime = System.currentTimeMillis();
+                            logStartTime = EnvironmentEdgeManager.currentTimeMillis();
                             // Pause for throttling if configured to do so
                             Thread.sleep(threadSleepDuration);
                             // Re-compute the start time for the next batch
-                            last = System.currentTimeMillis();
+                            last = EnvironmentEdgeManager.currentTimeMillis();
                         }
                     }
                 } catch (SQLException e) {
@@ -344,7 +345,7 @@ public class WriteWorkload implements Workload {
 
                         try {
                             connection.commit();
-                            duration = System.currentTimeMillis() - start;
+                            duration = EnvironmentEdgeManager.currentTimeMillis() - start;
                             LOGGER.info("Writer ( " + Thread.currentThread().getName()
                                     + ") committed Final Batch. Duration (" + duration + ") Ms");
                             connection.close();
@@ -354,7 +355,7 @@ public class WriteWorkload implements Workload {
                         }
                     }
                 }
-                totalDuration = System.currentTimeMillis() - start;
+                totalDuration = EnvironmentEdgeManager.currentTimeMillis() - start;
                 return new Info(totalDuration, rowsCreated);
             }
         });