You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by co...@apache.org on 2015/09/11 19:59:18 UTC

[1/4] phoenix git commit: PHOENIX-2184 - Pherf - Add pluggable custom result writer

Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 18ec7d6a4 -> 2af5f8cb4


PHOENIX-2184 - Pherf - Add pluggable custom result writer


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f9a5a925
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f9a5a925
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f9a5a925

Branch: refs/heads/4.x-HBase-0.98
Commit: f9a5a9251bd63eed7083ba6d6ac82f570b13d426
Parents: 18ec7d6
Author: Cody Marcel <co...@gmail.com>
Authored: Mon Aug 3 15:10:58 2015 -0700
Committer: Cody Marcel <cm...@cmarcel-wsl1.internal.salesforce.com>
Committed: Fri Sep 11 10:57:04 2015 -0700

----------------------------------------------------------------------
 .../apache/phoenix/util/InstanceResolver.java   |  26 ++-
 phoenix-pherf/config/pherf.properties           |   3 +-
 .../org/apache/phoenix/pherf/DataIngestIT.java  |   7 +-
 .../apache/phoenix/pherf/ResultBaseTestIT.java  |   2 +-
 .../java/org/apache/phoenix/pherf/Pherf.java    |  14 +-
 .../apache/phoenix/pherf/PherfConstants.java    |  35 ++--
 .../phoenix/pherf/jmx/MonitorManager.java       |  14 +-
 .../phoenix/pherf/result/QueryResult.java       |  31 +--
 .../phoenix/pherf/result/ResultHandler.java     |   6 +
 .../phoenix/pherf/result/ResultManager.java     |  61 ++++--
 .../apache/phoenix/pherf/result/ResultUtil.java |  34 ++-
 .../pherf/result/file/ResultFileDetails.java    |   5 +
 .../pherf/result/impl/CSVFileResultHandler.java |  89 ++++++++
 .../pherf/result/impl/CSVResultHandler.java     | 108 ++--------
 .../pherf/result/impl/DefaultResultHandler.java |  67 ++++++
 .../pherf/result/impl/ImageResultHandler.java   |  50 ++---
 .../pherf/result/impl/XMLResultHandler.java     |  44 ++--
 .../apache/phoenix/pherf/util/PhoenixUtil.java  | 209 ++++++++++---------
 .../pherf/workload/MultiThreadedRunner.java     |   5 +-
 .../phoenix/pherf/workload/QueryExecutor.java   |  37 ++--
 .../pherf/workload/WorkloadExecutor.java        |  17 +-
 .../phoenix/pherf/workload/WriteWorkload.java   |   3 +-
 .../org/apache/phoenix/pherf/PherfTest.java     |   2 +-
 .../apache/phoenix/pherf/ResultBaseTest.java    |   5 +-
 .../org/apache/phoenix/pherf/ResultTest.java    |  97 +++++----
 25 files changed, 570 insertions(+), 401 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java b/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java
index e238c64..dd99d1e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java
@@ -17,6 +17,10 @@
  */
 package org.apache.phoenix.util;
 
+import org.apache.commons.collections.IteratorUtils;
+
+import java.util.Iterator;
+import java.util.List;
 import java.util.ServiceLoader;
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -52,7 +56,27 @@ public class InstanceResolver {
         }
         return (T)obj;
     }
-    
+
+    /**
+     * Resolves all instances of a specified class and adds them to the list of default implementations
+     * @param clazz Type of the instance to resolve
+     * @param defaultInstances {@link List} of instances that match the type clazz
+     * @param <T> Type of class passed
+     * @return {@link List} of instances of the specified class. Newly found instances will be added
+     *          to the existing contents of defaultInstances
+     */
+    @SuppressWarnings("unchecked")
+    public static <T> List get(Class<T> clazz, List<T> defaultInstances) {
+        Iterator<T> iterator = ServiceLoader.load(clazz).iterator();
+        if (defaultInstances != null) {
+            defaultInstances.addAll(IteratorUtils.toList(iterator));
+        } else {
+            defaultInstances = IteratorUtils.toList(iterator);
+        }
+
+        return defaultInstances;
+    }
+
     private synchronized static <T> T resolveSingleton(Class<T> clazz, T defaultInstance) {
         ServiceLoader<T> loader = ServiceLoader.load(clazz);
         // returns the first registered instance found

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/config/pherf.properties
----------------------------------------------------------------------
diff --git a/phoenix-pherf/config/pherf.properties b/phoenix-pherf/config/pherf.properties
index 1142f9b5..152e09c 100644
--- a/phoenix-pherf/config/pherf.properties
+++ b/phoenix-pherf/config/pherf.properties
@@ -30,5 +30,4 @@ pherf.default.dataloader.threadpool=0
 pherf.default.dataloader.batchsize=1000
 
 # Directory where results from a scenario run will be written
-pherf.default.results.dir=RESULTS
-
+pherf.default.results.dir=RESULTS
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
index 2b900df..b821c7b 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
@@ -35,8 +35,6 @@ import org.apache.phoenix.pherf.workload.WriteWorkload;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.jcabi.jdbc.JdbcSession;
-import com.jcabi.jdbc.Outcome;
 
 import java.sql.Connection;
 import java.sql.ResultSet;
@@ -99,7 +97,7 @@ public class DataIngestIT extends ResultBaseTestIT {
             }
 
             // Run some queries
-            Workload query = new QueryExecutor(parser, util, executor.getPool());
+            Workload query = new QueryExecutor(parser, util, executor);
             executor.add(query);
             executor.get();
 
@@ -119,8 +117,7 @@ public class DataIngestIT extends ResultBaseTestIT {
         dataModels.add(dataModel);
         QueryExecutor
                 qe =
-                new QueryExecutor(parser, util, executor.getPool(), dataModels, null, false,
-                        PherfConstants.RunMode.PERFORMANCE);
+                new QueryExecutor(parser, util, executor, dataModels, null, false);
         executor.add(qe);
         Scenario scenario = parser.getScenarioByName("testScenarioRW");
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
index d2c5173..1841d71 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
@@ -43,7 +43,7 @@ public class ResultBaseTestIT extends BaseHBaseManagedTimeIT {
     @BeforeClass public static void setUp() throws Exception {
 
         PherfConstants constants = PherfConstants.create();
-        properties = constants.getProperties(PherfConstants.PHERF_PROPERTIES);
+        properties = constants.getProperties(PherfConstants.PHERF_PROPERTIES, false);
         String dir = properties.getProperty("pherf.default.results.dir");
         String targetDir = "target/" + dir;
         properties.setProperty("pherf.default.results.dir", targetDir);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
index 0421b6f..70fdb11 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
@@ -87,7 +87,7 @@ public class Pherf {
     private final String dropPherfTablesRegEx;
     private final boolean executeQuerySets;
     private final boolean exportCSV;
-    private final boolean diff;
+    private final boolean isFunctional;
     private final boolean monitor;
     private final int rowCountOverride;
     private final boolean listFiles;
@@ -106,7 +106,7 @@ public class Pherf {
             System.exit(1);
         }
 
-        properties = PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES);
+        properties = PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, false);
         dropPherfTablesRegEx = command.getOptionValue("drop", null);
         monitor = command.hasOption("m");
         String
@@ -123,7 +123,7 @@ public class Pherf {
         zookeeper = command.getOptionValue("z", "localhost");
         queryHint = command.getOptionValue("hint", null);
         exportCSV = command.hasOption("export");
-        diff = command.hasOption("diff");
+        isFunctional = command.hasOption("diff");
         listFiles = command.hasOption("listFiles");
         applySchema = !command.hasOption("disableSchemaApply");
         scenarioFile =
@@ -158,7 +158,7 @@ public class Pherf {
     public void run() throws Exception {
         MonitorManager monitorManager = null;
         List<Workload> workloads = new ArrayList<>();
-        WorkloadExecutor workloadExecutor = new WorkloadExecutor(properties, workloads);
+        WorkloadExecutor workloadExecutor = new WorkloadExecutor(properties, workloads, !isFunctional);
         try {
             if (listFiles) {
                 ResourceList list = new ResourceList(PherfConstants.RESOURCE_DATAMODEL);
@@ -224,10 +224,8 @@ public class Pherf {
                 logger.info("\nStarting to apply Execute Queries...");
 
                 workloadExecutor
-                        .add(new QueryExecutor(parser, phoenixUtil, workloadExecutor.getPool(),
-                                parser.getDataModels(), queryHint, exportCSV, diff ?
-                                PherfConstants.RunMode.FUNCTIONAL :
-                                PherfConstants.RunMode.PERFORMANCE));
+                        .add(new QueryExecutor(parser, phoenixUtil, workloadExecutor, parser.getDataModels(), queryHint,
+                                isFunctional));
 
             } else {
                 logger.info(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java
index 12580d4..3acf5a5 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java
@@ -18,12 +18,18 @@
 
 package org.apache.phoenix.pherf;
 
+import java.io.IOException;
 import java.io.InputStream;
 import java.util.Properties;
 
 public class PherfConstants {
+    public enum GeneratePhoenixStats {
+        YES,
+        NO
+    }
+
     private static PherfConstants instance = null;
-    private Properties properties = null;
+    private static Properties instanceProperties = null;
 
     public static final int DEFAULT_THREAD_POOL_SIZE = 10;
     public static final int DEFAULT_BATCH_SIZE = 1000;
@@ -62,16 +68,6 @@ public class PherfConstants {
     public static final int MONITOR_FREQUENCY = 5000;
     public static final String MONITOR_FILE_NAME = "STATS_MONITOR";
 
-    public static enum GeneratePhoenixStats {
-        YES,
-        NO
-    }
-    
-    public static enum RunMode {
-        PERFORMANCE,
-        FUNCTIONAL
-    }
-
     private PherfConstants() {
     }
 
@@ -82,12 +78,19 @@ public class PherfConstants {
         return instance;
     }
 
-    public Properties getProperties(final String fileName) throws Exception {
-        if (properties != null) {
-            return properties;
+    public Properties getProperties(final String fileName, boolean getDefault) throws Exception {
+
+        if (instanceProperties == null) {
+            instanceProperties = loadProperties(fileName);
+        } else {
+            return getDefault ? loadProperties(fileName) : instanceProperties;
         }
 
-        properties = new Properties();
+        return instanceProperties;
+    }
+
+    private Properties loadProperties(String fileName) throws IOException{
+        Properties properties = new Properties();
         InputStream is = null;
         try {
             is = getClass().getClassLoader().getResourceAsStream(fileName);
@@ -109,7 +112,7 @@ public class PherfConstants {
     public String getProperty(final String fileName, String property) {
         String value = null;
         try {
-            value = getProperties(fileName).getProperty(property);
+            value = getProperties(fileName, false).getProperty(property);
         } catch (Exception e) {
             e.printStackTrace();
         }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java
index 5b39b2b..bb29902 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java
@@ -24,7 +24,7 @@ import org.apache.phoenix.pherf.jmx.monitors.Monitor;
 import org.apache.phoenix.pherf.result.Result;
 import org.apache.phoenix.pherf.result.ResultHandler;
 import org.apache.phoenix.pherf.result.file.ResultFileDetails;
-import org.apache.phoenix.pherf.result.impl.CSVResultHandler;
+import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler;
 import org.apache.phoenix.pherf.workload.Workload;
 import org.apache.phoenix.util.DateUtil;
 
@@ -42,7 +42,6 @@ import java.util.concurrent.atomic.AtomicLong;
  * This class starts JMX stats for the configured monitors.
  * Monitors should be configured in MonitorDetails Enum.
  * Each stat implements {@link org.apache.phoenix.pherf.jmx.monitors.Monitor}.
- *
  * For the duration of any Pherf run, when the configured
  * {@link org.apache.phoenix.pherf.PherfConstants#MONITOR_FREQUENCY} is reached a snapshot of
  * each monitor is taken and dumped out to a log file.
@@ -83,8 +82,9 @@ public class MonitorManager implements Workload {
             }
         }
         rowCount = new AtomicLong(0);
-        this.resultHandler =
-                new CSVResultHandler(PherfConstants.MONITOR_FILE_NAME, ResultFileDetails.CSV);
+        this.resultHandler = new CSVFileResultHandler();
+        this.resultHandler.setResultFileDetails(ResultFileDetails.CSV);
+        this.resultHandler.setResultFileName(PherfConstants.MONITOR_FILE_NAME);
     }
 
     @Override public synchronized void complete() {
@@ -176,9 +176,9 @@ public class MonitorManager implements Workload {
         ResultHandler handler = null;
         try {
             if (resultHandler.isClosed()) {
-                handler =
-                        new CSVResultHandler(PherfConstants.MONITOR_FILE_NAME,
-                                ResultFileDetails.CSV);
+                handler = new CSVFileResultHandler();
+                handler.setResultFileDetails(ResultFileDetails.CSV);
+                handler.setResultFileName(PherfConstants.MONITOR_FILE_NAME);
                 return handler.read();
             } else {
                 return resultHandler.read();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java
index 1a682da..c0b4bf7 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java
@@ -18,8 +18,8 @@
 
 package org.apache.phoenix.pherf.result;
 
-import org.apache.phoenix.pherf.PherfConstants.RunMode;
 import org.apache.phoenix.pherf.configuration.Query;
+import org.apache.phoenix.pherf.result.file.ResultFileDetails;
 import org.apache.phoenix.util.DateUtil;
 
 import java.util.ArrayList;
@@ -27,7 +27,11 @@ import java.util.Date;
 import java.util.List;
 
 public class QueryResult extends Query {
-    private List<ThreadTime> threadTimes = new ArrayList<ThreadTime>();
+    private List<ThreadTime> threadTimes = new ArrayList<>();
+
+    public QueryResult() {
+        super();
+    }
 
     public synchronized List<ThreadTime> getThreadTimes() {
         return this.threadTimes;
@@ -47,9 +51,6 @@ public class QueryResult extends Query {
         this.setId(query.getId());
     }
 
-    @SuppressWarnings("unused") public QueryResult() {
-    }
-
     public Date getStartTime() {
         Date startTime = null;
         for (ThreadTime tt : getThreadTimes()) {
@@ -108,18 +109,10 @@ public class QueryResult extends Query {
         return rowValues;
     }
 
-    private int getRunCount() {
-        int totalRunCount = 0;
-        for (ThreadTime tt : getThreadTimes()) {
-            totalRunCount += tt.getRunCount();
-        }
-        return totalRunCount;
-    }
-
-    public List<List<ResultValue>> getCsvDetailedRepresentation(ResultUtil util, RunMode runMode) {
+    public List<List<ResultValue>> getCsvDetailedRepresentation(ResultUtil util, ResultFileDetails details) {
         List<List<ResultValue>> rows = new ArrayList<>();
         for (ThreadTime tt : getThreadTimes()) {
-            for (List<ResultValue> runTime : runMode == RunMode.PERFORMANCE ?
+            for (List<ResultValue> runTime : details.isPerformance() ?
                     tt.getCsvPerformanceRepresentation(util) :
                     tt.getCsvFunctionalRepresentation(util)) {
                 List<ResultValue> rowValues = new ArrayList<>();
@@ -134,6 +127,14 @@ public class QueryResult extends Query {
         return rows;
     }
 
+    private int getRunCount() {
+        int totalRunCount = 0;
+        for (ThreadTime tt : getThreadTimes()) {
+            totalRunCount += tt.getRunCount();
+        }
+        return totalRunCount;
+    }
+
     private String getStartTimeText() {
         return (null == this.getStartTime()) ?
                 "" :

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java
index 5b71300..2d2acf7 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java
@@ -39,4 +39,10 @@ public interface ResultHandler {
     public boolean isClosed();
 
     public ResultFileDetails getResultFileDetails();
+
+    public String getResultFileName();
+
+    public void setResultFileDetails(ResultFileDetails details);
+
+    public void setResultFileName(String resultFileName);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
index 39d6a9c..6a79486 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
@@ -19,33 +19,52 @@
 package org.apache.phoenix.pherf.result;
 
 import org.apache.phoenix.pherf.PherfConstants;
-import org.apache.phoenix.pherf.PherfConstants.RunMode;
 import org.apache.phoenix.pherf.result.file.ResultFileDetails;
-import org.apache.phoenix.pherf.result.impl.CSVResultHandler;
+import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler;
 import org.apache.phoenix.pherf.result.impl.ImageResultHandler;
 import org.apache.phoenix.pherf.result.impl.XMLResultHandler;
+import org.apache.phoenix.util.InstanceResolver;
 
-import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.List;
 
 public class ResultManager {
     private final List<ResultHandler> resultHandlers;
     private final ResultUtil util;
-    private final PherfConstants.RunMode runMode;
+    private static final List<ResultHandler> defaultHandlers;
 
-    public ResultManager(String fileNameSeed, PherfConstants.RunMode runMode) {
-        this(runMode, Arrays.asList(new XMLResultHandler(fileNameSeed, ResultFileDetails.XML),
-                new ImageResultHandler(fileNameSeed, ResultFileDetails.IMAGE),
-                new CSVResultHandler(fileNameSeed, runMode == RunMode.PERFORMANCE ?
-                        ResultFileDetails.CSV_DETAILED_PERFORMANCE :
-                        ResultFileDetails.CSV_DETAILED_FUNCTIONAL),
-                new CSVResultHandler(fileNameSeed, ResultFileDetails.CSV_AGGREGATE_PERFORMANCE)));
+    static {
+        defaultHandlers = new ArrayList<>();
+        XMLResultHandler xmlResultHandler = new XMLResultHandler();
+        xmlResultHandler.setResultFileDetails(ResultFileDetails.XML);
+        defaultHandlers.add(xmlResultHandler);
+
+        ImageResultHandler imageResultHandler = new ImageResultHandler();
+        imageResultHandler.setResultFileDetails(ResultFileDetails.IMAGE);
+        defaultHandlers.add(imageResultHandler);
+
+        ResultHandler handlerAgg = new CSVFileResultHandler();
+        handlerAgg.setResultFileDetails(ResultFileDetails.CSV_AGGREGATE_PERFORMANCE);
+        defaultHandlers.add(handlerAgg);
+
+        ResultHandler handlerDet = new CSVFileResultHandler();
+        handlerDet.setResultFileDetails(ResultFileDetails.CSV_DETAILED_PERFORMANCE);
+        defaultHandlers.add(handlerDet);
+    }
+
+    public ResultManager(String fileNameSeed) {
+        this(fileNameSeed, InstanceResolver.get(ResultHandler.class, defaultHandlers));
     }
 
-    public ResultManager(PherfConstants.RunMode runMode, List<ResultHandler> resultHandlers) {
+    public ResultManager(String fileNameSeed, List<ResultHandler> resultHandlers) {
         this.resultHandlers = resultHandlers;
         util = new ResultUtil();
-        this.runMode = runMode;
+
+        for (ResultHandler resultHandler : resultHandlers) {
+            if (resultHandler.getResultFileName() == null) {
+                resultHandler.setResultFileName(fileNameSeed);
+            }
+        }
     }
 
     /**
@@ -59,7 +78,7 @@ public class ResultManager {
             util.ensureBaseResultDirExists();
             final DataModelResult dataModelResultCopy = new DataModelResult(result);
             for (ResultHandler handler : resultHandlers) {
-                util.write(handler, dataModelResultCopy, runMode);
+                util.write(handler, dataModelResultCopy);
             }
         } finally {
             for (ResultHandler handler : resultHandlers) {
@@ -84,13 +103,13 @@ public class ResultManager {
     public synchronized void write(List<DataModelResult> dataModelResults) throws Exception {
         util.ensureBaseResultDirExists();
 
-        CSVResultHandler detailsCSVWriter = null;
+        CSVFileResultHandler detailsCSVWriter = null;
         try {
-            detailsCSVWriter =
-                    new CSVResultHandler(PherfConstants.COMBINED_FILE_NAME,
-                            ResultFileDetails.CSV_DETAILED_PERFORMANCE);
+            detailsCSVWriter = new CSVFileResultHandler();
+            detailsCSVWriter.setResultFileDetails(ResultFileDetails.CSV_DETAILED_PERFORMANCE);
+            detailsCSVWriter.setResultFileName(PherfConstants.COMBINED_FILE_NAME);
             for (DataModelResult dataModelResult : dataModelResults) {
-                util.write(detailsCSVWriter, dataModelResult, runMode);
+                util.write(detailsCSVWriter, dataModelResult);
             }
         } finally {
             if (detailsCSVWriter != null) {
@@ -99,4 +118,8 @@ public class ResultManager {
             }
         }
     }
+
+    public List<ResultHandler> getResultHandlers() {
+        return resultHandlers;
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java
index 07dfa86..9a589f5 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java
@@ -19,8 +19,8 @@
 package org.apache.phoenix.pherf.result;
 
 import org.apache.phoenix.pherf.PherfConstants;
-import org.apache.phoenix.pherf.PherfConstants.RunMode;
 import org.apache.phoenix.pherf.result.file.ResultFileDetails;
+import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler;
 import org.apache.phoenix.pherf.result.impl.CSVResultHandler;
 import org.apache.phoenix.pherf.util.PhoenixUtil;
 
@@ -50,7 +50,10 @@ public class ResultUtil {
         CSVResultHandler writer = null;
         try {
             if (!dataLoadThreadTime.getThreadTime().isEmpty()) {
-                writer = new CSVResultHandler("Data_Load_Details", ResultFileDetails.CSV);
+                writer = new CSVFileResultHandler();
+                writer.setResultFileName("Data_Load_Details");
+                writer.setResultFileDetails(ResultFileDetails.CSV);
+
                 for (WriteThreadTime writeThreadTime : dataLoadThreadTime.getThreadTime()) {
                     List<ResultValue> rowValues = new ArrayList<>();
                     rowValues.add(new ResultValue(PhoenixUtil.getZookeeper()));
@@ -82,7 +85,10 @@ public class ResultUtil {
         CSVResultHandler writer = null;
         ResultFileDetails resultFileDetails = ResultFileDetails.CSV_AGGREGATE_DATA_LOAD;
         try {
-            writer = new CSVResultHandler("Data_Load_Summary", ResultFileDetails.CSV);
+            writer = new CSVFileResultHandler();
+            writer.setResultFileName("Data_Load_Summary");
+            writer.setResultFileDetails(resultFileDetails);
+
             for (TableLoadTime loadTime : dataLoadTime.getTableLoadTime()) {
                 List<ResultValue> rowValues = new ArrayList<>();
                 rowValues.add(new ResultValue(PhoenixUtil.getZookeeper()));
@@ -101,8 +107,8 @@ public class ResultUtil {
         }
     }
 
-    public synchronized void write(ResultHandler resultHandler, DataModelResult dataModelResult,
-            RunMode runMode) throws Exception {
+    public synchronized void write(ResultHandler resultHandler, DataModelResult dataModelResult)
+            throws Exception {
         ResultFileDetails resultFileDetails = resultHandler.getResultFileDetails();
         switch (resultFileDetails) {
         case CSV_AGGREGATE_PERFORMANCE:
@@ -110,7 +116,7 @@ public class ResultUtil {
         case CSV_DETAILED_FUNCTIONAL:
             List<List<ResultValue>>
                     rowDetails =
-                    getCSVResults(dataModelResult, resultFileDetails, runMode);
+                    getCSVResults(dataModelResult, resultFileDetails);
             for (List<ResultValue> row : rowDetails) {
                 Result
                         result =
@@ -159,8 +165,20 @@ public class ResultUtil {
         return str;
     }
 
+    /**
+     * Used by custom ResultWriters outside of Pherf's normal code base
+     *
+     * @return Header field as a {@link Result}
+     */
+    @SuppressWarnings("unused")
+    public Result getCSVHeaderAsResult(String row) {
+        List<ResultValue> resultValues = new ArrayList<>();
+        resultValues.add(new ResultValue(row));
+        return new Result(ResultFileDetails.CSV, row, resultValues);
+    }
+
     private List<List<ResultValue>> getCSVResults(DataModelResult dataModelResult,
-            ResultFileDetails resultFileDetails, RunMode runMode) {
+            ResultFileDetails resultFileDetails) {
         List<List<ResultValue>> rowList = new ArrayList<>();
 
         for (ScenarioResult result : dataModelResult.getScenarioResult()) {
@@ -175,7 +193,7 @@ public class ResultUtil {
                     case CSV_DETAILED_FUNCTIONAL:
                         List<List<ResultValue>>
                                 detailedRows =
-                                queryResult.getCsvDetailedRepresentation(this, runMode);
+                                queryResult.getCsvDetailedRepresentation(this, resultFileDetails);
                         for (List<ResultValue> detailedRowList : detailedRows) {
                             List<ResultValue> valueList = new ArrayList<>();
                             valueList.add(new ResultValue(convertNull(result.getTableName())));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java
index 63b6284..a85f830 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java
@@ -43,4 +43,9 @@ public enum ResultFileDetails {
     public Header getHeader() {
         return header;
     }
+
+    public boolean isPerformance() {
+        return (this == ResultFileDetails.CSV_AGGREGATE_PERFORMANCE)
+                || (this == CSV_DETAILED_PERFORMANCE);
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVFileResultHandler.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVFileResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVFileResultHandler.java
new file mode 100644
index 0000000..8ddae67
--- /dev/null
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVFileResultHandler.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing, software
+ *   distributed under the License is distributed on an "AS IS" BASIS,
+ *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *   See the License for the specific language governing permissions and
+ *   limitations under the License.
+ */
+
+package org.apache.phoenix.pherf.result.impl;
+
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVPrinter;
+import org.apache.commons.csv.CSVRecord;
+import org.apache.phoenix.pherf.PherfConstants;
+import org.apache.phoenix.pherf.result.Result;
+import org.apache.phoenix.pherf.result.ResultValue;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.List;
+
+public class CSVFileResultHandler extends CSVResultHandler {
+
+    public CSVFileResultHandler() {
+        super();
+    }
+
+    @Override
+    public synchronized void write(Result result) throws IOException{
+        util.ensureBaseResultDirExists();
+        open(result.getHeader());
+        super.write(result);
+    }
+
+    public synchronized List<Result> read() throws IOException {
+        CSVParser parser = null;
+        util.ensureBaseResultDirExists();
+        try {
+            File file = new File(resultFileName);
+            parser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT);
+            List<CSVRecord> records = parser.getRecords();
+            List<Result> results = new ArrayList<>();
+            String header = null;
+            for (CSVRecord record : records) {
+
+                // First record is the CSV Header
+                if (record.getRecordNumber() == 1) {
+                    header = record.toString();
+                    continue;
+                }
+                List<ResultValue> resultValues = new ArrayList<>();
+                for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) {
+                    resultValues.add(new ResultValue(val));
+                }
+                Result result = new Result(resultFileDetails, header, resultValues);
+                results.add(result);
+            }
+            return results;
+        } finally {
+            parser.close();
+        }
+    }
+
+    @Override
+    protected void open(String header) throws IOException {
+        // Check if a writer is already open so we only ever open one
+        if (csvPrinter != null) {
+            return;
+        }
+        csvPrinter = new CSVPrinter(new PrintWriter(resultFileName), CSVFormat.DEFAULT);
+        Object[] records = header.split(PherfConstants.RESULT_FILE_DELIMETER);
+        csvPrinter.printRecord(records);
+        isClosed = false;
+    }
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java
index e69f600..41fadb1 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java
@@ -15,72 +15,38 @@
  *   See the License for the specific language governing permissions and
  *   limitations under the License.
  */
-
 package org.apache.phoenix.pherf.result.impl;
 
-import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.csv.CSVParser;
 import org.apache.commons.csv.CSVPrinter;
-import org.apache.commons.csv.CSVRecord;
-import org.apache.phoenix.pherf.PherfConstants;
 import org.apache.phoenix.pherf.result.Result;
-import org.apache.phoenix.pherf.result.ResultHandler;
 import org.apache.phoenix.pherf.result.ResultUtil;
-import org.apache.phoenix.pherf.result.ResultValue;
-import org.apache.phoenix.pherf.result.file.ResultFileDetails;
 
-import java.io.File;
 import java.io.IOException;
-import java.io.PrintWriter;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * TODO Doc this class. Note that each instance that has a non unique file name will overwrite the last
- */
-public class CSVResultHandler implements ResultHandler {
-
-    private final ResultUtil util;
-    private final ResultFileDetails resultFileDetails;
-    private final String resultFileName;
-    private volatile CSVPrinter csvPrinter = null;
-    private volatile boolean isClosed = true;
 
-    public CSVResultHandler(String resultFileName, ResultFileDetails resultFileDetails) {
-        this(resultFileName, resultFileDetails, true);
-    }
+public abstract class CSVResultHandler extends DefaultResultHandler {
+    protected final ResultUtil util;
+    protected volatile CSVPrinter csvPrinter = null;
+    protected volatile boolean isClosed = true;
 
-    public CSVResultHandler(String resultFileName, ResultFileDetails resultFileDetails,
-            boolean generateFullFileName) {
+    public CSVResultHandler() {
         this.util = new ResultUtil();
-        PherfConstants constants = PherfConstants.create();
-        String resultDir = constants.getProperty("pherf.default.results.dir");
-
-        this.resultFileName =
-                generateFullFileName ?
-                        resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX
-                                + resultFileName + util.getSuffix() + resultFileDetails
-                                .getExtension().toString() :
-                        resultFileName;
-        this.resultFileDetails = resultFileDetails;
     }
 
-    @Override public synchronized void write(Result result) throws IOException {
-        util.ensureBaseResultDirExists();
-
-        open(result);
+    @Override
+    public synchronized void write(Result result) throws IOException {
         csvPrinter.printRecord(result.getResultValues());
         flush();
     }
 
-    @Override public synchronized void flush() throws IOException {
+    @Override
+    public synchronized void flush() throws IOException {
         if (csvPrinter != null) {
             csvPrinter.flush();
         }
     }
 
-    @Override public synchronized void close() throws IOException {
+    @Override
+    public synchronized void close() throws IOException {
         if (csvPrinter != null) {
             csvPrinter.flush();
             csvPrinter.close();
@@ -88,51 +54,15 @@ public class CSVResultHandler implements ResultHandler {
         }
     }
 
-    @Override public synchronized List<Result> read() throws IOException {
-        CSVParser parser = null;
-        util.ensureBaseResultDirExists();
-        try {
-            File file = new File(resultFileName);
-            parser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT);
-            List<CSVRecord> records = parser.getRecords();
-            List<Result> results = new ArrayList<>();
-            String header = null;
-            for (CSVRecord record : records) {
-
-                // First record is the CSV Header
-                if (record.getRecordNumber() == 1) {
-                    header = record.toString();
-                    continue;
-                }
-                List<ResultValue> resultValues = new ArrayList<>();
-                for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) {
-                    resultValues.add(new ResultValue(val));
-                }
-                Result result = new Result(resultFileDetails, header, resultValues);
-                results.add(result);
-            }
-            return results;
-        } finally {
-            parser.close();
-        }
-    }
-
-    private void open(Result result) throws IOException {
-        // Check if already so we only open one writer
-        if (csvPrinter != null) {
-            return;
-        }
-        csvPrinter = new CSVPrinter(new PrintWriter(resultFileName), CSVFormat.DEFAULT);
-        Object[] records = result.getHeader().split(PherfConstants.RESULT_FILE_DELIMETER);
-        csvPrinter.printRecord(records);
-        isClosed = false;
-    }
-
-    @Override public synchronized boolean isClosed() {
+    @Override
+    public synchronized boolean isClosed() {
         return isClosed;
     }
 
-    @Override public ResultFileDetails getResultFileDetails() {
-        return resultFileDetails;
-    }
+    /**
+     * This method is meant to open the connection to the target CSV location
+     * @param header {@link String} Comma separated list of header values for CSV
+     * @throws IOException
+     */
+    protected abstract void open(String header) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/DefaultResultHandler.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/DefaultResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/DefaultResultHandler.java
new file mode 100644
index 0000000..22fb625
--- /dev/null
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/DefaultResultHandler.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing, software
+ *   distributed under the License is distributed on an "AS IS" BASIS,
+ *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *   See the License for the specific language governing permissions and
+ *   limitations under the License.
+ */
+package org.apache.phoenix.pherf.result.impl;
+
+import org.apache.phoenix.pherf.PherfConstants;
+import org.apache.phoenix.pherf.result.ResultHandler;
+import org.apache.phoenix.pherf.result.ResultUtil;
+import org.apache.phoenix.pherf.result.file.ResultFileDetails;
+
+public abstract class DefaultResultHandler implements ResultHandler{
+    protected String resultFileName;
+    protected ResultFileDetails resultFileDetails;
+    protected final String resultDir;
+    protected final ResultUtil util;
+
+    public DefaultResultHandler() {
+        util = new ResultUtil();
+        PherfConstants constants = PherfConstants.create();
+        this.resultDir = constants.getProperty("pherf.default.results.dir");
+    }
+
+    /**
+     * {@link DefaultResultHandler#setResultFileDetails(ResultFileDetails)} Must be called prior to
+     * setting the file name. Otherwise you will get an NPE.
+     *
+     * TODO Change this so NPE is not possible. Needs a bit of refactoring here
+     *
+     * @param resultFileName Base name of file
+     */
+    @Override
+    public void setResultFileName(String resultFileName) {
+        this.resultFileName =
+                resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX
+                        + resultFileName + util.getSuffix() + getResultFileDetails()
+                        .getExtension().toString();
+    }
+
+    @Override
+    public void setResultFileDetails(ResultFileDetails details) {
+        this.resultFileDetails = details;
+    }
+
+    @Override
+    public String getResultFileName() {
+        return resultFileName;
+    }
+
+    @Override
+    public ResultFileDetails getResultFileDetails() {
+        return resultFileDetails;
+    }
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java
index 5c3eac1..f25ce4e 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java
@@ -18,7 +18,6 @@
 
 package org.apache.phoenix.pherf.result.impl;
 
-import org.apache.phoenix.pherf.PherfConstants;
 import org.apache.phoenix.pherf.result.*;
 import org.apache.phoenix.pherf.result.file.ResultFileDetails;
 import org.jfree.chart.ChartFactory;
@@ -34,30 +33,14 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 
-public class ImageResultHandler implements ResultHandler {
-    private final String resultFileName;
-    private final ResultFileDetails resultFileDetails;
+public class ImageResultHandler extends DefaultResultHandler{
 
-    public ImageResultHandler(String resultFileName, ResultFileDetails resultFileDetails) {
-        this(resultFileName, resultFileDetails, true);
+    public ImageResultHandler() {
+        super();
     }
 
-    public ImageResultHandler(String resultFileName, ResultFileDetails resultFileDetails,
-            boolean generateFullFileName) {
-        ResultUtil util = new ResultUtil();
-        PherfConstants constants = PherfConstants.create();
-        String resultDir = constants.getProperty("pherf.default.results.dir");
-
-        this.resultFileName =
-                generateFullFileName ?
-                        resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX
-                                + resultFileName + util.getSuffix() + resultFileDetails
-                                .getExtension().toString() :
-                        resultFileName;
-        this.resultFileDetails = resultFileDetails;
-    }
-
-    @Override public synchronized void write(Result result) throws Exception {
+    @Override
+    public synchronized void write(Result result) throws Exception {
         TimeSeriesCollection timeSeriesCollection = new TimeSeriesCollection();
         int rowCount = 0;
         int maxLegendCount = 20;
@@ -112,23 +95,32 @@ public class ImageResultHandler implements ResultHandler {
 
     }
 
-    @Override public synchronized void flush() throws Exception {
+    @Override
+    public synchronized void flush() throws Exception {
 
     }
 
-    @Override public synchronized void close() throws Exception {
+    @Override
+    public synchronized void close() throws Exception {
 
     }
 
-    @Override public List<Result> read() throws Exception {
+    @Override
+    public List<Result> read() throws Exception {
         return null;
     }
 
-    @Override public boolean isClosed() {
-        return false;
+    /**
+     * The file is never left open; this implementation always overwrites the existing file.
+     * @return always {@code true}, since no file handle is kept open
+     */
+    @Override
+    public boolean isClosed() {
+        return true;
     }
 
-    @Override public ResultFileDetails getResultFileDetails() {
-        return resultFileDetails;
+    @Override
+    public void setResultFileDetails(ResultFileDetails details) {
+        super.setResultFileDetails(ResultFileDetails.IMAGE);
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java
index 009ae21..990c9be 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java
@@ -18,7 +18,6 @@
 
 package org.apache.phoenix.pherf.result.impl;
 
-import org.apache.phoenix.pherf.PherfConstants;
 import org.apache.phoenix.pherf.result.*;
 import org.apache.phoenix.pherf.result.file.ResultFileDetails;
 
@@ -31,30 +30,14 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-public class XMLResultHandler implements ResultHandler {
-    private final String resultFileName;
-    private final ResultFileDetails resultFileDetails;
+public class XMLResultHandler extends DefaultResultHandler{
 
-    public XMLResultHandler(String resultFileName, ResultFileDetails resultFileDetails) {
-        this(resultFileName, resultFileDetails, true);
+    public XMLResultHandler() {
+        super();
     }
 
-    public XMLResultHandler(String resultFileName, ResultFileDetails resultFileDetails,
-            boolean generateFullFileName) {
-        ResultUtil util = new ResultUtil();
-        PherfConstants constants = PherfConstants.create();
-        String resultDir = constants.getProperty("pherf.default.results.dir");
-
-        this.resultFileName =
-                generateFullFileName ?
-                        resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX
-                                + resultFileName + util.getSuffix() + resultFileDetails
-                                .getExtension().toString() :
-                        resultFileName;
-        this.resultFileDetails = resultFileDetails;
-    }
-
-    @Override public synchronized void write(Result result) throws Exception {
+    @Override
+    public synchronized void write(Result result) throws Exception {
         FileOutputStream os = null;
         JAXBContext jaxbContext = JAXBContext.newInstance(DataModelResult.class);
         Marshaller jaxbMarshaller = jaxbContext.createMarshaller();
@@ -71,15 +54,18 @@ public class XMLResultHandler implements ResultHandler {
         }
     }
 
-    @Override public synchronized void flush() throws IOException {
+    @Override
+    public synchronized void flush() throws IOException {
         return;
     }
 
-    @Override public synchronized void close() throws IOException {
+    @Override
+    public synchronized void close() throws IOException {
         return;
     }
 
-    @Override public synchronized List<Result> read() throws Exception {
+    @Override
+    public synchronized List<Result> read() throws Exception {
 
         JAXBContext jaxbContext = JAXBContext.newInstance(DataModelResult.class);
         Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
@@ -91,11 +77,13 @@ public class XMLResultHandler implements ResultHandler {
         return results;
     }
 
-    @Override public boolean isClosed() {
+    @Override
+    public boolean isClosed() {
         return true;
     }
 
-    @Override public ResultFileDetails getResultFileDetails() {
-        return resultFileDetails;
+    @Override
+    public void setResultFileDetails(ResultFileDetails details) {
+        super.setResultFileDetails(ResultFileDetails.XML);
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
index 19b6bd2..fad06a1 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
@@ -18,34 +18,26 @@
 
 package org.apache.phoenix.pherf.util;
 
-import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME;
-import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
+import org.apache.phoenix.pherf.PherfConstants;
+import org.apache.phoenix.pherf.configuration.*;
+import org.apache.phoenix.pherf.jmx.MonitorManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
+import java.sql.*;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.phoenix.pherf.PherfConstants;
-import org.apache.phoenix.pherf.configuration.Column;
-import org.apache.phoenix.pherf.configuration.DataTypeMapping;
-import org.apache.phoenix.pherf.configuration.Query;
-import org.apache.phoenix.pherf.configuration.QuerySet;
-import org.apache.phoenix.pherf.configuration.Scenario;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
 
 // TODO This class needs to be cleanup up a bit. I just wanted to get an initial placeholder in.
 public class PhoenixUtil {
-	private static final Logger logger = LoggerFactory.getLogger(PhoenixUtil.class);
-	private static String zookeeper;
-	private static int rowCountOverride = 0;
+    private static final Logger logger = LoggerFactory.getLogger(PhoenixUtil.class);
+    private static String zookeeper;
+    private static int rowCountOverride = 0;
     private boolean testEnabled;
     private static PhoenixUtil instance;
 
@@ -66,10 +58,10 @@ public class PhoenixUtil {
         return instance;
     }
 
-    public Connection getConnection() throws Exception{
-    	return getConnection(null);
+    public Connection getConnection() throws Exception {
+        return getConnection(null);
     }
-	
+
     public Connection getConnection(String tenantId) throws Exception {
         return getConnection(tenantId, testEnabled);
     }
@@ -104,15 +96,17 @@ public class PhoenixUtil {
 
     /**
      * Execute statement
+     *
      * @param sql
      * @param connection
      * @return
      * @throws SQLException
      */
-    public boolean executeStatementThrowException(String sql, Connection connection) throws SQLException {
-    	boolean result = false;
-    	PreparedStatement preparedStatement = null;
-    	try {
+    public boolean executeStatementThrowException(String sql, Connection connection)
+            throws SQLException {
+        boolean result = false;
+        PreparedStatement preparedStatement = null;
+        try {
             preparedStatement = connection.prepareStatement(sql);
             result = preparedStatement.execute();
             connection.commit();
@@ -121,9 +115,9 @@ public class PhoenixUtil {
         }
         return result;
     }
-    
+
     public boolean executeStatement(String sql, Connection connection) {
-    	boolean result = false;
+        boolean result = false;
         PreparedStatement preparedStatement = null;
         try {
             preparedStatement = connection.prepareStatement(sql);
@@ -143,7 +137,7 @@ public class PhoenixUtil {
 
     @SuppressWarnings("unused")
     public boolean executeStatement(PreparedStatement preparedStatement, Connection connection) {
-    	boolean result = false;
+        boolean result = false;
         try {
             result = preparedStatement.execute();
             connection.commit();
@@ -154,72 +148,75 @@ public class PhoenixUtil {
     }
 
     /**
-     * Delete existing tables with schema name set as {@link PherfConstants#PHERF_SCHEMA_NAME} with regex comparison 
-     * 
+     * Delete existing tables with schema name set as {@link PherfConstants#PHERF_SCHEMA_NAME} with regex comparison
+     *
      * @param regexMatch
      * @throws SQLException
      * @throws Exception
      */
     public void deleteTables(String regexMatch) throws Exception {
-    	regexMatch = regexMatch.toUpperCase().replace("ALL", ".*");
-    	Connection conn = getConnection();
-    	try {
-        	ResultSet resultSet = getTableMetaData(PherfConstants.PHERF_SCHEMA_NAME, null, conn);
-			while (resultSet.next()) {
-				String tableName = resultSet.getString(TABLE_SCHEM) == null ? resultSet
-						.getString(TABLE_NAME) : resultSet
-						.getString(TABLE_SCHEM)
-						+ "."
-						+ resultSet.getString(TABLE_NAME);
-				if (tableName.matches(regexMatch)) {
-					logger.info("\nDropping " + tableName);
-					try {
-						executeStatementThrowException("DROP TABLE "
-								+ tableName + " CASCADE", conn);
-					} catch (org.apache.phoenix.schema.TableNotFoundException tnf) {
-						logger.error("Table might be already be deleted via cascade. Schema: "
-								+ tnf.getSchemaName()
-								+ " Table: "
-								+ tnf.getTableName());
-					}
-				}
-			}
-    	} finally {
-    		conn.close();
-    	}
+        regexMatch = regexMatch.toUpperCase().replace("ALL", ".*");
+        Connection conn = getConnection();
+        try {
+            ResultSet resultSet = getTableMetaData(PherfConstants.PHERF_SCHEMA_NAME, null, conn);
+            while (resultSet.next()) {
+                String tableName = resultSet.getString(TABLE_SCHEM) == null ? resultSet
+                        .getString(TABLE_NAME) : resultSet
+                        .getString(TABLE_SCHEM)
+                        + "."
+                        + resultSet.getString(TABLE_NAME);
+                if (tableName.matches(regexMatch)) {
+                    logger.info("\nDropping " + tableName);
+                    try {
+                        executeStatementThrowException("DROP TABLE "
+                                + tableName + " CASCADE", conn);
+                    } catch (org.apache.phoenix.schema.TableNotFoundException tnf) {
+                        logger.error("Table might be already be deleted via cascade. Schema: "
+                                + tnf.getSchemaName()
+                                + " Table: "
+                                + tnf.getTableName());
+                    }
+                }
+            }
+        } finally {
+            conn.close();
+        }
     }
-    
-    public ResultSet getTableMetaData(String schemaName, String tableName, Connection connection) throws SQLException {
-    	DatabaseMetaData dbmd = connection.getMetaData();
-    	ResultSet resultSet = dbmd.getTables(null, schemaName, tableName, null);
-    	return resultSet;
+
+    public ResultSet getTableMetaData(String schemaName, String tableName, Connection connection)
+            throws SQLException {
+        DatabaseMetaData dbmd = connection.getMetaData();
+        ResultSet resultSet = dbmd.getTables(null, schemaName, tableName, null);
+        return resultSet;
     }
-    
-    public ResultSet getColumnsMetaData(String schemaName, String tableName, Connection connection) throws SQLException {
-    	DatabaseMetaData dbmd = connection.getMetaData();
-    	ResultSet resultSet = dbmd.getColumns(null, schemaName, tableName, null);
-    	return resultSet;
+
+    public ResultSet getColumnsMetaData(String schemaName, String tableName, Connection connection)
+            throws SQLException {
+        DatabaseMetaData dbmd = connection.getMetaData();
+        ResultSet resultSet = dbmd.getColumns(null, schemaName, tableName, null);
+        return resultSet;
     }
-    
-    public synchronized List<Column> getColumnsFromPhoenix(String schemaName, String tableName, Connection connection) throws SQLException {
-    	List<Column> columnList = new ArrayList<Column>();
-    	ResultSet resultSet = null;
-    	try {
-    		resultSet = getColumnsMetaData(schemaName, tableName, connection);
-    		while (resultSet.next()) {
-    			Column column = new Column();
-    	        column.setName(resultSet.getString("COLUMN_NAME"));
-    	        column.setType(DataTypeMapping.valueOf(resultSet.getString("TYPE_NAME")));
-    	        column.setLength(resultSet.getInt("COLUMN_SIZE"));
-    	        columnList.add(column);
-   	        }
-    	} finally {
-    		if (null != resultSet) { 
-    			resultSet.close();
-    		}
-    	}
-    	
-    	return Collections.unmodifiableList(columnList);
+
+    public synchronized List<Column> getColumnsFromPhoenix(String schemaName, String tableName,
+            Connection connection) throws SQLException {
+        List<Column> columnList = new ArrayList<Column>();
+        ResultSet resultSet = null;
+        try {
+            resultSet = getColumnsMetaData(schemaName, tableName, connection);
+            while (resultSet.next()) {
+                Column column = new Column();
+                column.setName(resultSet.getString("COLUMN_NAME"));
+                column.setType(DataTypeMapping.valueOf(resultSet.getString("TYPE_NAME")));
+                column.setLength(resultSet.getInt("COLUMN_SIZE"));
+                columnList.add(column);
+            }
+        } finally {
+            if (null != resultSet) {
+                resultSet.close();
+            }
+        }
+
+        return Collections.unmodifiableList(columnList);
     }
 
     /**
@@ -248,22 +245,22 @@ public class PhoenixUtil {
     }
 
     public static String getZookeeper() {
-		return zookeeper;
-	}
+        return zookeeper;
+    }
+
+    public static void setZookeeper(String zookeeper) {
+        logger.info("Setting zookeeper: " + zookeeper);
+        PhoenixUtil.zookeeper = zookeeper;
+    }
+
+    public static int getRowCountOverride() {
+        return rowCountOverride;
+    }
+
+    public static void setRowCountOverride(int rowCountOverride) {
+        PhoenixUtil.rowCountOverride = rowCountOverride;
+    }
 
-	public static void setZookeeper(String zookeeper) {
-		logger.info("Setting zookeeper: " + zookeeper);
-		PhoenixUtil.zookeeper = zookeeper;
-	}
-	
-	public static int getRowCountOverride() {
-		return rowCountOverride;
-	}
-	
-	public static void setRowCountOverride(int rowCountOverride) {
-		PhoenixUtil.rowCountOverride = rowCountOverride;
-	}
-	
     /**
      * Update Phoenix table stats
      *
@@ -274,4 +271,12 @@ public class PhoenixUtil {
         logger.info("Updating stats for " + tableName);
         executeStatement("UPDATE STATISTICS " + tableName, scenario);
     }
+
+    public MonitorManager loadCustomMonitors(MonitorManager manager) throws Exception {
+        Properties
+                properties =
+                PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, false);
+
+        return manager;
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java
index efb3da9..524724c 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java
@@ -24,8 +24,6 @@ import java.sql.ResultSet;
 import java.util.Calendar;
 import java.util.Date;
 
-import org.apache.phoenix.pherf.PherfConstants.RunMode;
-
 import org.apache.phoenix.pherf.result.DataModelResult;
 import org.apache.phoenix.pherf.result.ResultManager;
 import org.apache.phoenix.pherf.result.RunTime;
@@ -66,12 +64,13 @@ class MultiThreadedRunner implements Runnable {
         this.dataModelResult = dataModelResult;
         this.numberOfExecutions = numberOfExecutions;
         this.executionDurationInMs = executionDurationInMs;
-        this.resultManager = new ResultManager(dataModelResult.getName(), RunMode.PERFORMANCE);
+        this.resultManager = new ResultManager(dataModelResult.getName());
     }
 
     /**
      * Executes run for a minimum of number of execution or execution duration
      */
+    @Override
     public void run() {
         logger.info("\n\nThread Starting " + threadName + " ; " + query.getStatement() + " for "
                 + numberOfExecutions + "times\n\n");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
index 4ab76c8..5a7c49f 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
@@ -21,7 +21,6 @@ package org.apache.phoenix.pherf.workload;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.phoenix.pherf.PherfConstants.GeneratePhoenixStats;
-import org.apache.phoenix.pherf.PherfConstants.RunMode;
 import org.apache.phoenix.pherf.configuration.*;
 import org.apache.phoenix.pherf.result.*;
 import org.apache.phoenix.pherf.util.PhoenixUtil;
@@ -32,43 +31,42 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
 
 public class QueryExecutor implements Workload {
     private static final Logger logger = LoggerFactory.getLogger(QueryExecutor.class);
     private List<DataModel> dataModels;
     private String queryHint;
-    private final RunMode runMode;
     private final boolean exportCSV;
-    private final ExecutorService pool;
     private final XMLConfigParser parser;
     private final PhoenixUtil util;
+    private final WorkloadExecutor workloadExecutor;
 
-    public QueryExecutor(XMLConfigParser parser, PhoenixUtil util, ExecutorService pool) {
-        this(parser, util, pool, parser.getDataModels(), null, false, RunMode.PERFORMANCE);
+    public QueryExecutor(XMLConfigParser parser, PhoenixUtil util,
+            WorkloadExecutor workloadExecutor) {
+        this(parser, util, workloadExecutor, parser.getDataModels(), null, false);
     }
 
-    public QueryExecutor(XMLConfigParser parser, PhoenixUtil util, ExecutorService pool,
-            List<DataModel> dataModels, String queryHint, boolean exportCSV, RunMode runMode) {
+    public QueryExecutor(XMLConfigParser parser, PhoenixUtil util,
+            WorkloadExecutor workloadExecutor, List<DataModel> dataModels, String queryHint,
+            boolean exportCSV) {
         this.parser = parser;
         this.queryHint = queryHint;
         this.exportCSV = exportCSV;
-        this.runMode = runMode;
         this.dataModels = dataModels;
-        this.pool = pool;
         this.util = util;
+        this.workloadExecutor = workloadExecutor;
     }
 
-    @Override public void complete() {
-
-    }
+    @Override
+    public void complete() {}
 
     /**
      * Calls in Multithreaded Query Executor for all datamodels
      *
      * @throws Exception
      */
+    @Override
     public Runnable execute() throws Exception {
         Runnable runnable = null;
         for (DataModel dataModel : dataModels) {
@@ -89,7 +87,8 @@ public class QueryExecutor implements Workload {
      */
     protected Runnable exportAllScenarios(final DataModel dataModel) throws Exception {
         return new Runnable() {
-            @Override public void run() {
+            @Override
+            public void run() {
                 try {
 
                     List<Scenario> scenarios = dataModel.getScenarios();
@@ -124,7 +123,7 @@ public class QueryExecutor implements Workload {
                         new DataModelResult(dataModel, PhoenixUtil.getZookeeper());
                 ResultManager
                         resultManager =
-                        new ResultManager(dataModelResult.getName(), QueryExecutor.this.runMode);
+                        new ResultManager(dataModelResult.getName());
 
                 dataModelResults.add(dataModelResult);
                 List<Scenario> scenarios = dataModel.getScenarios();
@@ -144,7 +143,7 @@ public class QueryExecutor implements Workload {
                                 logger.debug("Inserting write workload ( " + i + " ) of ( "
                                         + writerThreadCount + " )");
                                 Workload writes = new WriteWorkload(PhoenixUtil.create(), parser, GeneratePhoenixStats.NO);
-                                pool.submit(writes.execute());
+                                workloadExecutor.add(writes);
                             }
                         }
 
@@ -193,7 +192,7 @@ public class QueryExecutor implements Workload {
                             thread =
                             executeRunner((i + 1) + "," + cr, dataModelResult, queryResult,
                                     querySetResult);
-                    threads.add(pool.submit(thread));
+                    threads.add(workloadExecutor.getPool().submit(thread));
                 }
 
                 for (Future thread : threads) {
@@ -228,7 +227,7 @@ public class QueryExecutor implements Workload {
                             thread =
                             executeRunner((i + 1) + "," + cr, dataModelResult, queryResult,
                                     querySetResult);
-                    threads.add(pool.submit(thread));
+                    threads.add(workloadExecutor.getPool().submit(thread));
                 }
 
                 for (Future thread : threads) {
@@ -259,7 +258,7 @@ public class QueryExecutor implements Workload {
         queryResult.setHint(this.queryHint);
         logger.info("\nExecuting query " + queryResult.getStatement());
         Runnable thread;
-        if (this.runMode == RunMode.FUNCTIONAL) {
+        if (workloadExecutor.isPerformance()) {
             thread =
                     new MultithreadedDiffer(threadTime.getThreadName(), queryResult, threadTime,
                             querySet.getNumberOfExecutions(), querySet.getExecutionDurationInMs());

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java
index a65b4aa..3cde7ae 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java
@@ -31,6 +31,7 @@ import java.util.concurrent.*;
 public class WorkloadExecutor {
     private static final Logger logger = LoggerFactory.getLogger(WorkloadExecutor.class);
     private final int poolSize;
+    private final boolean isPerformance;
 
     // Jobs can be accessed by multiple threads
     private final Map<Workload, Future> jobs = new ConcurrentHashMap<>();
@@ -38,14 +39,15 @@ public class WorkloadExecutor {
     private final ExecutorService pool;
 
     public WorkloadExecutor() throws Exception {
-        this(PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES));
+        this(PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, false));
     }
 
     public WorkloadExecutor(Properties properties) throws Exception {
-        this(properties, new ArrayList());
+        this(properties, new ArrayList(), true);
     }
 
-    public WorkloadExecutor(Properties properties, List<Workload> workloads) throws Exception {
+    public WorkloadExecutor(Properties properties, List<Workload> workloads, boolean isPerformance) throws Exception {
+        this.isPerformance = isPerformance;
         this.poolSize =
                 (properties.getProperty("pherf.default.threadpool") == null) ?
                         PherfConstants.DEFAULT_THREAD_POOL_SIZE :
@@ -102,10 +104,19 @@ public class WorkloadExecutor {
         pool.shutdownNow();
     }
 
+    /**
+     * TODO This should be removed. Access to the pool should be restricted, and callers should use Workloads instead.
+     *
+     * @return {@link ExecutorService} Exposes the underlying thread pool
+     */
     public ExecutorService getPool() {
         return pool;
     }
 
+    public boolean isPerformance() {
+        return isPerformance;
+    }
+
     private void init(List<Workload> workloads) throws Exception {
         for (Workload workload : workloads) {
             this.jobs.put(workload, pool.submit(workload.execute()));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
index 5705885..f9d1ee6 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
@@ -79,7 +79,8 @@ public class WriteWorkload implements Workload {
 
     public WriteWorkload(PhoenixUtil phoenixUtil, XMLConfigParser parser, Scenario scenario, GeneratePhoenixStats generateStatistics)
             throws Exception {
-        this(phoenixUtil, PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES),
+        this(phoenixUtil, PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES,
+                false),
                 parser, scenario, generateStatistics);
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java
index 731a57a..80ab971 100644
--- a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java
+++ b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java
@@ -36,7 +36,7 @@ public class PherfTest {
     public void testUnknownOption() {
         String[] args = {"-drop", "all", "-q", "-m","-bsOption"};
 
-        // Makes sure that System.exit(1) is called. Release is a required param.
+        // Makes sure that System.exit(1) is called.
         exit.expectSystemExitWithStatus(1);
         Pherf.main(args);
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java
index 5c455fc..1497e77 100644
--- a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java
+++ b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java
@@ -24,6 +24,7 @@ import org.junit.BeforeClass;
 import java.util.Properties;
 
 public class ResultBaseTest {
+    protected static PherfConstants constants;
     private static boolean isSetUpDone = false;
 
     @BeforeClass
@@ -33,8 +34,8 @@ public class ResultBaseTest {
         }
 
         ResultUtil util = new ResultUtil();
-        PherfConstants constants = PherfConstants.create();
-        Properties properties = constants.getProperties(PherfConstants.PHERF_PROPERTIES);
+        constants = PherfConstants.create();
+        Properties properties = constants.getProperties(PherfConstants.PHERF_PROPERTIES, false);
         String dir = properties.getProperty("pherf.default.results.dir");
         String targetDir = "target/" + dir;
         properties.setProperty("pherf.default.results.dir", targetDir);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f9a5a925/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
index 4ccf95c..81d5fd0 100644
--- a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
+++ b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
@@ -25,12 +25,11 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
-import org.apache.phoenix.pherf.PherfConstants.RunMode;
 import org.apache.phoenix.pherf.jmx.MonitorManager;
 
 import org.apache.phoenix.pherf.result.file.Extension;
 import org.apache.phoenix.pherf.result.file.ResultFileDetails;
-import org.apache.phoenix.pherf.result.impl.CSVResultHandler;
+import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler;
 import org.apache.phoenix.pherf.result.impl.XMLResultHandler;
 import org.apache.phoenix.pherf.result.*;
 import org.junit.Test;
@@ -49,7 +48,9 @@ public class ResultTest extends ResultBaseTest {
         }
 
         try {
-            resultMonitorWriter = new CSVResultHandler(PherfConstants.MONITOR_FILE_NAME, ResultFileDetails.CSV_MONITOR);
+            resultMonitorWriter = new CSVFileResultHandler();
+            resultMonitorWriter.setResultFileDetails(ResultFileDetails.CSV_MONITOR);
+            resultMonitorWriter.setResultFileName(PherfConstants.MONITOR_FILE_NAME);
             Result
                     result = new Result(ResultFileDetails.CSV_MONITOR, ResultFileDetails.CSV_MONITOR.getHeader().toString(), resultValues);
             resultMonitorWriter.write(result);
@@ -110,9 +111,56 @@ public class ResultTest extends ResultBaseTest {
     public void testResult() throws Exception {
         String filename = "testresult";
         ResultHandler xmlResultHandler = null;
-        ResultManager resultManager = new ResultManager(filename, RunMode.PERFORMANCE);
+        ResultManager resultManager = new ResultManager(filename);
+        assertTrue("Default Handlers were not initialized.", resultManager.getResultHandlers().size() > 0);
 
         // write result to file
+        DataModelResult dataModelResult = setUpDataModelResult();
+        resultManager.write(dataModelResult);
+
+        // Put some stuff in a combined file
+        List<DataModelResult> modelResults = new ArrayList<>();
+        modelResults.add(dataModelResult);
+        modelResults.add(dataModelResult);
+        resultManager.write(modelResults);
+
+        // read result from file
+        xmlResultHandler = new XMLResultHandler();
+        xmlResultHandler.setResultFileDetails(ResultFileDetails.XML);
+        xmlResultHandler.setResultFileName(filename);
+
+        List<Result> resultList = xmlResultHandler.read();
+        ResultValue<DataModelResult> resultValue = resultList.get(0).getResultValues().get(0);
+        DataModelResult dataModelResultFromFile = resultValue.getResultValue();
+
+        ScenarioResult scenarioResultFromFile = dataModelResultFromFile.getScenarioResult().get(0);
+        QuerySetResult querySetResultFromFile = scenarioResultFromFile.getQuerySetResult().get(0);
+        QueryResult queryResultFromFile = querySetResultFromFile.getQueryResults().get(0);
+        ThreadTime ttFromFile = queryResultFromFile.getThreadTimes().get(0);
+
+        // thread level verification
+        assertEquals(10, (int) ttFromFile.getMinTimeInMs().getElapsedDurationInMs());
+        assertEquals(30, (int) ttFromFile.getMaxTimeInMs().getElapsedDurationInMs());
+        assertEquals(20, (int) ttFromFile.getAvgTimeInMs());
+
+        // 3rd runtime has the earliest start time, therefore that's what's expected.
+        QueryResult
+                qr =
+                dataModelResult.getScenarioResult().get(0).getQuerySetResult().get(0)
+                        .getQueryResults().get(0);
+        List<RunTime> runTimes = qr.getThreadTimes().get(0).getRunTimesInMs();
+        assertEquals(runTimes.get(2).getStartTime(), ttFromFile.getStartTime());
+        assertEquals(runTimes.get(0).getResultRowCount(), ttFromFile.getRunTimesInMs().get(0).getResultRowCount());
+        assertEquals(runTimes.get(1).getResultRowCount(), ttFromFile.getRunTimesInMs().get(1).getResultRowCount());
+        assertEquals(runTimes.get(2).getResultRowCount(), ttFromFile.getRunTimesInMs().get(2).getResultRowCount());
+
+        // query result level verification
+        assertEquals(10, queryResultFromFile.getAvgMinRunTimeInMs());
+        assertEquals(30, queryResultFromFile.getAvgMaxRunTimeInMs());
+        assertEquals(20, queryResultFromFile.getAvgRunTimeInMs());
+    }
+
+    private DataModelResult setUpDataModelResult() {
         DataModelResult dataModelResult = new DataModelResult();
         dataModelResult.setZookeeper("mytestzk");
         ScenarioResult scenarioResult = new ScenarioResult();
@@ -134,7 +182,8 @@ public class ResultTest extends ResultBaseTest {
         query.setTenantId("tennantID123");
         query.setStatement("Select    * \n" + "from    FHA");
         query2.setStatement("Select a, b, c  * \n" + "from    FHA2");
-        assertEquals("Expected consecutive spaces to be normalized", "Select * from FHA", query.getStatement());
+        assertEquals("Expected consecutive spaces to be normalized", "Select * from FHA",
+                query.getStatement());
 
         QueryResult queryResult = new QueryResult(query);
         QueryResult queryResult2 = new QueryResult(query2);
@@ -156,42 +205,6 @@ public class ResultTest extends ResultBaseTest {
         queryResult.getThreadTimes().add(tt);
         queryResult2.getThreadTimes().add(tt);
 
-        //resultUtil.writeResultToFile(dataModelResult, filename, RunMode.PERFORMANCE);
-        resultManager.write(dataModelResult);
-
-        // Put some stuff in a combined file
-        List<DataModelResult> modelResults = new ArrayList<>();
-        modelResults.add(dataModelResult);
-        modelResults.add(dataModelResult);
-        resultManager.write(modelResults);
-
-        // read result from file
-        xmlResultHandler = new XMLResultHandler(filename, ResultFileDetails.XML);
-        List<Result> resultList = xmlResultHandler.read();
-        ResultValue<DataModelResult> resultValue = resultList.get(0).getResultValues().get(0);
-        DataModelResult dataModelResultFromFile = resultValue.getResultValue();
-
-        ScenarioResult scenarioResultFromFile = dataModelResultFromFile.getScenarioResult().get(0);
-        QuerySetResult querySetResultFromFile = scenarioResultFromFile.getQuerySetResult().get(0);
-        QueryResult queryResultFromFile = querySetResultFromFile.getQueryResults().get(0);
-        ThreadTime ttFromFile = queryResultFromFile.getThreadTimes().get(0);
-
-        // thread level verification
-        assertEquals(10, (int) ttFromFile.getMinTimeInMs().getElapsedDurationInMs());
-        assertEquals(30, (int) ttFromFile.getMaxTimeInMs().getElapsedDurationInMs());
-        assertEquals(20, (int) ttFromFile.getAvgTimeInMs());
-        // 3rd runtime has the earliest start time, therefore that's what's expected.
-        assertEquals(runtime3.getStartTime(), ttFromFile.getStartTime());
-
-        assertEquals(runtime1.getResultRowCount(), ttFromFile.getRunTimesInMs().get(0).getResultRowCount());
-        assertEquals(runtime2.getResultRowCount(), ttFromFile.getRunTimesInMs().get(1).getResultRowCount());
-        assertEquals(runtime3.getResultRowCount(), ttFromFile.getRunTimesInMs().get(2).getResultRowCount());
-
-        // query result level verification
-        assertEquals(10, queryResultFromFile.getAvgMinRunTimeInMs());
-        assertEquals(30, queryResultFromFile.getAvgMaxRunTimeInMs());
-        assertEquals(20, queryResultFromFile.getAvgRunTimeInMs());
-        // 3rd runtime has the earliest start time, therefore that's what's expected.
-        assertEquals(runtime3.getStartTime(), queryResultFromFile.getStartTime());
+        return dataModelResult;
     }
 }
\ No newline at end of file


[2/4] phoenix git commit: PHOENIX-2227 Added the ability to Pherf to define a DDL statement that will be executed before a scenario is run to support dynamically creating multi-tenant views we are going to write and read from

Posted by co...@apache.org.
PHOENIX-2227 Added the ability to Pherf to define a DDL statement that will be executed before a scenario is run to support dynamically creating multi-tenant views we are going to write and read from


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6789fe7e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6789fe7e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6789fe7e

Branch: refs/heads/4.x-HBase-0.98
Commit: 6789fe7e36062793a1d03871347b146f7e6d5357
Parents: f9a5a92
Author: Jan <jf...@salesforce.com>
Authored: Thu Sep 3 17:48:18 2015 -0700
Committer: Cody Marcel <cm...@cmarcel-wsl1.internal.salesforce.com>
Committed: Fri Sep 11 10:58:20 2015 -0700

----------------------------------------------------------------------
 .../org/apache/phoenix/pherf/DataIngestIT.java  | 21 +++++++++++++++++++-
 .../phoenix/pherf/configuration/Scenario.java   | 13 ++++++++++++
 .../apache/phoenix/pherf/util/PhoenixUtil.java  | 21 ++++++++++++++++++++
 .../phoenix/pherf/workload/WriteWorkload.java   |  5 ++++-
 .../test/resources/scenario/test_scenario.xml   |  6 ++++++
 5 files changed, 64 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6789fe7e/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
index b821c7b..1098799 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
@@ -20,7 +20,6 @@ package org.apache.phoenix.pherf;
 
 import com.jcabi.jdbc.JdbcSession;
 import com.jcabi.jdbc.Outcome;
-
 import org.apache.phoenix.pherf.PherfConstants.GeneratePhoenixStats;
 import org.apache.phoenix.pherf.configuration.Column;
 import org.apache.phoenix.pherf.configuration.DataModel;
@@ -173,6 +172,26 @@ public class DataIngestIT extends ResultBaseTestIT {
 
     }
 
+    
+    @Test
+    public void testMultiTenantScenarioRunBeforeWriteWorkload() throws Exception {
+        // Arrange
+        Scenario scenario = parser.getScenarioByName("testMTDdlWriteScenario");
+        WorkloadExecutor executor = new WorkloadExecutor();
+        executor.add(new WriteWorkload(util, parser, scenario, GeneratePhoenixStats.NO));
+        
+        // Act
+        try {
+            // Wait for data to load up.
+            executor.get();
+            executor.shutdown();
+        } catch (Exception e) {
+            fail("Failed to load data. An exception was thrown: " + e.getMessage());
+        }
+
+        assertExpectedNumberOfRecordsWritten(scenario);
+    }
+    
     private void assertExpectedNumberOfRecordsWritten(Scenario scenario) throws Exception,
             SQLException {
         Connection connection = util.getConnection(scenario.getTenantId());

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6789fe7e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/configuration/Scenario.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/configuration/Scenario.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/configuration/Scenario.java
index 6c949d8..200fdc5 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/configuration/Scenario.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/configuration/Scenario.java
@@ -39,6 +39,7 @@ public class Scenario {
     private WriteParams writeParams;
     private String name;
     private String tenantId;
+    private String ddl;
 
     public Scenario() {
         writeParams = new WriteParams();
@@ -178,6 +179,18 @@ public class Scenario {
         this.tenantId = tenantId;
     }
 
+    /**
+     * Scenario level DDL that is executed before running the scenario.
+     */
+    @XmlAttribute
+    public String getDdl() {
+        return ddl;
+    }
+    
+    public void setDdl(String ddl) {
+        this.ddl = ddl;
+    }
+    
     public WriteParams getWriteParams() {
         return writeParams;
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6789fe7e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
index fad06a1..57858a3 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
@@ -243,6 +243,27 @@ public class PhoenixUtil {
             }
         }
     }
+    
+    /**
+     * Executes any ddl defined at the scenario level. This is executed before we commence
+     * the data load.
+     * 
+     * @throws Exception
+     */
+    public void executeScenarioDdl(Scenario scenario) throws Exception {
+        if (null != scenario.getDdl()) {
+            Connection conn = null;
+            try {
+                logger.info("\nExecuting DDL:" + scenario.getDdl() + " on tenantId:"
+                        + scenario.getTenantId());
+                executeStatement(scenario.getDdl(), conn = getConnection(scenario.getTenantId()));
+            } finally {
+                if (null != conn) {
+                    conn.close();
+                }
+            }
+        }
+    }
 
     public static String getZookeeper() {
         return zookeeper;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6789fe7e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
index f9d1ee6..7b5276b 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
@@ -164,7 +164,10 @@ public class WriteWorkload implements Workload {
             DataLoadThreadTime dataLoadThreadTime, Scenario scenario) throws Exception {
         logger.info("\nLoading " + scenario.getRowCount() + " rows for " + scenario.getTableName());
         long start = System.currentTimeMillis();
-
+        
+        // Execute any Scenario DDL before running workload
+        pUtil.executeScenarioDdl(scenario);
+        
         List<Future> writeBatches = getBatches(dataLoadThreadTime, scenario);
 
         waitForBatches(dataLoadTimeSummary, scenario, start, writeBatches);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6789fe7e/phoenix-pherf/src/test/resources/scenario/test_scenario.xml
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/test/resources/scenario/test_scenario.xml b/phoenix-pherf/src/test/resources/scenario/test_scenario.xml
index b5fe564..735e690 100644
--- a/phoenix-pherf/src/test/resources/scenario/test_scenario.xml
+++ b/phoenix-pherf/src/test/resources/scenario/test_scenario.xml
@@ -223,5 +223,11 @@
         <!-- Test writing to a Multi-tenant View -->
         <scenario tableName="PHERF.TEST_VIEW" tenantId="abcdefghijklmno" rowCount="100" name="testMTWriteScenario">
         </scenario>
+        <!--  Test scenario DDL -->
+        <scenario tableName="PHERF.TEST_MT_VIEW" tenantId="abcdefghijklmno" 
+                    ddl="CREATE VIEW IF NOT EXISTS PHERF.TEST_MT_VIEW (field1 VARCHAR) AS SELECT * FROM PHERF.TEST_MULTI_TENANT_TABLE" 
+                    rowCount="100" name="testMTDdlWriteScenario">
+        </scenario>
+        
     </scenarios>
 </datamodel>
\ No newline at end of file


[3/4] phoenix git commit: PHOENIX-2227 Adding comments to test_scenario.xml to explain new features added.

Posted by co...@apache.org.
PHOENIX-2227 Adding comments to test_scenario.xml to explain new features added.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/120426b6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/120426b6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/120426b6

Branch: refs/heads/4.x-HBase-0.98
Commit: 120426b6e8ef279b52fed14f7b767c024dd089e8
Parents: 6789fe7
Author: Jan <jf...@salesforce.com>
Authored: Fri Sep 4 13:17:32 2015 -0700
Committer: Cody Marcel <cm...@cmarcel-wsl1.internal.salesforce.com>
Committed: Fri Sep 11 10:58:20 2015 -0700

----------------------------------------------------------------------
 .../src/test/resources/scenario/test_scenario.xml  | 17 +++++++++++++++--
 1 file changed, 15 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/120426b6/phoenix-pherf/src/test/resources/scenario/test_scenario.xml
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/test/resources/scenario/test_scenario.xml b/phoenix-pherf/src/test/resources/scenario/test_scenario.xml
index 735e690..50a603e 100644
--- a/phoenix-pherf/src/test/resources/scenario/test_scenario.xml
+++ b/phoenix-pherf/src/test/resources/scenario/test_scenario.xml
@@ -220,10 +220,23 @@
                 <query id="q4" statement="select sum(SOME_INT) from PHERF.TEST_TABLE"/>
             </querySet>
         </scenario>
-        <!-- Test writing to a Multi-tenant View -->
+        <!-- To configure a Write Workload to write to a tenant specific view users need to
+             set the tenantId attribute on the scenario to the tenant they 
+             want to write data for. This tells Pherf to take out a 
+             tenant-specific connection for executing the write workload. 
+             The name of the tenant specific view to write to can then be specified as the value of
+             the tablename attribute. This assumes the tenant specific view has been created. To 
+             dynamically create the view see comments below with regard to the ddl attribute. 
+        -->
         <scenario tableName="PHERF.TEST_VIEW" tenantId="abcdefghijklmno" rowCount="100" name="testMTWriteScenario">
         </scenario>
-        <!--  Test scenario DDL -->
+        <!--  Scenario level DDL that is dynamically executed before the Write Workload is run.
+              This pattern is really useful when you want to write data to a multi-tenant view and the tenant id is
+              tightly bound to the scenario. In such cases you can't create the view through the data model flow.
+              The value of the tableName attribute is name of the view that is dynamically created based on the DDL
+              in the ddl attribute. Before running queries against the view, users must ensure Pherf has been run with the -l option at
+              least once. 
+         -->
         <scenario tableName="PHERF.TEST_MT_VIEW" tenantId="abcdefghijklmno" 
                     ddl="CREATE VIEW IF NOT EXISTS PHERF.TEST_MT_VIEW (field1 VARCHAR) AS SELECT * FROM PHERF.TEST_MULTI_TENANT_TABLE" 
                     rowCount="100" name="testMTDdlWriteScenario">


[4/4] phoenix git commit: PHOENIX-2252 - QueryExecutor creates a Differ for performance test

Posted by co...@apache.org.
PHOENIX-2252 - QueryExecutor creates a Differ for performance test


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2af5f8cb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2af5f8cb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2af5f8cb

Branch: refs/heads/4.x-HBase-0.98
Commit: 2af5f8cb403a1b1a0046c2a13f94973c2c2508c8
Parents: 120426b
Author: Cody Marcel <cm...@cmarcel-wsl1.internal.salesforce.com>
Authored: Fri Sep 11 10:04:06 2015 -0700
Committer: Cody Marcel <cm...@cmarcel-wsl1.internal.salesforce.com>
Committed: Fri Sep 11 10:58:43 2015 -0700

----------------------------------------------------------------------
 .../org/apache/phoenix/pherf/workload/QueryExecutor.java     | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2af5f8cb/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
index 5a7c49f..50d7190 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
@@ -260,13 +260,13 @@ public class QueryExecutor implements Workload {
         Runnable thread;
         if (workloadExecutor.isPerformance()) {
             thread =
-                    new MultithreadedDiffer(threadTime.getThreadName(), queryResult, threadTime,
-                            querySet.getNumberOfExecutions(), querySet.getExecutionDurationInMs());
-        } else {
-            thread =
                     new MultiThreadedRunner(threadTime.getThreadName(), queryResult,
                             dataModelResult, threadTime, querySet.getNumberOfExecutions(),
                             querySet.getExecutionDurationInMs());
+        } else {
+            thread =
+                    new MultithreadedDiffer(threadTime.getThreadName(), queryResult, threadTime,
+                            querySet.getNumberOfExecutions(), querySet.getExecutionDurationInMs());
         }
         return thread;
     }