Posted to commits@systemml.apache.org by ni...@apache.org on 2018/08/27 20:09:59 UTC

[4/5] systemml git commit: [SYSTEMML-1325] Cleanup static variables in DMLScript
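
For context, the recurring change in this patch is mechanical: direct reads of DMLScript's mutable static fields (STATISTICS, FINEGRAINED_STATISTICS, USE_ACCELERATOR, rtplatform) are replaced by the corresponding ConfigurationManager accessors (isStatistics(), isFinegrainedStatistics(), isGPU(), getExecutionMode()). A minimal caller-side sketch of the resulting pattern is shown below; the accessor and counter names are taken from the hunks that follow, while the wrapper class itself and the Statistics import path are illustrative assumptions, not part of the commit.

import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.utils.Statistics; // assumed package for the Statistics counters referenced in the patch

public class ConfigGuardSketch {
	public static void timedWork(Runnable work) {
		// statistics guard: formerly DMLScript.STATISTICS, now a ConfigurationManager accessor
		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
		work.run();
		if( ConfigurationManager.isStatistics() )
			Statistics.incrementFunRecompileTime(System.nanoTime() - t0); // counter as used in FunctionProgramBlock below
	}

	public static boolean forcedSingleNode() {
		// execution-mode guard: formerly DMLScript.rtplatform, now ConfigurationManager.getExecutionMode()
		return ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE;
	}
}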

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java b/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java
index 1a18d4d..9d061bf 100644
--- a/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java
+++ b/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java
@@ -37,8 +37,8 @@ import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.AggBinaryOp;
 import org.apache.sysml.hops.AggUnaryOp;
 import org.apache.sysml.hops.BinaryOp;
@@ -152,7 +152,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection
 			memo.setDistinct(e.getKey(), e.getValue());
 		
 		//maintain statistics
-		if( DMLScript.STATISTICS ) {
+		if( ConfigurationManager.isStatistics() ) {
 			if( sumMatPoints >= 63 )
 				LOG.warn("Long overflow on maintaining codegen statistics "
 					+ "for a DAG with "+sumMatPoints+" interesting points.");
@@ -321,7 +321,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection
 				LOG.trace("Enum: Skip "+pskip+" plans (by structure).");
 		}
 		
-		if( DMLScript.STATISTICS ) {
+		if( ConfigurationManager.isStatistics() ) {
 			Statistics.incrementCodegenEnumAllP((rgraph!=null||!STRUCTURAL_PRUNING)?len:0);
 			Statistics.incrementCodegenEnumEval(numEvalPlans);
 			Statistics.incrementCodegenEnumEvalP(numEvalPartPlans);
@@ -739,7 +739,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection
 				if( !memo.contains(hopID, TemplateType.ROW) )
 					continue;
 				Hop hop = memo.getHopRefs().get(hopID);
-				boolean isSpark = DMLScript.rtplatform == RUNTIME_PLATFORM.SPARK
+				boolean isSpark = ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SPARK
 					|| OptimizerUtils.getTotalMemEstimate(hop.getInput().toArray(new Hop[0]), hop, true)
 						> OptimizerUtils.getLocalMemBudget();
 				boolean validNcol = hop.getDataType().isScalar() || (HopRewriteUtils.isTransposeOperation(hop) ? 
@@ -1197,7 +1197,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection
 		synchronized( _planCache ) {
 			plan = _planCache.get(pKey);
 		}
-		if( DMLScript.STATISTICS ) {
+		if( ConfigurationManager.isStatistics() ) {
 			if( plan != null )
 				Statistics.incrementCodegenPlanCacheHits();
 			Statistics.incrementCodegenPlanCacheTotal();

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java b/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java
index a8b3024..c81848a 100644
--- a/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java
+++ b/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java
@@ -21,7 +21,7 @@ package org.apache.sysml.hops.recompile;
 
 import java.util.ArrayList;
 
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.AggUnaryOp;
 import org.apache.sysml.hops.DataOp;
 import org.apache.sysml.hops.Hop;
@@ -463,7 +463,7 @@ public class LiteralReplacement
 	private static double replaceUnaryAggregate( AggUnaryOp auop, MatrixBlock mb )
 	{
 		//setup stats reporting if necessary
-		boolean REPORT_STATS = (DMLScript.STATISTICS && REPORT_LITERAL_REPLACE_OPS_STATS); 
+		boolean REPORT_STATS = (ConfigurationManager.isStatistics() && REPORT_LITERAL_REPLACE_OPS_STATS); 
 		long t0 = REPORT_STATS ? System.nanoTime() : 0;
 		
 		//compute required unary aggregate 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java b/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java
index 4af5a10..271142d 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java
@@ -27,7 +27,6 @@ import java.util.HashSet;
 import java.util.LinkedHashMap;
 
 import org.apache.commons.lang.ArrayUtils;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.AggBinaryOp;
@@ -811,7 +810,7 @@ public class HopRewriteUtils
 	{
 		//awareness of forced exec single node (e.g., standalone), where we can 
 		//guarantee a single block independent of the size because always in CP.
-		if( DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE ) {
+		if( ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE ) {
 			return true;
 		}
 		

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/rewrite/ProgramRewriter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/ProgramRewriter.java b/src/main/java/org/apache/sysml/hops/rewrite/ProgramRewriter.java
index 3d4eafd..695e987 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/ProgramRewriter.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/ProgramRewriter.java
@@ -24,7 +24,6 @@ import java.util.List;
 
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.CompilerConfig.ConfigType;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.Hop;
@@ -122,7 +121,7 @@ public class ProgramRewriter
 		// DYNAMIC REWRITES (which do require size information)
 		if( dynamicRewrites )
 		{
-			if ( DMLScript.USE_ACCELERATOR ){
+			if ( ConfigurationManager.isGPU() ){
 				_dagRuleSet.add( new RewriteGPUSpecificOps() );	// gpu-specific rewrites
 			}
 			if ( OptimizerUtils.ALLOW_SUM_PRODUCT_REWRITES) {

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/rewrite/RewriteBlockSizeAndReblock.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteBlockSizeAndReblock.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteBlockSizeAndReblock.java
index 2c102cc..5cb2371 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteBlockSizeAndReblock.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteBlockSizeAndReblock.java
@@ -21,7 +21,6 @@ package org.apache.sysml.hops.rewrite;
 
 import java.util.ArrayList;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.DataOp;
@@ -124,7 +123,7 @@ public class RewriteBlockSizeAndReblock extends HopRewriteRule
 				} 
 				else if (dop.getDataOpType() == DataOp.DataOpTypes.TRANSIENTWRITE
 						|| dop.getDataOpType() == DataOp.DataOpTypes.TRANSIENTREAD) {
-					if ( DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE ) {
+					if ( ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE ) {
 						// simply copy the values from its input
 						dop.setRowsInBlock(hop.getInput().get(0).getRowsInBlock());
 						dop.setColsInBlock(hop.getInput().get(0).getColsInBlock());
@@ -214,6 +213,6 @@ public class RewriteBlockSizeAndReblock extends HopRewriteRule
 	}
 	
 	private static boolean isReblockValid() {
-		return ( DMLScript.rtplatform != RUNTIME_PLATFORM.SINGLE_NODE);
+		return ( ConfigurationManager.getExecutionMode() != RUNTIME_PLATFORM.SINGLE_NODE);
 	}
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/rewrite/RewriteGPUSpecificOps.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteGPUSpecificOps.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteGPUSpecificOps.java
index b603aa7..acf2e48 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteGPUSpecificOps.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteGPUSpecificOps.java
@@ -23,7 +23,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.AggUnaryOp;
 import org.apache.sysml.hops.BinaryOp;
 import org.apache.sysml.hops.FunctionOp;
@@ -143,7 +143,7 @@ public class RewriteGPUSpecificOps extends HopRewriteRule {
 	
 	private static boolean fitsOnGPU(Hop h, double multiplier) {
 		double memEst = multiplier*h.getMemEstimate();
-		return DMLScript.USE_ACCELERATOR && h.dimsKnown() && OptimizerUtils.isMemoryBasedOptLevel() &&
+		return ConfigurationManager.isGPU() && h.dimsKnown() && OptimizerUtils.isMemoryBasedOptLevel() &&
 				memEst < OptimizerUtils.getLocalMemBudget() && memEst < GPUContextPool.initialGPUMemBudget();
 	}
 	
@@ -167,7 +167,7 @@ public class RewriteGPUSpecificOps extends HopRewriteRule {
 				memEst += est;
 			}
 		}
-		return DMLScript.USE_ACCELERATOR && OptimizerUtils.isMemoryBasedOptLevel() &&
+		return ConfigurationManager.isGPU() && OptimizerUtils.isMemoryBasedOptLevel() &&
 				memEst < OptimizerUtils.getLocalMemBudget() && memEst < GPUContextPool.initialGPUMemBudget();
 	}
 	

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/rewrite/RewriteMarkLoopVariablesUpdateInPlace.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteMarkLoopVariablesUpdateInPlace.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteMarkLoopVariablesUpdateInPlace.java
index c3dd512..4032358 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteMarkLoopVariablesUpdateInPlace.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteMarkLoopVariablesUpdateInPlace.java
@@ -23,8 +23,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.DataOp;
 import org.apache.sysml.hops.Hop;
 import org.apache.sysml.hops.Hop.OpOp1;
@@ -55,8 +55,8 @@ public class RewriteMarkLoopVariablesUpdateInPlace extends StatementBlockRewrite
 	@Override
 	public List<StatementBlock> rewriteStatementBlock(StatementBlock sb, ProgramRewriteStatus status)
 	{
-		if( DMLScript.rtplatform == RUNTIME_PLATFORM.HADOOP
-			|| DMLScript.rtplatform == RUNTIME_PLATFORM.SPARK )
+		if( ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.HADOOP
+			|| ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SPARK )
 		{
 			// nothing to do here, return original statement block
 			return Arrays.asList(sb);

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagDataDependentOperators.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagDataDependentOperators.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagDataDependentOperators.java
index a254a1f..bb76cc4 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagDataDependentOperators.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagDataDependentOperators.java
@@ -25,7 +25,6 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.AggBinaryOp;
@@ -75,7 +74,7 @@ public class RewriteSplitDagDataDependentOperators extends StatementBlockRewrite
 	public List<StatementBlock> rewriteStatementBlock(StatementBlock sb, ProgramRewriteStatus state)
 	{
 		//DAG splits not required for forced single node
-		if( DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE
+		if( ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE
 			|| !HopRewriteUtils.isLastLevelStatementBlock(sb) )
 			return Arrays.asList(sb);
 		

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagUnknownCSVRead.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagUnknownCSVRead.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagUnknownCSVRead.java
index a4c31d9..cc349ac 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagUnknownCSVRead.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteSplitDagUnknownCSVRead.java
@@ -23,8 +23,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.DataOp;
 import org.apache.sysml.hops.Hop;
 import org.apache.sysml.hops.Hop.DataOpTypes;
@@ -53,7 +53,7 @@ public class RewriteSplitDagUnknownCSVRead extends StatementBlockRewriteRule
 	public List<StatementBlock> rewriteStatementBlock(StatementBlock sb, ProgramRewriteStatus state)
 	{
 		//DAG splits not required for forced single node
-		if( DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE
+		if( ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE
 			|| !HopRewriteUtils.isLastLevelStatementBlock(sb) )
 			return Arrays.asList(sb);
 		

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/lops/compile/Dag.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/lops/compile/Dag.java b/src/main/java/org/apache/sysml/lops/compile/Dag.java
index 452f030..47b497c 100644
--- a/src/main/java/org/apache/sysml/lops/compile/Dag.java
+++ b/src/main/java/org/apache/sysml/lops/compile/Dag.java
@@ -1165,7 +1165,7 @@ public class Dag<N extends Lop>
 	 */
 	private static boolean sendWriteLopToMR(Lop node) 
 	{
-		if ( DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE )
+		if ( ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE )
 			return false;
 		Lop in = node.getInputs().get(0);
 		Format nodeFormat = node.getOutputParameters().getFormat();

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java b/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java
index cf39048..e8145ac 100644
--- a/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java
+++ b/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
@@ -186,7 +185,7 @@ public class RunMRJobs
 			case DATAGEN:
 				if(    ConfigurationManager.isDynamicRecompilation()
 					&& OptimizerUtils.ALLOW_RAND_JOB_RECOMPILE
-					&& DMLScript.rtplatform != RUNTIME_PLATFORM.HADOOP 
+					&& ConfigurationManager.getExecutionMode() != RUNTIME_PLATFORM.HADOOP 
 					&& Recompiler.checkCPDataGen( inst, rdInst ) ) 
 				{
 					ret = executeInMemoryDataGenOperations(inst, rdInst, outputMatrices);
@@ -221,7 +220,7 @@ public class RunMRJobs
 			case REBLOCK:
 			case CSV_REBLOCK:
 				if(    ConfigurationManager.isDynamicRecompilation() 
-					&& DMLScript.rtplatform != RUNTIME_PLATFORM.HADOOP 
+					&& ConfigurationManager.getExecutionMode() != RUNTIME_PLATFORM.HADOOP 
 					&& Recompiler.checkCPReblock( inst, inputMatrices ) ) 
 				{
 					ret = executeInMemoryReblockOperations(inst, shuffleInst, inputMatrices, outputMatrices);

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/codegen/CodegenUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/codegen/CodegenUtils.java b/src/main/java/org/apache/sysml/runtime/codegen/CodegenUtils.java
index c302b0c..bfd21fb 100644
--- a/src/main/java/org/apache/sysml/runtime/codegen/CodegenUtils.java
+++ b/src/main/java/org/apache/sysml/runtime/codegen/CodegenUtils.java
@@ -42,7 +42,7 @@ import javax.tools.ToolProvider;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.codegen.SpoofCompiler;
 import org.apache.sysml.hops.codegen.SpoofCompiler.CompilerType;
 import org.apache.sysml.runtime.DMLRuntimeException;
@@ -73,7 +73,7 @@ public class CodegenUtils
 		if( ret != null ) 
 			return ret;
 		
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		//compile java source w/ specific compiler
 		if( SpoofCompiler.JAVA_COMPILER == CompilerType.JANINO )
@@ -84,7 +84,7 @@ public class CodegenUtils
 		//keep compiled class for reuse
 		_cache.put(name, ret);
 		
-		if( DMLScript.STATISTICS ) {
+		if( ConfigurationManager.isStatistics() ) {
 			Statistics.incrementCodegenClassCompile();
 			Statistics.incrementCodegenClassCompileTime(System.nanoTime()-t0);
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/FunctionProgramBlock.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/FunctionProgramBlock.java b/src/main/java/org/apache/sysml/runtime/controlprogram/FunctionProgramBlock.java
index 91900ca..1b9be4c 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/FunctionProgramBlock.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/FunctionProgramBlock.java
@@ -23,7 +23,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.recompile.Recompiler;
 import org.apache.sysml.hops.recompile.Recompiler.ResetType;
@@ -95,7 +94,7 @@ public class FunctionProgramBlock extends ProgramBlock
 				&& isRecompileOnce() 
 				&& ParForProgramBlock.RESET_RECOMPILATION_FLAGs )
 			{
-				long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+				long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 				
 				//note: it is important to reset the recompilation flags here
 				// (1) it is safe to reset recompilation flags because a 'recompile_once'
@@ -105,7 +104,7 @@ public class FunctionProgramBlock extends ProgramBlock
 				ResetType reset = ConfigurationManager.isCodegenEnabled() ? ResetType.RESET_KNOWN_DIMS : ResetType.RESET;
 				Recompiler.recompileProgramBlockHierarchy(_childBlocks, tmp, _tid, reset);
 				
-				if( DMLScript.STATISTICS ){
+				if( ConfigurationManager.isStatistics() ){
 					long t1 = System.nanoTime();
 					Statistics.incrementFunRecompileTime(t1-t0);
 					Statistics.incrementFunRecompiles();

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/LocalVariableMap.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/LocalVariableMap.java b/src/main/java/org/apache/sysml/runtime/controlprogram/LocalVariableMap.java
index 842a40a..a4c2fef 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/LocalVariableMap.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/LocalVariableMap.java
@@ -27,6 +27,7 @@ import java.util.Set;
 import java.util.StringTokenizer;
 
 import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.runtime.controlprogram.caching.CacheableData;
 import org.apache.sysml.runtime.util.ProgramConverter;
 import org.apache.sysml.runtime.controlprogram.parfor.util.IDSequence;
@@ -139,7 +140,7 @@ public class LocalVariableMap implements Cloneable
 			if( !dict.containsKey(hash) && e.getValue() instanceof CacheableData ) {
 				dict.put(hash, e.getValue());
 				double size = ((CacheableData<?>) e.getValue()).getDataSize();
-				if (DMLScript.JMLC_MEM_STATISTICS && DMLScript.FINEGRAINED_STATISTICS)
+				if (DMLScript.JMLC_MEM_STATISTICS && ConfigurationManager.isFinegrainedStatistics())
 					Statistics.maintainCPHeavyHittersMem(e.getKey(), size);
 				total += size;
 			}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java b/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
index 58be1ae..a851a4d 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
@@ -768,7 +768,7 @@ public class ParForProgramBlock extends ForProgramBlock
 			
 			//maintain statistics
 			long tinit = (long) time.stop();
-			if( DMLScript.STATISTICS )
+			if( ConfigurationManager.isStatistics() )
 				Statistics.incrementParForInitTime(tinit);
 			if( _monitor ) 
 				StatisticMonitor.putPFStat(_ID, Stat.PARFOR_INIT_PARWRK_T, tinit);
@@ -831,7 +831,7 @@ public class ParForProgramBlock extends ForProgramBlock
 
 			// Frees up the GPUContexts used in the threaded Parfor and sets
 			// the main thread to use the GPUContext
-			if (DMLScript.USE_ACCELERATOR) {
+			if (ConfigurationManager.isGPU()) {
 				ec.getGPUContext(0).initializeThread();
 			}
 		}
@@ -1365,7 +1365,7 @@ public class ParForProgramBlock extends ForProgramBlock
 
 			// If GPU mode is enabled, gets a GPUContext from the pool of GPUContexts
 			// and sets it in the ExecutionContext of the parfor
-			if (DMLScript.USE_ACCELERATOR){
+			if (ConfigurationManager.isGPU()){
 				cpEc.setGPUContexts(Arrays.asList(ec.getGPUContext(index)));
 			}
 			
@@ -1726,7 +1726,7 @@ public class ParForProgramBlock extends ForProgramBlock
 		if( numTasks != expTasks || numIters !=expIters ) //consistency check
 			throw new DMLRuntimeException("PARFOR: Number of executed tasks does not match the number of created tasks: tasks "+numTasks+"/"+expTasks+", iters "+numIters+"/"+expIters+".");
 	
-		if( DMLScript.STATISTICS )
+		if( ConfigurationManager.isStatistics() )
 			Statistics.incrementParForMergeTime((long) time.stop());
 	}
 	
@@ -1853,7 +1853,7 @@ public class ParForProgramBlock extends ForProgramBlock
 		long ret = -1;
 		
 		//if forced remote exec and single node
-		if(    DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE 
+		if(    ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE 
 			&& _execMode == PExecMode.REMOTE_MR
 			&& _optMode == POptMode.NONE      )
 		{

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java b/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java
index 72b5051..9e049f1 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java
@@ -137,7 +137,7 @@ public class ProgramBlock implements ParseInfo
 			if( DMLScript.isActiveAM() ) //set program block specific remote memory
 				DMLAppMasterUtils.setupProgramBlockRemoteMaxMemory(this);
 
-			long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+			long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 			if(    ConfigurationManager.isDynamicRecompilation()
 				&& _sb != null
 				&& _sb.requiresRecompilation() )
@@ -145,7 +145,7 @@ public class ProgramBlock implements ParseInfo
 				tmp = Recompiler.recompileHopsDag(
 					_sb, _sb.getHops(), ec.getVariables(), null, false, true, _tid);
 			}
-			if( DMLScript.STATISTICS ){
+			if( ConfigurationManager.isStatistics() ){
 				long t1 = System.nanoTime();
 				Statistics.incrementHOPRecompileTime(t1-t0);
 				if( tmp!=_inst )
@@ -177,7 +177,7 @@ public class ProgramBlock implements ParseInfo
 
 		//dynamically recompile instructions if enabled and required
 		try {
-			long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+			long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 			if(    ConfigurationManager.isDynamicRecompilation()
 				&& requiresRecompile )
 			{
@@ -185,7 +185,7 @@ public class ProgramBlock implements ParseInfo
 					hops, ec.getVariables(), null, false, true, _tid);
 				tmp = JMLCUtils.cleanupRuntimeInstructions(tmp, PRED_VAR);
 			}
-			if( DMLScript.STATISTICS ){
+			if( ConfigurationManager.isStatistics() ){
 				long t1 = System.nanoTime();
 				Statistics.incrementHOPRecompileTime(t1-t0);
 				if( tmp!=inst )
@@ -242,7 +242,7 @@ public class ProgramBlock implements ParseInfo
 		try
 		{
 			// start time measurement for statistics
-			long t0 = (DMLScript.STATISTICS || LOG.isTraceEnabled()) ?
+			long t0 = (ConfigurationManager.isStatistics() || LOG.isTraceEnabled()) ?
 				System.nanoTime() : 0;
 
 			// pre-process instruction (debug state, inst patching, listeners)
@@ -255,11 +255,11 @@ public class ProgramBlock implements ParseInfo
 			tmp.postprocessInstruction( ec );
 
 			// maintain aggregate statistics
-			if( DMLScript.STATISTICS) {
+			if( ConfigurationManager.isStatistics()) {
 				Statistics.maintainCPHeavyHitters(
 					tmp.getExtendedOpcode(), System.nanoTime()-t0);
 			}
-			if (DMLScript.JMLC_MEM_STATISTICS && DMLScript.FINEGRAINED_STATISTICS)
+			if (DMLScript.JMLC_MEM_STATISTICS && ConfigurationManager.isFinegrainedStatistics())
 				ec.getVariables().getPinnedDataSize();
 
 			// optional trace information (instruction and runtime)

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
index b2a78d4..03bc3b3 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
@@ -208,7 +208,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		_uniqueID = isCachingActive() ? _seq.getNextID() : -1;
 		_cacheStatus = CacheStatus.EMPTY;
 		_numReadThreads = 0;
-		_gpuObjects = DMLScript.USE_ACCELERATOR ? new HashMap<>() : null;
+		_gpuObjects = ConfigurationManager.isGPU() ? new HashMap<>() : null;
 	}
 	
 	/**
@@ -383,7 +383,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	 * @return cacheable data
 	 */
 	public T acquireRead() {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		//core internal acquire (synchronized per object)
 		T ret = acquireReadIntern();
@@ -393,7 +393,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		if( !isBelowCachingThreshold() )
 			updateStatusPinned(true);
 		
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			long t1 = System.nanoTime();
 			CacheStatistics.incrementAcquireRTime(t1-t0);
 		}
@@ -410,7 +410,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 			getCache();
 		
 		//call acquireHostRead if gpuHandle is set as well as is allocated
-		if( DMLScript.USE_ACCELERATOR && _gpuObjects != null ) {
+		if( ConfigurationManager.isGPU() && _gpuObjects != null ) {
 			boolean copiedFromGPU = false;
 			for (Map.Entry<GPUContext, GPUObject> kv : _gpuObjects.entrySet()) {
 				GPUObject gObj = kv.getValue();
@@ -428,7 +428,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		//(probe data for cache_nowrite / jvm_reuse)
 		if( _data==null && isEmpty(true) ) {
 			try {
-				if( DMLScript.STATISTICS )
+				if( ConfigurationManager.isStatistics() )
 					CacheStatistics.incrementHDFSHits();
 				
 				if( getRDDHandle()==null || getRDDHandle().allowsShortCircuitRead() ) {
@@ -459,7 +459,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 			}
 			_isAcquireFromEmpty = true;
 		}
-		else if( _data!=null && DMLScript.STATISTICS ) {
+		else if( _data!=null && ConfigurationManager.isStatistics() ) {
 			CacheStatistics.incrementMemHits();
 		}
 		
@@ -480,7 +480,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	 * @return cacheable data
 	 */
 	public T acquireModify(T newData) {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		//core internal acquire (synchronized per object)
 		T ret = acquireModifyIntern(newData);
@@ -490,7 +490,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		if( !isBelowCachingThreshold() )
 			updateStatusPinned(true);
 		
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			long t1 = System.nanoTime();
 			CacheStatistics.incrementAcquireMTime(t1-t0);
 			if (DMLScript.JMLC_MEM_STATISTICS)
@@ -531,7 +531,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	 * 
 	 */
 	public void release() {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		//update thread-local status (before unpin but outside
 		//the critical section of accessing a shared object)
@@ -541,7 +541,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		//core internal release (synchronized per object)
 		releaseIntern();
 		
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			long t1 = System.nanoTime();
 			CacheStatistics.incrementReleaseTime(t1-t0);
 		}
@@ -620,7 +620,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		if( _gpuObjects != null ) {
 			for (GPUObject gObj : _gpuObjects.values())
 				if (gObj != null)
-					gObj.clearData(null, DMLScript.EAGER_CUDA_FREE);
+					gObj.clearData(null, gObj.getGPUContext().EAGER_CUDA_FREE);
 		}
 		
 		// change object state EMPTY
@@ -677,7 +677,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	public synchronized void exportData (String fName, String outputFormat, int replication, FileFormatProperties formatProperties, String opcode) {
 		if( LOG.isTraceEnabled() )
 			LOG.trace("Export data "+hashCode()+" "+fName);
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		//prevent concurrent modifications
 		if ( !isAvailableToRead() )
@@ -685,7 +685,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 
 		LOG.trace("Exporting " + this.getDebugName() + " to " + fName + " in format " + outputFormat);
 		
-		if( DMLScript.USE_ACCELERATOR && _gpuObjects != null ) {
+		if( ConfigurationManager.isGPU() && _gpuObjects != null ) {
 			boolean copiedFromGPU = false;
 			for (Map.Entry<GPUContext, GPUObject> kv : _gpuObjects.entrySet()) {
 				GPUObject gObj = kv.getValue();
@@ -791,7 +791,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 			LOG.trace(this.getDebugName() + ": Skip export to hdfs since data already exists.");
 		}
 		  
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			long t1 = System.nanoTime();
 			CacheStatistics.incrementExportTime(t1-t0);
 		}
@@ -923,7 +923,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 			
 			// when outputFormat is binaryblock, make sure that matrixCharacteristics has correct blocking dimensions
 			// note: this is only required if singlenode (due to binarycell default) 
-			if ( oinfo == OutputInfo.BinaryBlockOutputInfo && DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE &&
+			if ( oinfo == OutputInfo.BinaryBlockOutputInfo && ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE &&
 				(mc.getRowsPerBlock() != ConfigurationManager.getBlocksize() || mc.getColsPerBlock() != ConfigurationManager.getBlocksize()) ) 
 			{
 				mc = new MatrixCharacteristics(mc.getRows(), mc.getCols(), ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize(), mc.getNonZeros());

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/caching/LazyWriteBuffer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/LazyWriteBuffer.java b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/LazyWriteBuffer.java
index ad97c4b..391f21a 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/LazyWriteBuffer.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/LazyWriteBuffer.java
@@ -26,7 +26,7 @@ import java.util.Map.Entry;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
 import org.apache.sysml.runtime.util.LocalFileUtils;
 
@@ -102,7 +102,7 @@ public class LazyWriteBuffer
 			//serialize matrix (outside synchronized critical path)
 			bbuff.serializeBlock(cb);
 			
-			if( DMLScript.STATISTICS ) {
+			if( ConfigurationManager.isStatistics() ) {
 				CacheStatistics.incrementFSBuffWrites();
 				CacheStatistics.incrementFSWrites(numEvicted);
 			}
@@ -111,7 +111,7 @@ public class LazyWriteBuffer
 		{
 			//write directly to local FS (bypass buffer if too large)
 			LocalFileUtils.writeCacheBlockToLocal(fname, cb);
-			if( DMLScript.STATISTICS ) {
+			if( ConfigurationManager.isStatistics() ) {
 				CacheStatistics.incrementFSWrites();
 			}
 			numEvicted++;
@@ -165,13 +165,13 @@ public class LazyWriteBuffer
 		if( ldata != null )
 		{
 			cb = ldata.deserializeBlock();
-			if( DMLScript.STATISTICS )
+			if( ConfigurationManager.isStatistics() )
 				CacheStatistics.incrementFSBuffHits();
 		}
 		else
 		{
 			cb = LocalFileUtils.readCacheBlockFromLocal(fname, matrix);
-			if( DMLScript.STATISTICS )
+			if( ConfigurationManager.isStatistics() )
 				CacheStatistics.incrementFSHits();
 		}
 		

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
index d4a25a4..4ab0f34 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.lang.ref.SoftReference;
 
 import org.apache.commons.lang.mutable.MutableBoolean;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.OptimizerUtils;
@@ -243,7 +242,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 	public synchronized MatrixBlock readMatrixPartition( IndexRange pred ) {
 		if( LOG.isTraceEnabled() )
 			LOG.trace("Acquire partition "+hashCode()+" "+pred);
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		if ( !_partitioned )
 			throw new DMLRuntimeException("MatrixObject not available to indexed read.");
@@ -345,7 +344,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 			throw new DMLRuntimeException(ex);
 		}
 		
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			long t1 = System.nanoTime();
 			CacheStatistics.incrementAcquireRTime(t1-t0);
 		}
@@ -536,7 +535,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 			
 			// when outputFormat is binaryblock, make sure that matrixCharacteristics has correct blocking dimensions
 			// note: this is only required if singlenode (due to binarycell default) 
-			if ( oinfo == OutputInfo.BinaryBlockOutputInfo && DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE &&
+			if ( oinfo == OutputInfo.BinaryBlockOutputInfo && ConfigurationManager.getExecutionMode() == RUNTIME_PLATFORM.SINGLE_NODE &&
 				(mc.getRowsPerBlock() != ConfigurationManager.getBlocksize() || mc.getColsPerBlock() != ConfigurationManager.getBlocksize()) ) 
 			{
 				DataConverter.writeMatrixToHDFS(_data, fname, oinfo, new MatrixCharacteristics(mc.getRows(), mc.getCols(),
@@ -553,7 +552,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 			LOG.trace ("Writing matrix to HDFS ("+fname+") - NOTHING TO WRITE (_data == null).");
 		}
 		
-		if( DMLScript.STATISTICS )
+		if( ConfigurationManager.isStatistics() )
 			CacheStatistics.incrementHDFSWrites();
 	}
 	

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java b/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
index 5740174..443adf4 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
@@ -27,6 +27,7 @@ import java.util.stream.Collectors;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.debug.DMLFrame;
 import org.apache.sysml.debug.DMLProgramCounter;
 import org.apache.sysml.debug.DebugState;
@@ -256,9 +257,9 @@ public class ExecutionContext {
 	 * @return matrix block
 	 */
 	public MatrixBlock getMatrixInput(String varName, String opcode) {
-		long t1 = opcode != null && DMLScript.STATISTICS && DMLScript.FINEGRAINED_STATISTICS ? System.nanoTime() : 0;
+		long t1 = opcode != null && ConfigurationManager.isStatistics() && ConfigurationManager.isFinegrainedStatistics() ? System.nanoTime() : 0;
 		MatrixBlock mb = getMatrixInput(varName);
-		if(opcode != null && DMLScript.STATISTICS && DMLScript.FINEGRAINED_STATISTICS) {
+		if(opcode != null && ConfigurationManager.isStatistics() && ConfigurationManager.isFinegrainedStatistics()) {
 			long t2 = System.nanoTime();
 			if(mb.isInSparseFormat())
 				GPUStatistics.maintainCPMiscTimes(opcode, CPInstruction.MISC_TIMER_GET_SPARSE_MB, t2-t1);
@@ -404,9 +405,9 @@ public class ExecutionContext {
 	}
 	
 	public void releaseMatrixInput(String varName, String opcode) {
-		long t1 = opcode != null && DMLScript.STATISTICS && DMLScript.FINEGRAINED_STATISTICS ? System.nanoTime() : 0;
+		long t1 = opcode != null && ConfigurationManager.isStatistics() && ConfigurationManager.isFinegrainedStatistics() ? System.nanoTime() : 0;
 		releaseMatrixInput(varName);
-		if(opcode != null && DMLScript.STATISTICS && DMLScript.FINEGRAINED_STATISTICS) {
+		if(opcode != null && ConfigurationManager.isStatistics() && ConfigurationManager.isFinegrainedStatistics()) {
 			long t2 = System.nanoTime();
 			GPUStatistics.maintainCPMiscTimes(opcode, CPInstruction.MISC_TIMER_RELEASE_INPUT_MB, t2-t1);
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContextFactory.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContextFactory.java b/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContextFactory.java
index 968b285..bf52581 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContextFactory.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContextFactory.java
@@ -19,8 +19,8 @@
 
 package org.apache.sysml.runtime.controlprogram.context;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.OptimizerUtils;
 import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
 import org.apache.sysml.runtime.controlprogram.Program;
@@ -45,7 +45,7 @@ public class ExecutionContextFactory
 	{
 		ExecutionContext ec = null;
 		
-		switch( DMLScript.rtplatform )
+		switch( ConfigurationManager.getExecutionMode() )
 		{
 			case SINGLE_NODE:
 				//NOTE: even in case of forced singlenode operations, users might still 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java b/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
index f8d2345..879133b 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
@@ -127,7 +127,7 @@ public class SparkExecutionContext extends ExecutionContext
 		super( allocateVars, prog );
 		
 		//spark context creation via internal initializer
-		if( !LAZY_SPARKCTX_CREATION || DMLScript.rtplatform==RUNTIME_PLATFORM.SPARK ) {
+		if( !LAZY_SPARKCTX_CREATION || ConfigurationManager.getExecutionMode()==RUNTIME_PLATFORM.SPARK ) {
 			initSparkContext();
 		}
 	}
@@ -193,7 +193,7 @@ public class SparkExecutionContext extends ExecutionContext
 		if( _spctx != null )
 			return;
 
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 
 		//create a default spark context (master, appname, etc refer to system properties
 		//as given in the spark configuration or during spark-submit)
@@ -240,7 +240,7 @@ public class SparkExecutionContext extends ExecutionContext
 			MRJobConfiguration.addBinaryBlockSerializationFramework( _spctx.hadoopConfiguration() );
 
 		//statistics maintenance
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			Statistics.setSparkCtxCreateTime(System.nanoTime()-t0);
 		}
 	}
@@ -507,7 +507,7 @@ public class SparkExecutionContext extends ExecutionContext
 	}
 
 	public Broadcast<CacheBlock> broadcastVariable(CacheableData<CacheBlock> cd) {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		Broadcast<CacheBlock> brBlock = null;
 
 		// reuse existing non partitioned broadcast handle
@@ -536,7 +536,7 @@ public class SparkExecutionContext extends ExecutionContext
 					OptimizerUtils.estimateSize(cd.getMatrixCharacteristics()));
 				CacheableData.addBroadcastSize(cd.getBroadcastHandle().getNonPartitionedBroadcastSize());
 
-				if (DMLScript.STATISTICS) {
+				if (ConfigurationManager.isStatistics()) {
 					Statistics.accSparkBroadCastTime(System.nanoTime() - t0);
 					Statistics.incSparkBroadcastCount(1);
 				}
@@ -555,7 +555,7 @@ public class SparkExecutionContext extends ExecutionContext
 		//the broadcasts are created (other than in local mode) in order to avoid 
 		//unnecessary memory requirements during the lifetime of this broadcast handle.
 		
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 
 		PartitionedBroadcast<MatrixBlock> bret = null;
 
@@ -603,7 +603,7 @@ public class SparkExecutionContext extends ExecutionContext
 			CacheableData.addBroadcastSize(mo.getBroadcastHandle().getPartitionedBroadcastSize());
 		}
 
-		if (DMLScript.STATISTICS) {
+		if (ConfigurationManager.isStatistics()) {
 			Statistics.accSparkBroadCastTime(System.nanoTime() - t0);
 			Statistics.incSparkBroadcastCount(1);
 		}
@@ -618,7 +618,7 @@ public class SparkExecutionContext extends ExecutionContext
 
 	@SuppressWarnings("unchecked")
 	public PartitionedBroadcast<FrameBlock> getBroadcastForFrameVariable(String varname) {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 
 		FrameObject fo = getFrameObject(varname);
 
@@ -668,7 +668,7 @@ public class SparkExecutionContext extends ExecutionContext
 			CacheableData.addBroadcastSize(fo.getBroadcastHandle().getPartitionedBroadcastSize());
 		}
 
-		if (DMLScript.STATISTICS) {
+		if (ConfigurationManager.isStatistics()) {
 			Statistics.accSparkBroadCastTime(System.nanoTime() - t0);
 			Statistics.incSparkBroadcastCount(1);
 		}
@@ -706,7 +706,7 @@ public class SparkExecutionContext extends ExecutionContext
 	
 	public static JavaPairRDD<MatrixIndexes,MatrixBlock> toMatrixJavaPairRDD(JavaSparkContext sc, MatrixBlock src,
 			int brlen, int bclen, int numParts, boolean inclEmpty) {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		List<Tuple2<MatrixIndexes,MatrixBlock>> list = null;
 
 		if( src.getNumRows() <= brlen && src.getNumColumns() <= bclen ) {
@@ -724,7 +724,7 @@ public class SparkExecutionContext extends ExecutionContext
 		JavaPairRDD<MatrixIndexes,MatrixBlock> result = (numParts > 1) ?
 			sc.parallelizePairs(list, numParts) : sc.parallelizePairs(list);
 		
-		if (DMLScript.STATISTICS) {
+		if (ConfigurationManager.isStatistics()) {
 			Statistics.accSparkParallelizeTime(System.nanoTime() - t0);
 			Statistics.incSparkParallelizeCount(1);
 		}
@@ -755,7 +755,7 @@ public class SparkExecutionContext extends ExecutionContext
 	}
 
 	public static JavaPairRDD<Long,FrameBlock> toFrameJavaPairRDD(JavaSparkContext sc, FrameBlock src) {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		LinkedList<Tuple2<Long,FrameBlock>> list = new LinkedList<>();
 
 		//create and write subblocks of matrix
@@ -777,7 +777,7 @@ public class SparkExecutionContext extends ExecutionContext
 		}
 
 		JavaPairRDD<Long,FrameBlock> result = sc.parallelizePairs(list);
-		if (DMLScript.STATISTICS) {
+		if (ConfigurationManager.isStatistics()) {
 			Statistics.accSparkParallelizeTime(System.nanoTime() - t0);
 			Statistics.incSparkParallelizeCount(1);
 		}
@@ -819,7 +819,7 @@ public class SparkExecutionContext extends ExecutionContext
 	 * @return matrix block
 	 */
 	public static MatrixBlock toMatrixBlock(JavaPairRDD<MatrixIndexes,MatrixBlock> rdd, int rlen, int clen, int brlen, int bclen, long nnz) {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 
 		MatrixBlock out = null;
 
@@ -888,7 +888,7 @@ public class SparkExecutionContext extends ExecutionContext
 			out.examSparsity();
 		}
 
-		if (DMLScript.STATISTICS) {
+		if (ConfigurationManager.isStatistics()) {
 			Statistics.accSparkCollectTime(System.nanoTime() - t0);
 			Statistics.incSparkCollectCount(1);
 		}
@@ -915,7 +915,7 @@ public class SparkExecutionContext extends ExecutionContext
 	 */
 	public static MatrixBlock toMatrixBlock(JavaPairRDD<MatrixIndexes, MatrixCell> rdd, int rlen, int clen, long nnz)
 	{
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 
 		MatrixBlock out = null;
 
@@ -946,7 +946,7 @@ public class SparkExecutionContext extends ExecutionContext
 		out.recomputeNonZeros();
 		out.examSparsity();
 
-		if (DMLScript.STATISTICS) {
+		if (ConfigurationManager.isStatistics()) {
 			Statistics.accSparkCollectTime(System.nanoTime() - t0);
 			Statistics.incSparkCollectCount(1);
 		}
@@ -957,7 +957,7 @@ public class SparkExecutionContext extends ExecutionContext
 	public static PartitionedBlock<MatrixBlock> toPartitionedMatrixBlock(JavaPairRDD<MatrixIndexes,MatrixBlock> rdd, int rlen, int clen, int brlen, int bclen, long nnz)
 	{
 
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 
 		PartitionedBlock<MatrixBlock> out = new PartitionedBlock<>(rlen, clen, brlen, bclen);
 		List<Tuple2<MatrixIndexes,MatrixBlock>> list = rdd.collect();
@@ -971,7 +971,7 @@ public class SparkExecutionContext extends ExecutionContext
 			out.setBlock((int)ix.getRowIndex(), (int)ix.getColumnIndex(), block);
 		}
 
-		if (DMLScript.STATISTICS) {
+		if (ConfigurationManager.isStatistics()) {
 			Statistics.accSparkCollectTime(System.nanoTime() - t0);
 			Statistics.incSparkCollectCount(1);
 		}
@@ -986,7 +986,7 @@ public class SparkExecutionContext extends ExecutionContext
 	}
 
 	public static FrameBlock toFrameBlock(JavaPairRDD<Long,FrameBlock> rdd, ValueType[] schema, int rlen, int clen) {
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 
 		if(schema == null)
 			schema = UtilFunctions.nCopies(clen, ValueType.STRING);
@@ -1012,7 +1012,7 @@ public class SparkExecutionContext extends ExecutionContext
 			}
 		}
 
-		if (DMLScript.STATISTICS) {
+		if (ConfigurationManager.isStatistics()) {
 			Statistics.accSparkCollectTime(System.nanoTime() - t0);
 			Statistics.incSparkCollectCount(1);
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
index 5ab4e07..df9c925 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
@@ -23,7 +23,7 @@ import java.util.concurrent.Callable;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.parser.Statement;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
@@ -107,7 +107,7 @@ public class LocalPSWorker extends PSWorker implements Callable<Void> {
 	}
 
 	private ListObject updateModel(ListObject globalParams, ListObject gradients, int i, int j, int batchIter) {
-		Timing tUpd = DMLScript.STATISTICS ? new Timing(true) : null;
+		Timing tUpd = ConfigurationManager.isStatistics() ? new Timing(true) : null;
 
 		globalParams = _ps.updateLocalModel(_ec, gradients, globalParams);
 
@@ -167,7 +167,7 @@ public class LocalPSWorker extends PSWorker implements Callable<Void> {
 		long end = Math.min((j + 1) * _batchSize, dataSize);
 
 		// Get batch features and labels
-		Timing tSlic = DMLScript.STATISTICS ? new Timing(true) : null;
+		Timing tSlic = ConfigurationManager.isStatistics() ? new Timing(true) : null;
 		MatrixObject bFeatures = ParamservUtils.sliceMatrix(_features, begin, end);
 		MatrixObject bLabels = ParamservUtils.sliceMatrix(_labels, begin, end);
 		accBatchIndexingTime(tSlic);
@@ -183,7 +183,7 @@ public class LocalPSWorker extends PSWorker implements Callable<Void> {
 		}
 
 		// Invoke the update function
-		Timing tGrad = DMLScript.STATISTICS ? new Timing(true) : null;
+		Timing tGrad = ConfigurationManager.isStatistics() ? new Timing(true) : null;
 		_inst.processInstruction(_ec);
 		accGradientComputeTime(tGrad);
 
@@ -197,25 +197,25 @@ public class LocalPSWorker extends PSWorker implements Callable<Void> {
 	
 	@Override
 	protected void incWorkerNumber() {
-		if (DMLScript.STATISTICS)
+		if (ConfigurationManager.isStatistics())
 			Statistics.incWorkerNumber();
 	}
 
 	@Override
 	protected void accLocalModelUpdateTime(Timing time) {
-		if (DMLScript.STATISTICS)
+		if (ConfigurationManager.isStatistics())
 			Statistics.accPSLocalModelUpdateTime((long) time.stop());
 	}
 
 	@Override
 	protected void accBatchIndexingTime(Timing time) {
-		if (DMLScript.STATISTICS)
+		if (ConfigurationManager.isStatistics())
 			Statistics.accPSBatchIndexingTime((long) time.stop());
 	}
 
 	@Override
 	protected void accGradientComputeTime(Timing time) {
-		if (DMLScript.STATISTICS)
+		if (ConfigurationManager.isStatistics())
 			Statistics.accPSGradientComputeTime((long) time.stop());
 	}
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamServer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamServer.java b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamServer.java
index 2b2249e..7748ed7 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamServer.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamServer.java
@@ -33,7 +33,7 @@ import java.util.stream.IntStream;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.parser.DataIdentifier;
 import org.apache.sysml.parser.Expression;
 import org.apache.sysml.parser.Statement;
@@ -169,9 +169,9 @@ public abstract class ParamServer
 	}
 
 	private void updateGlobalModel(ListObject gradients) {
-		Timing tAgg = DMLScript.STATISTICS ? new Timing(true) : null;
+		Timing tAgg = ConfigurationManager.isStatistics() ? new Timing(true) : null;
 		_model = updateLocalModel(_ec, gradients, _model);
-		if (DMLScript.STATISTICS)
+		if (ConfigurationManager.isStatistics())
 			Statistics.accPSAggregationTime((long) tAgg.stop());
 	}
 
@@ -227,12 +227,12 @@ public abstract class ParamServer
 	}
 
 	private void broadcastModel(int workerID) throws InterruptedException {
-		Timing tBroad = DMLScript.STATISTICS ? new Timing(true) : null;
+		Timing tBroad = ConfigurationManager.isStatistics() ? new Timing(true) : null;
 
 		//broadcast copy of model to specific worker, cleaned up by worker
 		_modelMap.get(workerID).put(ParamservUtils.copyList(_model, false));
 
-		if (DMLScript.STATISTICS)
+		if (ConfigurationManager.isStatistics())
 			Statistics.accPSModelBroadcastTime((long) tBroad.stop());
 	}
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
index f8b5dda..58bf311 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
@@ -32,7 +32,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.spark.Partitioner;
 import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.Hop;
 import org.apache.sysml.hops.MultiThreadedHop;
@@ -383,7 +382,7 @@ public class ParamservUtils {
 
 	@SuppressWarnings("unchecked")
 	public static JavaPairRDD<Integer, Tuple2<MatrixBlock, MatrixBlock>> doPartitionOnSpark(SparkExecutionContext sec, MatrixObject features, MatrixObject labels, Statement.PSScheme scheme, int workerNum) {
-		Timing tSetup = DMLScript.STATISTICS ? new Timing(true) : null;
+		Timing tSetup = ConfigurationManager.isStatistics() ? new Timing(true) : null;
 		// Get input RDD
 		JavaPairRDD<MatrixIndexes, MatrixBlock> featuresRDD = (JavaPairRDD<MatrixIndexes, MatrixBlock>)
 			sec.getRDDHandleForMatrixObject(features, InputInfo.BinaryBlockInputInfo);
@@ -416,7 +415,7 @@ public class ParamservUtils {
 			})
 			.mapToPair(new DataPartitionerSparkAggregator(features.getNumColumns(), labels.getNumColumns()));
 
-		if (DMLScript.STATISTICS)
+		if (ConfigurationManager.isStatistics())
 			Statistics.accPSSetupTime((long) tSetup.stop());
 		return result;
 	}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/SparkPSProxy.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/SparkPSProxy.java b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/SparkPSProxy.java
index fd88b83..dad1536 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/SparkPSProxy.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/SparkPSProxy.java
@@ -26,7 +26,7 @@ import java.io.IOException;
 
 import org.apache.spark.network.client.TransportClient;
 import org.apache.spark.util.LongAccumulator;
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.paramserv.rpc.PSRpcCall;
 import org.apache.sysml.runtime.controlprogram.paramserv.rpc.PSRpcResponse;
@@ -47,13 +47,13 @@ public class SparkPSProxy extends ParamServer {
 	}
 
 	private void accRpcRequestTime(Timing tRpc) {
-		if (DMLScript.STATISTICS)
+		if (ConfigurationManager.isStatistics())
 			_aRPC.add((long) tRpc.stop());
 	}
 
 	@Override
 	public void push(int workerID, ListObject value) {
-		Timing tRpc = DMLScript.STATISTICS ? new Timing(true) : null;
+		Timing tRpc = ConfigurationManager.isStatistics() ? new Timing(true) : null;
 		PSRpcResponse response;
 		try {
 			response = new PSRpcResponse(_client.sendRpcSync(new PSRpcCall(PUSH, workerID, value).serialize(), _rpcTimeout));
@@ -68,7 +68,7 @@ public class SparkPSProxy extends ParamServer {
 
 	@Override
 	public ListObject pull(int workerID) {
-		Timing tRpc = DMLScript.STATISTICS ? new Timing(true) : null;
+		Timing tRpc = ConfigurationManager.isStatistics() ? new Timing(true) : null;
 		PSRpcResponse response;
 		try {
 			response = new PSRpcResponse(_client.sendRpcSync(new PSRpcCall(PULL, workerID, null).serialize(), _rpcTimeout));

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
index c64c5e9..8195d91 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
 import org.apache.sysml.runtime.DMLRuntimeException;
@@ -71,7 +70,7 @@ public class DataPartitionerRemoteMR extends DataPartitioner
 	protected void partitionMatrix(MatrixObject in, String fnameNew, InputInfo ii, OutputInfo oi, long rlen, long clen, int brlen, int bclen)
 	{
 		String jobname = "ParFor-DPMR";
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		JobConf job;
 		job = new JobConf( DataPartitionerRemoteMR.class );
@@ -199,7 +198,7 @@ public class DataPartitionerRemoteMR extends DataPartitioner
 			throw new DMLRuntimeException(ex);
 		}
 		
-		if( DMLScript.STATISTICS && _pfid >= 0 ){ 
+		if( ConfigurationManager.isStatistics() && _pfid >= 0 ){ 
 			long t1 = System.nanoTime(); //only for parfor 
 			Statistics.maintainCPHeavyHitters("MR-Job_"+jobname, t1-t0);
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
index 9f00dc6..daab642 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
@@ -21,7 +21,7 @@ package org.apache.sysml.runtime.controlprogram.parfor;
 
 import org.apache.spark.api.java.JavaPairRDD;
 
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PartitionFormat;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
@@ -60,7 +60,7 @@ public class DataPartitionerRemoteSpark extends DataPartitioner
 	protected void partitionMatrix(MatrixObject in, String fnameNew, InputInfo ii, OutputInfo oi, long rlen, long clen, int brlen, int bclen)
 	{
 		String jobname = "ParFor-DPSP";
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		SparkExecutionContext sec = (SparkExecutionContext)_ec;
 
@@ -90,7 +90,7 @@ public class DataPartitionerRemoteSpark extends DataPartitioner
 		//maintain statistics
 	    Statistics.incrementNoOfCompiledSPInst();
 	    Statistics.incrementNoOfExecutedSPInst();
-	    if( DMLScript.STATISTICS ){
+	    if( ConfigurationManager.isStatistics() ){
 			Statistics.maintainCPHeavyHitters(jobname, System.nanoTime()-t0);
 		}
 	}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java
index f6cb602..30c0eeb 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java
@@ -21,7 +21,6 @@ package org.apache.sysml.runtime.controlprogram.parfor;
 
 import java.util.Collection;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.CompilerConfig;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.OptimizerUtils;
@@ -79,7 +78,7 @@ public class LocalParWorker extends ParWorker implements Runnable
 		}
 
 		// Initialize this GPUContext to this thread
-		if (DMLScript.USE_ACCELERATOR) {
+		if (ConfigurationManager.isGPU()) {
 			try {
 				_ec.getGPUContext(0).initializeThread();
 			} catch(DMLRuntimeException e) {

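The LocalParWorker hunk above replaces the accelerator flag with ConfigurationManager.isGPU(). A small sketch of that guard, assuming an ExecutionContext that exposes a GPU context at index 0 (class and method names are illustrative):

import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;

public class GpuThreadInitSketch {
	public void bindGpuToWorkerThread(ExecutionContext ec) {
		// initialize the per-thread GPU context only when the accelerator is enabled
		if (ConfigurationManager.isGPU()) {
			try {
				ec.getGPUContext(0).initializeThread();
			}
			catch (DMLRuntimeException e) {
				// surface initialization failures to the caller
				throw e;
			}
		}
	}
}
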
http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
index 1f25032..5623827 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
 import org.apache.sysml.runtime.DMLRuntimeException;
@@ -76,7 +75,7 @@ public class RemoteDPParForMR
 	{
 		RemoteParForJobReturn ret = null;
 		String jobname = "ParFor-DPEMR";
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		JobConf job;
 		job = new JobConf( RemoteDPParForMR.class );
@@ -175,7 +174,7 @@ public class RemoteDPParForMR
 			Group pgroup = runjob.getCounters().getGroup(ParForProgramBlock.PARFOR_COUNTER_GROUP_NAME);
 			int numTasks = (int)pgroup.getCounter( Stat.PARFOR_NUMTASKS.toString() );
 			int numIters = (int)pgroup.getCounter( Stat.PARFOR_NUMITERS.toString() );
-			if( DMLScript.STATISTICS && !InfrastructureAnalyzer.isLocalMode() ) {
+			if( ConfigurationManager.isStatistics() && !InfrastructureAnalyzer.isLocalMode() ) {
 				Statistics.incrementJITCompileTime( pgroup.getCounter( Stat.PARFOR_JITCOMPILE.toString() ) );
 				Statistics.incrementJVMgcCount( pgroup.getCounter( Stat.PARFOR_JVMGC_COUNT.toString() ) );
 				Statistics.incrementJVMgcTime( pgroup.getCounter( Stat.PARFOR_JVMGC_TIME.toString() ) );
@@ -217,7 +216,7 @@ public class RemoteDPParForMR
 			}
 		}
 		
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			long t1 = System.nanoTime();
 			Statistics.maintainCPHeavyHitters("MR-Job_"+jobname, t1-t0);
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
index 82cb2fa..866e456 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
@@ -38,7 +38,7 @@ import org.apache.spark.util.LongAccumulator;
 
 import scala.Tuple2;
 
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PDataPartitionFormat;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PartitionFormat;
@@ -74,7 +74,7 @@ public class RemoteDPParForSpark
 			boolean tSparseCol, boolean enableCPCaching, int numReducers ) 
 	{
 		String jobname = "ParFor-DPESP";
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		SparkExecutionContext sec = (SparkExecutionContext)ec;
 		JavaSparkContext sc = sec.getSparkContext();
@@ -112,7 +112,7 @@ public class RemoteDPParForSpark
 		//maintain statistics
 	    Statistics.incrementNoOfCompiledSPInst();
 	    Statistics.incrementNoOfExecutedSPInst();
-	    if( DMLScript.STATISTICS ){
+	    if( ConfigurationManager.isStatistics() ){
 			Statistics.maintainCPHeavyHitters(jobname, System.nanoTime()-t0);
 		}
 		

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java
index 3f235cc..11d1ed9 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PDataPartitionFormat;
@@ -188,7 +187,7 @@ public class RemoteDPParWorkerReducer extends ParWorker
 		StatisticMonitor.disableStatMonitoring();
 		
 		//always reset stats because counters per map task (for case of JVM reuse)
-		if( DMLScript.STATISTICS && !InfrastructureAnalyzer.isLocalMode(job) )
+		if( ConfigurationManager.isStatistics() && !InfrastructureAnalyzer.isLocalMode(job) )
 			Statistics.reset();
 	}
 
@@ -206,8 +205,8 @@ public class RemoteDPParWorkerReducer extends ParWorker
 			
 			//print heavy hitters per task
 			JobConf job = ConfigurationManager.getCachedJobConf();
-			if( DMLScript.STATISTICS && !InfrastructureAnalyzer.isLocalMode(job) )
-				LOG.info("\nSystemML Statistics:\nHeavy hitter instructions (name, time, count):\n" + Statistics.getHeavyHitters(DMLScript.STATISTICS_COUNT));
+			if( ConfigurationManager.isStatistics() && !InfrastructureAnalyzer.isLocalMode(job) )
+				LOG.info("\nSystemML Statistics:\nHeavy hitter instructions (name, time, count):\n" + Statistics.getHeavyHitters(ConfigurationManager.getDMLOptions().getStatisticsMaxHeavyHitters()));
 		}
 		catch(Exception ex)
 		{

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForMR.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForMR.java
index 81a5e65..63e4b40 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForMR.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForMR.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.lib.NLineInputFormat;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
 import org.apache.sysml.runtime.DMLRuntimeException;
@@ -73,7 +72,7 @@ public class RemoteParForMR
 	{
 		RemoteParForJobReturn ret = null;
 		String jobname = "ParFor-EMR";
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		JobConf job;
 		job = new JobConf( RemoteParForMR.class );
@@ -185,7 +184,7 @@ public class RemoteParForMR
 			Group pgroup = runjob.getCounters().getGroup(ParForProgramBlock.PARFOR_COUNTER_GROUP_NAME);
 			int numTasks = (int)pgroup.getCounter( Stat.PARFOR_NUMTASKS.toString() );
 			int numIters = (int)pgroup.getCounter( Stat.PARFOR_NUMITERS.toString() );
-			if( DMLScript.STATISTICS && !InfrastructureAnalyzer.isLocalMode() ) {
+			if( ConfigurationManager.isStatistics() && !InfrastructureAnalyzer.isLocalMode() ) {
 				Statistics.incrementJITCompileTime( pgroup.getCounter( Stat.PARFOR_JITCOMPILE.toString() ) );
 				Statistics.incrementJVMgcCount( pgroup.getCounter( Stat.PARFOR_JVMGC_COUNT.toString() ) );
 				Statistics.incrementJVMgcTime( pgroup.getCounter( Stat.PARFOR_JVMGC_TIME.toString() ) );
@@ -228,7 +227,7 @@ public class RemoteParForMR
 			}
 		}
 		
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			long t1 = System.nanoTime();
 			Statistics.maintainCPHeavyHitters("MR-Job_"+jobname, t1-t0);
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForSpark.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForSpark.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForSpark.java
index 7319cd6..e913c93 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForSpark.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForSpark.java
@@ -42,13 +42,13 @@ import org.apache.sysml.runtime.instructions.cp.Data;
 import org.apache.sysml.runtime.instructions.cp.ScalarObject;
 import scala.Tuple2;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
 import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
 import org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
 import org.apache.sysml.runtime.controlprogram.parfor.util.IDSequence;
 import org.apache.sysml.utils.Statistics;
+import org.apache.sysml.conf.ConfigurationManager;
 
 /**
  * This class serves two purposes: (1) isolating Spark imports to enable running in 
@@ -73,7 +73,7 @@ public class RemoteParForSpark
 		ExecutionContext ec, ArrayList<ResultVar> resultVars, boolean cpCaching, int numMappers, boolean topLevelPF)
 	{
 		String jobname = "ParFor-ESP";
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		SparkExecutionContext sec = (SparkExecutionContext)ec;
 		JavaSparkContext sc = sec.getSparkContext();
@@ -111,7 +111,7 @@ public class RemoteParForSpark
 		//maintain statistics
 		Statistics.incrementNoOfCompiledSPInst();
 		Statistics.incrementNoOfExecutedSPInst();
-		if( DMLScript.STATISTICS )
+		if( ConfigurationManager.isStatistics() )
 			Statistics.maintainCPHeavyHitters(jobname, System.nanoTime()-t0);
 		
 		return ret;

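The ParFor MR and Spark entry points above all share one timing idiom: take a System.nanoTime() stamp only when statistics are enabled, then record the elapsed time under the job name as a CP heavy hitter. A condensed sketch of that idiom with the job body elided (class name illustrative):

import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.utils.Statistics;

public class ParForJobTimingSketch {
	public void submitRemoteJob() {
		String jobname = "ParFor-ESP";
		// start timestamp is taken only when statistics are enabled
		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
		// ... build and run the remote Spark/MR job here ...
		Statistics.incrementNoOfCompiledSPInst();
		Statistics.incrementNoOfExecutedSPInst();
		if (ConfigurationManager.isStatistics())
			Statistics.maintainCPHeavyHitters(jobname, System.nanoTime() - t0);
	}
}
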
http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForUtils.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForUtils.java
index 2cf35c7..fa11d06 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForUtils.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParForUtils.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.mapred.Reporter;
 
 import scala.Tuple2;
 
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.parser.Expression.DataType;
 import org.apache.sysml.parser.ParForStatementBlock.ResultVar;
@@ -68,7 +67,7 @@ public class RemoteParForUtils
 			reporter.incrCounter(ParForProgramBlock.PARFOR_COUNTER_GROUP_NAME, Stat.PARFOR_NUMITERS.toString(), deltaIterations);
 		
 		JobConf job = ConfigurationManager.getCachedJobConf();
-		if( DMLScript.STATISTICS  && !InfrastructureAnalyzer.isLocalMode(job) ) 
+		if( ConfigurationManager.isStatistics()  && !InfrastructureAnalyzer.isLocalMode(job) ) 
 		{
 			//report cache statistics
 			reporter.incrCounter( ParForProgramBlock.PARFOR_COUNTER_GROUP_NAME, Stat.PARFOR_JITCOMPILE.toString(), Statistics.getJITCompileTime());

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParWorkerMapper.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParWorkerMapper.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParWorkerMapper.java
index 0bdb92e..7db5bcd 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParWorkerMapper.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteParWorkerMapper.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock;
@@ -108,8 +107,8 @@ public class RemoteParWorkerMapper extends ParWorker  //MapReduceBase not requir
 		
 		//print heavy hitters per task
 		JobConf job = ConfigurationManager.getCachedJobConf();
-		if( DMLScript.STATISTICS && !InfrastructureAnalyzer.isLocalMode(job) )
-			LOG.info("\nSystemML Statistics:\nHeavy hitter instructions (name, time, count):\n" + Statistics.getHeavyHitters(DMLScript.STATISTICS_COUNT));
+		if( ConfigurationManager.isStatistics() && !InfrastructureAnalyzer.isLocalMode(job) )
+			LOG.info("\nSystemML Statistics:\nHeavy hitter instructions (name, time, count):\n" + Statistics.getHeavyHitters(ConfigurationManager.getDMLOptions().getStatisticsMaxHeavyHitters()));
 	}
 
 	@Override
@@ -210,7 +209,7 @@ public class RemoteParWorkerMapper extends ParWorker  //MapReduceBase not requir
 		}
 		
 		//always reset stats because counters per map task (for case of JVM reuse)
-		if( DMLScript.STATISTICS && !InfrastructureAnalyzer.isLocalMode(job) )
+		if( ConfigurationManager.isStatistics() && !InfrastructureAnalyzer.isLocalMode(job) )
 			Statistics.reset();
 	}
 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java
index 0a785fd..2884c32 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
 import org.apache.sysml.runtime.DMLRuntimeException;
@@ -159,7 +158,7 @@ public class ResultMergeRemoteMR extends ResultMerge
 	protected void executeMerge(String fname, String fnameNew, String[] srcFnames, InputInfo ii, OutputInfo oi, long rlen, long clen, int brlen, int bclen)
 	{
 		String jobname = "ParFor-RMMR";
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		JobConf job = new JobConf( ResultMergeRemoteMR.class );
 		job.setJobName(jobname+_pfid);
@@ -304,7 +303,7 @@ public class ResultMergeRemoteMR extends ResultMerge
 			throw new DMLRuntimeException(ex);
 		}
 		
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			long t1 = System.nanoTime();
 			Statistics.maintainCPHeavyHitters("MR-Job_"+jobname, t1-t0);
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java
index e801dc8..8055c2b 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
@@ -112,7 +112,7 @@ public class ResultMergeRemoteSpark extends ResultMerge
 	protected RDDObject executeMerge(MatrixObject compare, MatrixObject[] inputs, long rlen, long clen, int brlen, int bclen)
 	{
 		String jobname = "ParFor-RMSP";
-		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
+		long t0 = ConfigurationManager.isStatistics() ? System.nanoTime() : 0;
 		
 		SparkExecutionContext sec = (SparkExecutionContext)_ec;
 		boolean withCompare = (compare!=null);
@@ -187,7 +187,7 @@ public class ResultMergeRemoteSpark extends ResultMerge
 		//maintain statistics
 		Statistics.incrementNoOfCompiledSPInst();
 		Statistics.incrementNoOfExecutedSPInst();
-		if( DMLScript.STATISTICS ){
+		if( ConfigurationManager.isStatistics() ){
 			Statistics.maintainCPHeavyHitters(jobname, System.nanoTime()-t0);
 		}
 		

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorHops.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorHops.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorHops.java
index 4c3e7f0..ca0acf7 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorHops.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorHops.java
@@ -20,6 +20,7 @@
 package org.apache.sysml.runtime.controlprogram.parfor.opt;
 
 import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.Hop;
 import org.apache.sysml.hops.LeftIndexingOp;
 import org.apache.sysml.hops.OptimizerUtils;
@@ -80,7 +81,7 @@ public class CostEstimatorHops extends CostEstimator
 			}
 			//check for invalid cp memory estimate
 			else if ( h.getExecType()==ExecType.CP && value >= OptimizerUtils.getLocalMemBudget() ) {
-				if( DMLScript.rtplatform != DMLScript.RUNTIME_PLATFORM.SINGLE_NODE && h.getForcedExecType()==null )
+				if( ConfigurationManager.getExecutionMode() != DMLScript.RUNTIME_PLATFORM.SINGLE_NODE && h.getForcedExecType()==null )
 					LOG.warn("Memory estimate larger than budget but CP exec type (op="+h.getOpString()+", name="+h.getName()+", memest="+h.getMemEstimate()+").");
 				value = DEFAULT_MEM_REMOTE;
 			}

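The CostEstimatorHops hunk above now reads the execution mode via ConfigurationManager rather than the DMLScript static, while RUNTIME_PLATFORM itself still lives in DMLScript. A minimal sketch of the same comparison (helper name illustrative):

import org.apache.sysml.api.DMLScript;
import org.apache.sysml.conf.ConfigurationManager;

public class ExecModeCheckSketch {
	public boolean shouldWarnOnLargeCpEstimate(double memEstimate, double localBudget) {
		// warn only outside forced single-node execution and when the estimate exceeds the local budget
		return ConfigurationManager.getExecutionMode() != DMLScript.RUNTIME_PLATFORM.SINGLE_NODE
			&& memEstimate >= localBudget;
	}
}
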
http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java
index 83bcae8..d24ac16 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java
@@ -27,7 +27,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.OptimizerUtils;
 import org.apache.sysml.hops.ipa.InterProceduralAnalysis;
@@ -136,7 +135,7 @@ public class OptimizationWrapper
 		Timing time = new Timing(true);
 		
 		//maintain statistics
-		if( DMLScript.STATISTICS )
+		if( ConfigurationManager.isStatistics() )
 			Statistics.incrementParForOptimCount();
 		
 		//create specified optimizer
@@ -256,7 +255,7 @@ public class OptimizationWrapper
 		
 		long ltime = (long) time.stop();
 		LOG.trace("ParFOR Opt: Optimized plan in "+ltime+"ms.");
-		if( DMLScript.STATISTICS )
+		if( ConfigurationManager.isStatistics() )
 			Statistics.incrementParForOptimTime(ltime);
 		
 		//cleanup phase

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java
index 7adf2d8..c4f4fa3 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java
@@ -30,7 +30,6 @@ import java.util.stream.Collectors;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
 import org.apache.sysml.hops.AggBinaryOp;
@@ -1224,7 +1223,7 @@ public class OptimizerRuleBased extends Optimizer
 			// otherwise it defaults to the number of CPU cores and the
 			// operations are run in CP mode
 			//FIXME rework for nested parfor parallelism and body w/o gpu ops
-			if (DMLScript.USE_ACCELERATOR) {
+			if (ConfigurationManager.isGPU()) {
 				long perGPUBudget = GPUContextPool.initialGPUMemBudget();
 				double maxMemUsage = getMaxCPOnlyBudget(n);
 				if (maxMemUsage < perGPUBudget){

http://git-wip-us.apache.org/repos/asf/systemml/blob/ae268a9e/src/main/java/org/apache/sysml/runtime/instructions/Instruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/Instruction.java b/src/main/java/org/apache/sysml/runtime/instructions/Instruction.java
index b0ff996..b73a5c6 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/Instruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/Instruction.java
@@ -21,7 +21,7 @@ package org.apache.sysml.runtime.instructions;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.lops.Lop;
 import org.apache.sysml.parser.DataIdentifier;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
@@ -165,7 +165,7 @@ public abstract class Instruction
 	public String getExtendedOpcode() {
 		if(extendedOpcode != null)
 			return extendedOpcode;
-		if(DMLScript.FINEGRAINED_STATISTICS) {
+		if(ConfigurationManager.isFinegrainedStatistics()) {
 			String scriptInfo;
 			if(filename != null)
 				scriptInfo = " [" + filename + " " + beginLine + ":" + beginCol + "-" + endLine + ":" + endCol + "]";