Posted to commits@systemml.apache.org by de...@apache.org on 2016/12/16 19:35:20 UTC

[2/3] incubator-systemml git commit: [SYSTEMML-1153] Remove unused methods from project

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java
index 22830cf..e146821 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalParWorker.java
@@ -57,15 +57,7 @@ public class LocalParWorker extends ParWorker implements Runnable
 		_stopped   = false;
 		_max_retry = max_retry;
 	}
-	
-	/**
-	 * Sets the status to stopped such that execution will be aborted as soon as the
-	 * current task is finished.
-	 */
-	public void setStopped() {
-		_stopped = true;
-	}
-	
+
 	public void setFunctionNames(Collection<String> fnNames) {
 		_fnNames = fnNames;
 	}

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalTaskQueue.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalTaskQueue.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalTaskQueue.java
index 1ebaf00..dc4ca64 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalTaskQueue.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/LocalTaskQueue.java
@@ -106,18 +106,6 @@ public class LocalTaskQueue<T>
 		_closedInput = true;
 		notifyAll(); //notify all waiting readers
 	}
-	
-	/**
-	 * Synchronized read of the current number of tasks in the queue.
-	 * 
-	 * @return number of tasks in queue
-	 * @throws InterruptedException if InterruptedException occurs
-	 */
-	public synchronized int size()
-		throws InterruptedException
-	{
-		return _data.size();
-	}
 
 	@Override
 	public synchronized String toString() 

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java
index 5c5c5a6..e0b30f4 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java
@@ -107,12 +107,6 @@ public abstract class ParWorker
 		return _numIters;
 	}
 
-	public void resetExecutedTasks()
-	{
-		_numTasks = 0;
-		_numIters = 0;
-	}
-
 	protected void pinResultVariables()
 	{
 		for( String var : _resultVars )

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java
index bc6370d..3ac5722 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java
@@ -323,23 +323,6 @@ public class ProgramConverter
 		return tmpPB;
 	}
 
-	public static ForProgramBlock createShallowCopyParForProgramBlock(ParForProgramBlock pfpb, Program prog ) 
-		throws DMLRuntimeException
-	{
-		ParForProgramBlock tmpPB = new ParForProgramBlock(prog,pfpb.getIterablePredicateVars(),pfpb.getParForParams());
-		
-		tmpPB.setStatementBlock( pfpb.getStatementBlock() );
-		tmpPB.setResultVariables( pfpb.getResultVariables() );
-		
-		tmpPB.setFromInstructions( pfpb.getFromInstructions() );
-		tmpPB.setToInstructions( pfpb.getToInstructions() );
-		tmpPB.setIncrementInstructions( pfpb.getIncrementInstructions() );
-		tmpPB.setExitInstructions( pfpb.getExitInstructions() );
-		tmpPB.setChildBlocks( pfpb.getChildBlocks() );
-		
-		return tmpPB;
-	}
-
 	public static ParForProgramBlock createDeepCopyParForProgramBlock(ParForProgramBlock pfpb, long pid, int IDPrefix, Program prog, HashSet<String> fnStack, HashSet<String> fnCreated, boolean plain, boolean forceDeepCopy) 
 		throws DMLRuntimeException
 	{

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java
index 0934ecf..de6a9d0 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java
@@ -87,24 +87,6 @@ public class Task implements Serializable
 		return _iterations.size();
 	}
 
-	public void mergeTask( Task task )
-	{
-		//check for set iteration type
-		if( _type==TaskType.RANGE )
-			throw new RuntimeException("Task Merging not supported for tasks of type ITERATION_RANGE.");
-		
-		//check for same iteration name
-		String var1 = _iterations.getFirst().getName();
-		String var2 = task._iterations.getFirst().getName();
-		if( !var1.equals(var2) )
-			throw new RuntimeException("Task Merging not supported for tasks with different variable names");
-	
-		//merge tasks
-		for( IntObject o : task._iterations )
-			_iterations.addLast( o );
-	}
-	
-
 	@Override
 	public String toString() 
 	{

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimator.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimator.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimator.java
index 476522a..f76d1ef 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimator.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimator.java
@@ -178,22 +178,6 @@ public abstract class CostEstimator
 		return val;
 	}
 
-	public double computeLocalParBound(OptTree plan, OptNode n) 
-	{
-		return Math.floor(rComputeLocalValueBound(plan.getRoot(), n, plan.getCK()));		
-	}
-
-	public double computeLocalMemoryBound(OptTree plan, OptNode n) 
-	{
-		return rComputeLocalValueBound(plan.getRoot(), n, plan.getCM());
-	}
-
-	public double getMinMemoryUsage(OptNode pn) 
-	{
-		// TODO implement for DP enum optimizer
-		throw new RuntimeException("Not implemented yet.");
-	}
-
 	protected double getDefaultEstimate(TestMeasure measure) 
 	{
 		double val = -1;
@@ -239,43 +223,4 @@ public abstract class CostEstimator
 		ret /= len; //weighting
 		return ret;
 	}
-
-	protected double rComputeLocalValueBound( OptNode current, OptNode node, double currentVal )
-	{
-		if( current == node ) //found node
-			return currentVal;
-		else if( current.isLeaf() ) //node not here
-			return -1; 
-		else
-		{
-			switch( current.getNodeType() )
-			{
-				case GENERIC:
-				case FUNCCALL:
-				case IF:
-				case WHILE:
-				case FOR:
-					for( OptNode c : current.getChilds() ) 
-					{
-						double lval = rComputeLocalValueBound(c, node, currentVal);
-						if( lval > 0 )
-							return lval;
-					}
-					break;
-				case PARFOR:
-					for( OptNode c : current.getChilds() ) 
-					{
-						double lval = rComputeLocalValueBound(c, node, currentVal/current.getK());
-						if( lval > 0 )
-							return lval;
-					}
-					break;
-				default:
-					//do nothing
-			}
-		}
-			
-		return -1;
-	}
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorRuntime.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorRuntime.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorRuntime.java
index 4628c8c..1a5d8f4 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorRuntime.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/CostEstimatorRuntime.java
@@ -79,19 +79,6 @@ public class CostEstimatorRuntime extends CostEstimator
 		return getLeafNodeEstimate(measure, node);
 	}
 
-	public double getEstimate( TestMeasure measure, String instName, double datasize, double sparsity, DataFormat dataformat ) 
-		throws DMLRuntimeException
-	{
-		return getEstimate(measure, instName, datasize, sparsity, DEFAULT_EST_PARALLELISM, dataformat);
-	}
-
-	public double getEstimate( TestMeasure measure, String instName, double datasize, double sparsity, double parallelism, DataFormat dataformat ) 
-		throws DMLRuntimeException
-	{
-		double dim = Math.sqrt( datasize );		
-		return getEstimate(measure, instName, dim, dim, dim, sparsity, parallelism, dataformat);
-	}
-
 	public double getEstimate( TestMeasure measure, String instName, double dim1, double dim2, double dim3, double sparsity, DataFormat dataformat ) 
 		throws DMLRuntimeException
 	{

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptNode.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptNode.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptNode.java
index a317de2..271d018 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptNode.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptNode.java
@@ -25,9 +25,7 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.Set;
 
-import org.apache.sysml.lops.LopProperties;
 import org.apache.sysml.lops.Lop;
-
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PDataPartitionFormat;
 
@@ -55,17 +53,7 @@ public class OptNode
 		CP,
 		MR,
 		SPARK;
-		
-		public LopProperties.ExecType toLopsExecType() {
-			switch( this ) {
-				case CP: 	return LopProperties.ExecType.CP;
-				case MR: 	return LopProperties.ExecType.MR;
-				case SPARK: return LopProperties.ExecType.SPARK;
-			}
-			
-			return null;
-		}
-		
+
 		public ParForProgramBlock.PExecMode toParForExecMode() {
 			switch( this ) {
 				case CP: 	return ParForProgramBlock.PExecMode.LOCAL;
@@ -261,30 +249,6 @@ public class OptNode
 		return ret;
 	}
 
-	public boolean containsNode( OptNode qn )
-	{
-		boolean ret = (this == qn);
-		if( !ret && !isLeaf() )
-			for( OptNode n : _childs ) {
-				ret |= n.containsNode(qn);
-				if( ret ) break; //early abort
-			}
-		
-		return ret;
-	}
-
-	public boolean containsNode( NodeType type )
-	{
-		boolean ret = (_ntype == type);
-		if( !ret && !isLeaf() )
-			for( OptNode n : _childs ) {
-				ret |= n.containsNode(type);
-				if( ret ) break; //early abort
-			}
-		
-		return ret;
-	}
-
 	public boolean isLeaf()
 	{
 		return ( _childs == null || _childs.isEmpty() );
@@ -626,23 +590,4 @@ public class OptNode
 		return max;
 	}
 
-	@SuppressWarnings("unchecked")
-	public OptNode createShallowClone()
-	{
-		OptNode n = new OptNode(_ntype,_etype);
-		n.setID(_id);
-		n.setK(_k);		
-		if( _childs != null )
-			n.setChilds( (ArrayList<OptNode>)_childs.clone() );
-		if( _params != null )
-			n.setParams((HashMap<ParamType,String>)_params.clone());
-		return n;
-	}
-
-	public OptNode createDeepClone()
-	{
-		throw new RuntimeException("not implemented yet");
-	}
-
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreeConverter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreeConverter.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreeConverter.java
index dba6dd1..71d0931 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreeConverter.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreeConverter.java
@@ -91,9 +91,6 @@ public class OptTreeConverter
 	//internal state
 	private static OptTreePlanMappingAbstract _hlMap = null; 
 	private static OptTreePlanMappingRuntime  _rtMap = null;	
-	private static OptNode _tmpParent   = null;
-	private static OptNode _tmpChildOld = null;
-	private static OptNode _tmpChildNew = null;
 	
 	static
 	{
@@ -126,18 +123,6 @@ public class OptTreeConverter
 		return tree;
 	}
 
-	public static OptTree createOptTree( int ck, double cm, ParForProgramBlock pfpb ) 
-		throws DMLRuntimeException
-	{
-		// TODO: Passing an empty variable map here, for now. Must be reevaluated 
-		// whenever this function is used.
-		LocalVariableMap vars = new LocalVariableMap();
-		OptNode root = rCreateOptNode( pfpb, vars, true, true );		
-		OptTree tree = new OptTree(ck, cm, root);
-			
-		return tree;
-	}
-	
 	public static OptTree createAbstractOptTree( int ck, double cm, ParForStatementBlock pfsb, ParForProgramBlock pfpb, Set<String> memo, ExecutionContext ec ) 
 		throws DMLRuntimeException
 	{
@@ -866,106 +851,6 @@ public class OptTreeConverter
 	{
 		return _hlMap;
 	}
-	
-	public static OptTreePlanMappingRuntime getRuntimePlanMapping()
-	{
-		return _rtMap;
-	}
-
-	public static OptNode exchangeTemporary(OptNode pRoot, long hlNodeID, OptNode newRtNode) 
-		throws DMLRuntimeException 
-	{
-		OptNode hlNode = _hlMap.getOptNode(hlNodeID);
-		if( hlNode.getNodeType() == NodeType.PARFOR )
-		{
-			ParForProgramBlock pb = (ParForProgramBlock) _hlMap.getMappedProg(hlNodeID)[1];
-			OptNode rtNode = _rtMap.getOptNode(pb);
-			
-			//copy node internals (because it might be root node)
-			_tmpChildOld = rtNode.createShallowClone();
-			rtNode.setExecType(newRtNode.getExecType()); //TODO extend as required
-		}
-		else if (hlNode.getNodeType() == NodeType.HOP)
-		{
-			long pid1 = _hlMap.getMappedParentID(hlNode.getID()); //pbID
-			ProgramBlock pb = (ProgramBlock) _hlMap.getMappedProg(pid1)[1];
-			OptNode rtNode1 = _rtMap.getOptNode(pb);
-			long pid2 = _rtMap.getMappedParentID(rtNode1.getID());
-			OptNode rtNode2 = _rtMap.getOptNode(pid2);
-			
-			_tmpParent = rtNode2;
-			_tmpChildOld = rtNode1;		
-			_tmpChildNew = newRtNode;
-			_tmpParent.exchangeChild(_tmpChildOld, _tmpChildNew);
-		}
-		else
-		{
-			throw new DMLRuntimeException("Unexpected node type for plan node exchange.");
-		}
-		
-		return pRoot;
-	}
-
-	public static void revertTemporaryChange( long hlNodeID ) 
-		throws DMLRuntimeException 
-	{
-		OptNode node = _hlMap.getOptNode(hlNodeID);
-		
-		if( node.getNodeType() == NodeType.PARFOR )
-		{
-			ParForProgramBlock pb = (ParForProgramBlock) _hlMap.getMappedProg(hlNodeID)[1];
-			OptNode rtNode = _rtMap.getOptNode(pb);
-			rtNode.setExecType(_tmpChildOld.getExecType()); 	
-		}
-		else if( node.getNodeType() == NodeType.HOP )
-		{
-			//revert change (overwrite tmp child)
-			_tmpParent.exchangeChild(_tmpChildNew,_tmpChildOld);	
-		}
-		else
-		{
-			throw new DMLRuntimeException("Unexpected node type for plan node exchange.");
-		}
-		
-		//cleanup
-		_tmpParent = null;
-		_tmpChildOld = null;
-	}
-
-	public static OptNode exchangePermanently(OptNode pRoot, long hlNodeID, OptNode newRtNode) 
-		throws DMLRuntimeException 
-	{
-		OptNode hlNode = _hlMap.getOptNode(hlNodeID);
-		if( hlNode.getNodeType() == NodeType.PARFOR )
-		{
-			ParForProgramBlock pb = (ParForProgramBlock) _hlMap.getMappedProg(hlNodeID)[1];
-			OptNode rtNode = _rtMap.getOptNode(pb);
-			
-			//copy node internals (because it might be root node)
-			//(no need for update mapping)
-			rtNode.setExecType(newRtNode.getExecType()); //
-		}
-		else if (hlNode.getNodeType() == NodeType.HOP)
-		{
-			long pid1 = _hlMap.getMappedParentID(hlNode.getID()); //pbID
-			ProgramBlock pb = (ProgramBlock) _hlMap.getMappedProg(pid1)[1];
-			OptNode rtNode1 = _rtMap.getOptNode(pb);
-			long pid2 = _rtMap.getMappedParentID(rtNode1.getID());
-			OptNode rtNode2 = _rtMap.getOptNode(pid2);
-			
-			rtNode2.exchangeChild(rtNode1, newRtNode);
-			
-			//finally update mapping (all internal repositories)
-			newRtNode.setID(rtNode1.getID());
-			_rtMap.replaceMapping(pb, newRtNode);
-		}
-		else
-		{
-			throw new DMLRuntimeException("Unexpected node type for plan node exchange.");
-		}
-		
-		return pRoot;
-	}
 
 	public static void clear()
 	{
@@ -973,10 +858,6 @@ public class OptTreeConverter
 			_hlMap.clear();
 		if( _rtMap != null )
 			_rtMap.clear();
-		
-		_tmpParent = null;
-		_tmpChildOld = null;
-		_tmpChildNew = null;
 	}
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreePlanMappingRuntime.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreePlanMappingRuntime.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreePlanMappingRuntime.java
index 8efc561..0a7d9f6 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreePlanMappingRuntime.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptTreePlanMappingRuntime.java
@@ -21,7 +21,6 @@ package org.apache.sysml.runtime.controlprogram.parfor.opt;
 
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import org.apache.sysml.runtime.controlprogram.ProgramBlock;
 import org.apache.sysml.runtime.instructions.Instruction;
@@ -70,15 +69,7 @@ public class OptTreePlanMappingRuntime extends OptTreePlanMapping
 	{
 		return _id_rtprog.get( id );
 	}
-	
-	public OptNode getOptNode( Object prog )
-	{
-		for( Entry<Long,Object> e : _id_rtprog.entrySet() )
-			if( e.getValue() == prog )
-				return _id_optnode.get(e.getKey());
-		return null;
-	}
-	
+
 	@Override
 	public void clear()
 	{

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java
index 9a2d7dc..a7064da 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizationWrapper.java
@@ -20,9 +20,7 @@
 package org.apache.sysml.runtime.controlprogram.parfor.opt;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.commons.logging.Log;
@@ -33,35 +31,22 @@ import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.hops.OptimizerUtils;
 import org.apache.sysml.hops.ipa.InterProceduralAnalysis;
+import org.apache.sysml.hops.recompile.Recompiler;
 import org.apache.sysml.hops.rewrite.HopRewriteRule;
 import org.apache.sysml.hops.rewrite.ProgramRewriteStatus;
 import org.apache.sysml.hops.rewrite.ProgramRewriter;
 import org.apache.sysml.hops.rewrite.RewriteConstantFolding;
 import org.apache.sysml.hops.rewrite.RewriteRemoveUnnecessaryBranches;
 import org.apache.sysml.hops.rewrite.StatementBlockRewriteRule;
-import org.apache.sysml.hops.recompile.Recompiler;
 import org.apache.sysml.parser.DMLProgram;
 import org.apache.sysml.parser.ForStatement;
-import org.apache.sysml.parser.ForStatementBlock;
-import org.apache.sysml.parser.IfStatement;
-import org.apache.sysml.parser.IfStatementBlock;
-import org.apache.sysml.parser.LanguageException;
 import org.apache.sysml.parser.ParForStatementBlock;
-import org.apache.sysml.parser.StatementBlock;
-import org.apache.sysml.parser.WhileStatement;
-import org.apache.sysml.parser.WhileStatementBlock;
 import org.apache.sysml.runtime.DMLRuntimeException;
-import org.apache.sysml.runtime.controlprogram.ForProgramBlock;
 import org.apache.sysml.runtime.controlprogram.FunctionProgramBlock;
-import org.apache.sysml.runtime.controlprogram.IfProgramBlock;
 import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock;
-import org.apache.sysml.runtime.controlprogram.Program;
-import org.apache.sysml.runtime.controlprogram.ProgramBlock;
-import org.apache.sysml.runtime.controlprogram.WhileProgramBlock;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.POptMode;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
-import org.apache.sysml.runtime.controlprogram.context.ExecutionContextFactory;
 import org.apache.sysml.runtime.controlprogram.parfor.opt.Optimizer.CostModelType;
 import org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
 import org.apache.sysml.runtime.controlprogram.parfor.stat.Stat;
@@ -102,48 +87,6 @@ public class OptimizationWrapper
 				  .setLevel((Level) Level.DEBUG);
 		}
 	}
-	
-	/**
-	 * Called once per DML script (during program compile time) 
-	 * in order to optimize all top-level parfor program blocks.
-	 * 
-	 * NOTE: currently not used at all.
-	 * 
-	 * @param prog dml program
-	 * @param rtprog runtime program
-	 * @param monitor ?
-	 * @throws DMLRuntimeException if DMLRuntimeException occurs
-	 * @throws LanguageException if LanguageException occurs
-	 */
-	public static void optimize(DMLProgram prog, Program rtprog, boolean monitor) 
-		throws DMLRuntimeException, LanguageException 
-	{
-		LOG.debug("ParFOR Opt: Running optimize all on DML program "+DMLScript.getUUID());
-		
-		//init internal structures 
-		HashMap<Long, ParForStatementBlock> sbs = new HashMap<Long, ParForStatementBlock>();
-		HashMap<Long, ParForProgramBlock> pbs = new HashMap<Long, ParForProgramBlock>();	
-		
-		//find all top-level parfor pbs
-		findParForProgramBlocks(prog, rtprog, sbs, pbs);
-		
-		// Create an empty symbol table
-		ExecutionContext ec = ExecutionContextFactory.createContext();
-		
-		//optimize each top-level parfor pb independently
-		for( Entry<Long, ParForProgramBlock> entry : pbs.entrySet() )
-		{
-			long key = entry.getKey();
-			ParForStatementBlock sb = sbs.get(key);
-			ParForProgramBlock pb = entry.getValue();
-			
-			//optimize (and implicit exchange)
-			POptMode type = pb.getOptimizationMode(); //known to be >0
-			optimize( type, sb, pb, ec, monitor );
-		}		
-		
-		LOG.debug("ParFOR Opt: Finished optimization for DML program "+DMLScript.getUUID());
-	}
 
 	/**
 	 * Called once per top-level parfor (during runtime, on parfor execute)
@@ -337,88 +280,6 @@ public class OptimizationWrapper
 		}
 	}
 
-	private static void findParForProgramBlocks( DMLProgram prog, Program rtprog, 
-			HashMap<Long, ParForStatementBlock> sbs, HashMap<Long, ParForProgramBlock> pbs ) 
-		throws LanguageException
-	{
-		//handle function program blocks
-		HashMap<String,FunctionProgramBlock> fpbs = rtprog.getFunctionProgramBlocks();
-		for( Entry<String, FunctionProgramBlock> entry : fpbs.entrySet() )
-		{
-			String[] keypart = entry.getKey().split( Program.KEY_DELIM );
-			String namespace = keypart[0];
-			String name      = keypart[1]; 
-			
-			ProgramBlock pb = entry.getValue();
-			StatementBlock sb = prog.getFunctionStatementBlock(namespace, name);
-			
-			//recursive find 
-			rfindParForProgramBlocks(sb, pb, sbs, pbs);	
-		}
-		
-		//handle actual program blocks
-		ArrayList<ProgramBlock> tpbs = rtprog.getProgramBlocks();
-		for( int i=0; i<tpbs.size(); i++ )
-		{
-			ProgramBlock pb = tpbs.get(i);
-			StatementBlock sb = prog.getStatementBlock(i);
-			
-			//recursive find
-			rfindParForProgramBlocks(sb, pb, sbs, pbs);
-		}	
-	}
-
-	private static void rfindParForProgramBlocks( StatementBlock sb, ProgramBlock pb,
-			HashMap<Long, ParForStatementBlock> sbs, HashMap<Long, ParForProgramBlock> pbs )
-	{
-		if( pb instanceof ParForProgramBlock  ) 
-		{
-			//put top-level parfor into map, but no recursion
-			ParForProgramBlock pfpb = (ParForProgramBlock) pb;
-			ParForStatementBlock pfsb = (ParForStatementBlock) sb;
-			
-			LOG.trace("ParFOR: found ParForProgramBlock with POptMode="+pfpb.getOptimizationMode().toString());
-			
-			if( pfpb.getOptimizationMode() != POptMode.NONE )
-			{
-				//register programblock tree for optimization
-				long pfid = pfpb.getID();
-				pbs.put(pfid, pfpb);
-				sbs.put(pfid, pfsb);
-			}
-		}
-		else if( pb instanceof ForProgramBlock )
-		{
-			//recursive find
-			ArrayList<ProgramBlock> fpbs = ((ForProgramBlock) pb).getChildBlocks();
-			ArrayList<StatementBlock> fsbs = ((ForStatement)((ForStatementBlock) sb).getStatement(0)).getBody();
-			for( int i=0;  i< fpbs.size(); i++ )
-				rfindParForProgramBlocks(fsbs.get(i), fpbs.get(i), sbs, pbs);
-		}
-		else if( pb instanceof WhileProgramBlock )
-		{
-			//recursive find
-			ArrayList<ProgramBlock> wpbs = ((WhileProgramBlock) pb).getChildBlocks();
-			ArrayList<StatementBlock> wsbs = ((WhileStatement)((WhileStatementBlock) sb).getStatement(0)).getBody();
-			for( int i=0;  i< wpbs.size(); i++ )
-				rfindParForProgramBlocks(wsbs.get(i), wpbs.get(i), sbs, pbs);	
-		}
-		else if( pb instanceof IfProgramBlock  )
-		{
-			//recursive find
-			IfProgramBlock ifpb = (IfProgramBlock) pb;
-			IfStatement ifs = (IfStatement) ((IfStatementBlock) sb).getStatement(0);			
-			ArrayList<ProgramBlock> ipbs1 = ifpb.getChildBlocksIfBody();
-			ArrayList<ProgramBlock> ipbs2 = ifpb.getChildBlocksElseBody();
-			ArrayList<StatementBlock> isbs1 = ifs.getIfBody();
-			ArrayList<StatementBlock> isbs2 = ifs.getElseBody();			
-			for( int i=0;  i< ipbs1.size(); i++ )
-				rfindParForProgramBlocks(isbs1.get(i), ipbs1.get(i), sbs, pbs);				
-			for( int i=0;  i< ipbs2.size(); i++ )
-				rfindParForProgramBlocks(isbs2.get(i), ipbs2.get(i), sbs, pbs);								
-		}
-	}
-
 	private static Optimizer createOptimizer( POptMode otype ) 
 		throws DMLRuntimeException
 	{

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/Optimizer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/Optimizer.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/Optimizer.java
index fc71971..fdeffb8 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/Optimizer.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/Optimizer.java
@@ -19,20 +19,13 @@
 
 package org.apache.sysml.runtime.controlprogram.parfor.opt;
 
-import java.util.Collection;
-import java.util.LinkedList;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.sysml.hops.Hop;
 import org.apache.sysml.parser.ParForStatementBlock;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.POptMode;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
-import org.apache.sysml.runtime.controlprogram.parfor.opt.OptNode.ExecType;
-import org.apache.sysml.runtime.controlprogram.parfor.opt.OptNode.NodeType;
 
 
 /**
@@ -102,58 +95,5 @@ public abstract class Optimizer
 	{
 		return _numEvaluatedPlans;
 	}
-	
-	
-	
-	
-	
-	///////
-	//methods for common basic primitives
-	
-	/**
-	 * Enum node plans (only for current opt node)
-	 * 
-	 * @param n internal representation of a plan alternative for program blocks and instructions
-	 * @param lck ?
-	 * @return collection of optimization nodes
-	 */
-	protected Collection<OptNode> enumPlans( OptNode n, double lck )
-	{
-		Collection<OptNode> plans = enumerateExecTypes( n );
-		
-		//TODO additional enumerations / potential rewrites go here
-			
-		return plans;
-	}
-
-	private Collection<OptNode> enumerateExecTypes( OptNode n )
-	{
-		Collection<OptNode> dTypes = new LinkedList<OptNode>();
-		boolean genAlternatives = false;
-		
-		//determine if alternatives should be generated
-		if( n.isLeaf() ) //hop
-		{
-			Hop hop = OptTreeConverter.getAbstractPlanMapping().getMappedHop(n.getID());
-			if( hop.allowsAllExecTypes() )
-				genAlternatives = true;
-		}
-		else if( n.getNodeType()==NodeType.PARFOR ) //parfor pb
-		{
-			genAlternatives = true;
-		}
 
-		//generate alternatives
-		if( genAlternatives )
-		{
-			OptNode c1 = n.createShallowClone();
-			OptNode c2 = n.createShallowClone();
-			c1.setExecType(ExecType.CP);
-			c2.setExecType(ExecType.MR);
-			dTypes.add( c1 );
-			dTypes.add( c2 );
-		}
-		
-		return dTypes;	
-	}
 }

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java
index 357aff8..532c41c 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java
@@ -535,12 +535,6 @@ public class OptimizerRuleBased extends Optimizer
 		return mem;
 	}
 
-	protected static LopProperties.ExecType getRIXExecType( MatrixObject mo, PDataPartitionFormat dpf ) 
-		throws DMLRuntimeException
-	{
-		return getRIXExecType(mo, dpf, false);
-	}
-
 	protected static LopProperties.ExecType getRIXExecType( MatrixObject mo, PDataPartitionFormat dpf, boolean withSparsity ) 
 		throws DMLRuntimeException
 	{
@@ -579,37 +573,6 @@ public class OptimizerRuleBased extends Optimizer
 			return LopProperties.ExecType.CP_FILE;
 	}
 
-	public static PDataPartitionFormat decideBlockWisePartitioning( MatrixObject mo, PDataPartitionFormat dpf ) 
-		throws DMLRuntimeException
-	{
-		long rlen = mo.getNumRows();
-		long clen = mo.getNumColumns();
-		long brlen = mo.getNumRowsPerBlock();
-		long bclen = mo.getNumColumnsPerBlock();
-		long k = InfrastructureAnalyzer.getRemoteParallelMapTasks();
-		
-		PDataPartitionFormat ret = dpf;
-		if( getRIXExecType(mo, dpf)==LopProperties.ExecType.CP )
-		if( ret == PDataPartitionFormat.ROW_WISE )
-		{
-			if( rlen/brlen > 4*k && //note: average sparsity, read must deal with it
-				getRIXExecType(mo, PDataPartitionFormat.ROW_BLOCK_WISE, false)==LopProperties.ExecType.CP )
-			{
-				ret = PDataPartitionFormat.ROW_BLOCK_WISE;				
-			}
-		}
-		else if( ret == PDataPartitionFormat.COLUMN_WISE )
-		{
-			if( clen/bclen > 4*k && //note: average sparsity, read must deal with it
-				getRIXExecType(mo, PDataPartitionFormat.COLUMN_BLOCK_WISE, false)==LopProperties.ExecType.CP )
-			{
-				ret = PDataPartitionFormat.COLUMN_BLOCK_WISE;				
-			}
-		}
-				
-		return ret;	
-	}
-
 	public static boolean allowsBinaryCellPartitions( MatrixObject mo, PDataPartitionFormat dpf ) 
 		throws DMLRuntimeException
 	{
@@ -3550,12 +3513,7 @@ public class OptimizerRuleBased extends Optimizer
 		{
 			this.hopCandidate = hopCandidate;
 		}
-		
-		ProgramBlock getProgramBlock()
-		{
-			return pb;
-		}
-		
+
 		int getLocation()
 		{
 			return this.iLocation;

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/PerfTestTool.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/PerfTestTool.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/PerfTestTool.java
index 5d43941..9635707 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/PerfTestTool.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/PerfTestTool.java
@@ -30,9 +30,9 @@ import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.LinkedList;
+import java.util.Map.Entry;
 import java.util.Random;
 import java.util.StringTokenizer;
-import java.util.Map.Entry;
 
 import javax.xml.stream.XMLInputFactory;
 import javax.xml.stream.XMLOutputFactory;
@@ -43,10 +43,6 @@ import javax.xml.stream.XMLStreamWriter;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import au.com.bytecode.opencsv.CSVReader;
-import au.com.bytecode.opencsv.CSVWriter;
-
 import org.apache.sysml.api.DMLException;
 import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
@@ -54,10 +50,10 @@ import org.apache.sysml.lops.Lop;
 import org.apache.sysml.lops.MMTSJ.MMTSJType;
 import org.apache.sysml.parser.DMLProgram;
 import org.apache.sysml.parser.DataIdentifier;
-import org.apache.sysml.parser.ExternalFunctionStatement;
-import org.apache.sysml.parser.ParseException;
 import org.apache.sysml.parser.Expression.DataType;
 import org.apache.sysml.parser.Expression.ValueType;
+import org.apache.sysml.parser.ExternalFunctionStatement;
+import org.apache.sysml.parser.ParseException;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.ExternalFunctionProgramBlockCP;
 import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
@@ -83,9 +79,11 @@ import org.apache.sysml.runtime.matrix.MatrixFormatMetaData;
 import org.apache.sysml.runtime.matrix.data.InputInfo;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
-import org.apache.sysml.runtime.util.LocalFileUtils;
 import org.apache.sysml.runtime.util.MapReduceTool;
 
+import au.com.bytecode.opencsv.CSVReader;
+import au.com.bytecode.opencsv.CSVWriter;
+
 /**
  * DML Instructions Performance Test Tool: 
  * 
@@ -289,12 +287,6 @@ public class PerfTestTool
 		return tmp;
 	}
 
-	public CostFunction getInvariantCostFunction( TestMeasure measure, TestVariable[] variable, DataFormat dataformat )
-	{
-		//TODO: implement for additional rewrites
-		throw new RuntimeException("Not implemented yet.");
-	}
-
 	@SuppressWarnings("all")
 	public static boolean runTest()
 	{
@@ -920,44 +912,6 @@ public class PerfTestTool
 		return data;
 	}
 
-	public static MatrixObject generateInputDataset(String fname, double datasize, double sparsity, DataFormat df) 
-		throws IOException, CacheException
-	{
-		int dim = (int)Math.sqrt( datasize );
-		
-		//create random test data
-		double[][] d = generateTestMatrix(dim, dim, 1, 100, sparsity, 7);
-		
-		//create matrix block
-		MatrixBlock mb = null;
-		switch( df ) 
-		{
-			case DENSE:
-				mb = new MatrixBlock(dim,dim,false);
-				break;
-			case SPARSE:
-				mb = new MatrixBlock(dim,dim,true, (int)(sparsity*dim*dim));
-				break;
-		}
-		
-		//insert data
-		for(int i=0; i < dim; i++)
-			for(int j=0; j < dim; j++)
-				if( d[i][j]!=0 )
-					mb.setValue(i, j, d[i][j]);	
-		
-		MapReduceTool.deleteFileIfExistOnHDFS(fname);
-
-		MatrixCharacteristics mc = new MatrixCharacteristics(dim, dim, ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize());
-		MatrixFormatMetaData md = new MatrixFormatMetaData(mc, OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo);
-		MatrixObject mo = new MatrixObject(ValueType.DOUBLE,fname,md);
-		mo.acquireModify(mb);
-		mo.release();
-		mo.exportData(); //write to HDFS
-		
-		return mo;
-	}
-
 	public static MatrixObject generateInputDataset(String fname, double dim1, double dim2, double sparsity, DataFormat df) 
 		throws IOException, CacheException
 	{		
@@ -999,30 +953,6 @@ public class PerfTestTool
 		return mo;
 	}
 
-	public static MatrixObject generateEmptyResult(String fname, double datasize, DataFormat df ) 
-		throws IOException, CacheException
-	{
-		int dim = (int)Math.sqrt( datasize );
-		
-		/*
-		MatrixBlock mb = null;
-		switch( df ) 
-		{
-			case DENSE:
-				mb = new MatrixBlock(dim,dim,false);
-				break;
-			case SPARSE:
-				mb = new MatrixBlock(dim,dim,true);
-				break;
-		}*/
-	
-		MatrixCharacteristics mc = new MatrixCharacteristics(dim, dim, ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize());
-		MatrixFormatMetaData md = new MatrixFormatMetaData(mc, OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo);
-		MatrixObject mo = new MatrixObject(ValueType.DOUBLE,fname,md);
-		
-		return mo;
-	}
-
 	public static MatrixObject generateEmptyResult(String fname, double dim1, double dim2, DataFormat df ) 
 		throws IOException, CacheException
 	{
@@ -1080,19 +1010,6 @@ public class PerfTestTool
 		return matrix;
 	}
 
-	public static void externalReadProfile( String fname ) 
-		throws DMLRuntimeException, XMLStreamException, IOException
-	{
-		//validate external name (security issue)
-		if( !LocalFileUtils.validateExternalFilename(fname, false) )
-			throw new DMLRuntimeException("Invalid (non-trustworthy) external profile filename.");
-		
-		//register internals and read external profile
-		registerTestConfigurations();
-		registerInstructions();
-		readProfile( fname );
-	}
-
 	@SuppressWarnings("all")
 	private static HashMap<Integer,Long> writeResults( String dirname ) 
 		throws IOException, DMLRuntimeException 

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/ProgramRecompiler.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/ProgramRecompiler.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/ProgramRecompiler.java
index ef2d880..c21d0f8 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/ProgramRecompiler.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/ProgramRecompiler.java
@@ -25,15 +25,14 @@ import java.util.ArrayList;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
 import org.apache.sysml.hops.Hop;
+import org.apache.sysml.hops.Hop.VisitStatus;
 import org.apache.sysml.hops.HopsException;
 import org.apache.sysml.hops.IndexingOp;
 import org.apache.sysml.hops.OptimizerUtils;
-import org.apache.sysml.hops.Hop.VisitStatus;
 import org.apache.sysml.hops.recompile.Recompiler;
-import org.apache.sysml.lops.LopProperties;
 import org.apache.sysml.lops.Lop;
+import org.apache.sysml.lops.LopProperties;
 import org.apache.sysml.lops.LopsException;
-import org.apache.sysml.lops.compile.Dag;
 import org.apache.sysml.parser.DMLProgram;
 import org.apache.sysml.parser.DMLTranslator;
 import org.apache.sysml.parser.ForStatement;
@@ -47,13 +46,10 @@ import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.ForProgramBlock;
 import org.apache.sysml.runtime.controlprogram.IfProgramBlock;
 import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
-import org.apache.sysml.runtime.controlprogram.ParForProgramBlock;
 import org.apache.sysml.runtime.controlprogram.Program;
 import org.apache.sysml.runtime.controlprogram.ProgramBlock;
 import org.apache.sysml.runtime.controlprogram.WhileProgramBlock;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
-import org.apache.sysml.runtime.controlprogram.parfor.ProgramConverter;
-import org.apache.sysml.runtime.controlprogram.parfor.opt.OptNode.NodeType;
 import org.apache.sysml.runtime.instructions.Instruction;
 import org.apache.sysml.runtime.instructions.cp.ArithmeticBinaryCPInstruction;
 import org.apache.sysml.runtime.instructions.cp.Data;
@@ -486,157 +482,4 @@ public class ProgramRecompiler
 		
 		return tmp;
 	}
-	
-	
-	
-	
-	/////////////////////////////////
-	// experimental functionality
-	//////////
-
-	protected static void recompilePartialPlan( OptNode n ) 
-		throws DMLRuntimeException 
-	{
-		//NOTE: need to recompile complete programblock because (1) many to many relationships
-		//between hops and instructions and (2) due to changed internal variable names 
-		
-		try
-		{
-			//get parent program and statement block
-			OptTreePlanMappingAbstract map = OptTreeConverter.getAbstractPlanMapping();
-			long pid = map.getMappedParentID(n.getID());
-			Object[] o = map.getMappedProg(pid);
-			StatementBlock sbOld = (StatementBlock) o[0];
-			ProgramBlock pbOld = (ProgramBlock) o[1];
-			
-			//get changed node and set type appropriately
-			Hop hop = (Hop) map.getMappedHop(n.getID());
-			hop.setForcedExecType(n.getExecType().toLopsExecType()); 
-			hop.setLops(null); //to enable fresh construction
-		
-			//get all hops of statement and construct new instructions
-			Dag<Lop> dag = new Dag<Lop>();
-			for( Hop hops : sbOld.get_hops() )
-			{
-				hops.resetVisitStatus();
-				Recompiler.rClearLops(hops);
-				Lop lops = hops.constructLops();
-				lops.addToDag(dag);
-			}
-			
-			//construct new instructions
-			ArrayList<Instruction> newInst = dag.getJobs(sbOld, ConfigurationManager.getDMLConfig());
-			
-			
-			//exchange instructions
-			pbOld.getInstructions().clear();
-			pbOld.getInstructions().addAll(newInst);
-		}
-		catch(Exception ex)
-		{
-			throw new DMLRuntimeException(ex);
-		}
-	}
-
-	
-	/**
-	 * NOTE: need to recompile complete programblock because (1) many to many relationships
-	 * between hops and instructions and (2) due to changed internal variable names 
-	 * 
-	 * @param n internal representation of a plan alternative for program blocks and instructions
-	 * @return program block
-	 * @throws DMLRuntimeException if DMLRuntimeException occurs
-	 */
-	protected static ProgramBlock recompile( OptNode n ) 
-		throws DMLRuntimeException 
-	{
-		ProgramBlock pbNew = null;
-		
-		try
-		{
-			if( n.getNodeType() == NodeType.HOP )
-			{
-				//get parent program and statement block
-				OptTreePlanMappingAbstract map = OptTreeConverter.getAbstractPlanMapping();
-				long pid = map.getMappedParentID(n.getID());
-				Object[] o = map.getMappedProg(pid);
-				StatementBlock sbOld = (StatementBlock) o[0];
-				ProgramBlock pbOld = (ProgramBlock) o[1];
-				LopProperties.ExecType oldtype = null;
-				
-				//get changed node and set type appropriately
-				Hop hop = (Hop) map.getMappedHop(n.getID());
-				hop.setForcedExecType(n.getExecType().toLopsExecType()); 
-				hop.setLops(null); //to enable fresh construction
-			
-				//get all hops of statement and construct new lops
-				Dag<Lop> dag = new Dag<Lop>();
-				for( Hop hops : sbOld.get_hops() )
-				{
-					hops.resetVisitStatus();
-					Recompiler.rClearLops(hops);
-					Lop lops = hops.constructLops();
-					lops.addToDag(dag);
-				}
-				
-				//construct new instructions
-				ArrayList<Instruction> newInst = dag.getJobs(sbOld, ConfigurationManager.getDMLConfig());
-				
-				//exchange instructions
-				pbNew = new ProgramBlock(pbOld.getProgram());
-				pbNew.setInstructions(newInst);
-				
-				//reset type global repository
-				hop.setForcedExecType(oldtype);
-				
-			}
-			else if( n.getNodeType() == NodeType.PARFOR )
-			{	
-				//no recompilation required
-				OptTreePlanMappingAbstract map = OptTreeConverter.getAbstractPlanMapping();
-				ParForProgramBlock pb = (ParForProgramBlock)map.getMappedProg(n.getID())[1];
-				pbNew = ProgramConverter.createShallowCopyParForProgramBlock(pb, pb.getProgram());
-				((ParForProgramBlock)pbNew).setExecMode(n.getExecType().toParForExecMode());
-			}
-			else
-			{
-				throw new DMLRuntimeException("Unexpected node type.");
-			}
-		}
-		catch(Exception ex)
-		{
-			throw new DMLRuntimeException(ex);
-		}
-		
-		return pbNew;
-	}
-
-	protected static void exchangeProgram(long hlNodeID, ProgramBlock pbNew) 
-		throws DMLRuntimeException 
-	{
-		OptTreePlanMappingAbstract map = OptTreeConverter.getAbstractPlanMapping();
-		OptNode node = map.getOptNode(hlNodeID);
-		
-		if( node.getNodeType() == NodeType.HOP )
-		{
-			long pid = map.getMappedParentID(hlNodeID);
-			Object[] o = map.getMappedProg(pid);
-			ProgramBlock pbOld = (ProgramBlock) o[1];
-			
-			//exchange instructions (save version)
-			pbOld.getInstructions().clear();
-			pbOld.getInstructions().addAll( pbNew.getInstructions() );
-		}
-		else if( node.getNodeType() == NodeType.PARFOR )
-		{
-			ParForProgramBlock pbOld = (ParForProgramBlock) map.getMappedProg(node.getID())[1];
-			pbOld.setExecMode(((ParForProgramBlock)pbNew).getExecMode());
-			//TODO extend as required
-		}
-		else
-		{
-			throw new DMLRuntimeException("Unexpected node type: "+node.getNodeType());
-		}
-	}
-	
 }

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/InfrastructureAnalyzer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/InfrastructureAnalyzer.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/InfrastructureAnalyzer.java
index d5f8c7d..7cb01a2 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/InfrastructureAnalyzer.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/InfrastructureAnalyzer.java
@@ -135,20 +135,6 @@ public class InfrastructureAnalyzer
 		_remoteParReduce = preduce;
 	}
 	
-	/**
-	 * Gets the total number of available map and reduce slots.
-	 * 
-	 * @return number of available remote parallel task slots
-	 */
-	public static int getRemoteParallelTasks()
-	{
-		if( _remoteParMap == -1 )
-			analyzeHadoopCluster();
-		
-		return _remoteParMap + _remoteParReduce;
-	}
-	
-	
 	///////
 	//methods for obtaining memory properties
 	
@@ -202,20 +188,7 @@ public class InfrastructureAnalyzer
 	{
 		_remoteJVMMaxMemReduce = remoteMem;
 	}
-	
-	/**
-	 * Gets the maximum memory requirement [in bytes] of a given hadoop job.
-	 * 
-	 * @param job job configuration
-	 * @return remote max memory of hadoop job
-	 */
-	public static long getRemoteMaxMemory( JobConf job )
-	{
-		return (1024*1024) * Math.max(
-				               job.getMemoryForMapTask(),
-				               job.getMemoryForReduceTask() );			
-	}
-	
+
 	/**
 	 * Gets the maximum sort buffer memory requirement [in bytes] of a hadoop task.
 	 * 

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/StatisticMonitor.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/StatisticMonitor.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/StatisticMonitor.java
index 2547709..a558e16 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/StatisticMonitor.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/stat/StatisticMonitor.java
@@ -118,17 +118,7 @@ public class StatisticMonitor
 		stats.addLast(s);
 		
 	}
-	
-	/**
-	 * Cleans up the whole repository by discarding all collected information.
-	 */
-	public static void cleanUp()
-	{
-		_mapPwPf.clear();
-		_pfstats.clear();
-		_pwstats.clear();
-	}
-	
+
 	/**
 	 * Globally disables the statistic monitor for the currently active JVM.
 	 */

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/util/IDHandler.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/util/IDHandler.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/util/IDHandler.java
index f494b88..e70ae9c 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/util/IDHandler.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/util/IDHandler.java
@@ -33,31 +33,6 @@ import java.net.InetAddress;
  */
 public class IDHandler 
 {
-
-	public static long extractUncheckedLongID( String taskID )
-	{
-		//in: e.g., task_local_0002_m_000009 or jobID + ...
-		//out: e.g., 2000009
-
-		//generic parsing for flexible taskID formats
-		char[] c = taskID.toCharArray(); //all chars
-		long value = 1; //1 catch leading zeros as well
-		for( int i=0; i<c.length; i++ )
-		{
-			if( c[i] >= 48 && c[i]<=57 )  //'0'-'9'
-			{
-				long newVal = (c[i]-48);
-				
-				if( (Long.MAX_VALUE-value*10) < newVal ) 
-					throw new RuntimeException("WARNING: extractLongID will produced numeric overflow "+value);
-				
-				value = value*10 + newVal;
-			}
-		}
-		
-		return value;
-	}
-
 	public static int extractIntID( String taskID )
 	{
 		int maxlen = (int)(Math.log10(Integer.MAX_VALUE));
@@ -66,13 +41,6 @@ public class IDHandler
 		
 	}
 
-	public static long extractLongID( String taskID )
-	{
-		int maxlen = (int)(Math.log10(Long.MAX_VALUE));
-		long longVal = extractID( taskID, maxlen );
-		return longVal;
-	}
-
 	public static long concatIntIDsToLong( int part1, int part2 )
 	{
 		//big-endian version (in java uses only big endian)

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/functionobjects/Builtin.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/functionobjects/Builtin.java b/src/main/java/org/apache/sysml/runtime/functionobjects/Builtin.java
index e98b46e..dad8c2d 100644
--- a/src/main/java/org/apache/sysml/runtime/functionobjects/Builtin.java
+++ b/src/main/java/org/apache/sysml/runtime/functionobjects/Builtin.java
@@ -269,49 +269,7 @@ public class Builtin extends ValueFunction
 		// cloning is not supported for singleton classes
 		throw new CloneNotSupportedException();
 	}
-	
-	public boolean checkArity(int _arity) throws DMLRuntimeException {
-		switch (bFunc) {
-		case ABS:
-		case SIN:
-		case COS:
-		case TAN:
-		case ASIN:
-		case ACOS:
-		case ATAN:
-		case SIGN:	
-		case SQRT:
-		case EXP:
-		case PLOGP:
-		case NROW:
-		case NCOL:
-		case LENGTH:
-		case ROUND:
-		case PRINT:
-		case MAXINDEX:
-		case MININDEX:
-		case STOP:
-		case CEIL:
-		case FLOOR:
-		case CUMSUM:
-		case INVERSE:
-		case SPROP:	
-		case SIGMOID:
-		case SELP:
-			return (_arity == 1);
-		
-		case LOG:
-		case LOG_NZ:
-			return (_arity == 1 || _arity == 2);
-			
-		case MAX:
-		case MIN:
-			return (_arity == 2);
-		default:
-			throw new DMLRuntimeException("checkNumberOfOperands(): Unknown opcode: " + bFunc);
-		}
-	}
-	
+
 	public double execute (double in) 
 		throws DMLRuntimeException 
 	{

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/functionobjects/OffsetColumnIndex.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/functionobjects/OffsetColumnIndex.java b/src/main/java/org/apache/sysml/runtime/functionobjects/OffsetColumnIndex.java
index dc15a4d..afd5483 100644
--- a/src/main/java/org/apache/sysml/runtime/functionobjects/OffsetColumnIndex.java
+++ b/src/main/java/org/apache/sysml/runtime/functionobjects/OffsetColumnIndex.java
@@ -43,12 +43,7 @@ public class OffsetColumnIndex extends IndexFunction
 		//	singleObj = new OffsetColumnIndex(offset);
 		//return singleObj;
 	}
-	
-	public void setOutputSize(int rows, int columns){
-		numRowsInOutput = rows;
-		numColumnsInOutput = columns;
-	}
-	
+
 	public void setOffset(int offset){
 		this.offset = offset;
 	}

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/instructions/InstructionUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/InstructionUtils.java b/src/main/java/org/apache/sysml/runtime/instructions/InstructionUtils.java
index a18d49b..6ef39f1 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/InstructionUtils.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/InstructionUtils.java
@@ -212,13 +212,7 @@ public class InstructionUtils
 		Builtin.BuiltinCode bfc = Builtin.String2BuiltinCode.get(opcode);
 		return (bfc != null);
 	}
-	
-	public static boolean isOperand(String str) {
-		//note: split required for empty tokens
-		String[] parts = str.split(Instruction.DATATYPE_PREFIX);
-		return (parts.length > 1);
-	}
-	
+
 	/**
 	 * Evaluates if at least one instruction of the given instruction set
 	 * used the distributed cache; this call can also be used for individual
@@ -427,22 +421,6 @@ public class InstructionUtils
 		return agg;
 	}
 
-	public static AggregateUnaryOperator parseCumulativeAggregateUnaryOperator(UnaryOperator uop)
-	{
-		Builtin f = (Builtin)uop.fn;
-		
-		if( f.getBuiltinCode()==BuiltinCode.CUMSUM ) 
-			return parseCumulativeAggregateUnaryOperator("ucumack+") ;
-		else if( f.getBuiltinCode()==BuiltinCode.CUMPROD ) 
-			return parseCumulativeAggregateUnaryOperator("ucumac*") ;
-		else if( f.getBuiltinCode()==BuiltinCode.CUMMIN ) 
-			return parseCumulativeAggregateUnaryOperator("ucumacmin") ;
-		else if( f.getBuiltinCode()==BuiltinCode.CUMMAX ) 
-			return parseCumulativeAggregateUnaryOperator("ucumacmax" ) ;
-		
-		throw new RuntimeException("Unsupported cumulative aggregate unary operator: "+f.getBuiltinCode());
-	}
-
 	public static AggregateUnaryOperator parseBasicCumulativeAggregateUnaryOperator(UnaryOperator uop)
 	{
 		Builtin f = (Builtin)uop.fn;

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/instructions/MRJobInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/MRJobInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/MRJobInstruction.java
index 5b3c7e9..780325b 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/MRJobInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/MRJobInstruction.java
@@ -389,136 +389,6 @@ public class MRJobInstruction extends Instruction
 		setReplication(replication);
 	}	
 
-	public void setRandInstructions(long [] numRows, String[] inLabels,  
-			String randInstructions, String mapperInstructions, 
-			String aggInstructions, String otherInstructions, String [] outLabels, byte [] resultIndex, 
-			int numReducers, int replication)
-	{
-		setOutputs(resultIndex);
-			
-		setRecordReaderInstructions("");
-		setRandInstructions(randInstructions);
-		setMapperInstructions(mapperInstructions);
-		setShuffleInstructions("");
-		setAggregateInstructionsInReducer(aggInstructions);
-		setOtherInstructionsInReducer(otherInstructions);
-			
-		setInputOutputLabels(inLabels, outLabels);
-			
-		setNumberOfReducers(numReducers);
-		setReplication(replication);
-	}
-	
-
-	public void setMMCJInstructions(String[] inLabels, 
-			String mapperInstructions, String shuffleInstructions, 
-			String [] outLabels, byte [] resultIndex,  
-			int numReducers, int replication)
-	{
-		setOutputs(resultIndex);
-
-		setMapperInstructions(mapperInstructions);
-		setShuffleInstructions(shuffleInstructions);
-		setAggregateInstructionsInReducer("");
-		setOtherInstructionsInReducer("");
-
-		setInputOutputLabels(inLabels, outLabels);
-
-		setNumberOfReducers(numReducers);
-		setReplication(replication);
-	}
-
-	public void setMMRJInstructions(String[] inLabels, 
-			String mapperInstructions, String shuffleInstructions, String aggInstructions, String otherInstructions, 
-			String [] outLabels, byte [] resultIndex,  
-			int numReducers, int replication)
-	{
-		setOutputs(resultIndex);
-
-		setMapperInstructions(mapperInstructions);
-		setShuffleInstructions(shuffleInstructions);
-		setAggregateInstructionsInReducer(aggInstructions);
-		setOtherInstructionsInReducer(otherInstructions);
-
-		setInputOutputLabels(inLabels, outLabels);
-
-		setNumberOfReducers(numReducers);
-		setReplication(replication);
-	}
-	
-	// SortKeys Job does not have any instructions either in mapper or in reducer.
-	// It just has two inputs
-	public void setSORTKEYSInstructions(String [] inLabels,   
-			String mapperInstructions, String shuffleInstructions, 
-			String[] outLabels, byte [] resultIndex,  
-			int numReducers, int replication)
-	{
-		setOutputs(resultIndex);
-
-		setMapperInstructions(mapperInstructions);
-		setShuffleInstructions(shuffleInstructions);
-		setAggregateInstructionsInReducer("");
-		setOtherInstructionsInReducer("");
-
-		setInputOutputLabels(inLabels, outLabels);
-
-		setNumberOfReducers(numReducers);
-		setReplication(replication);
-	}
-
-	public void setCombineInstructions(String[] inLabels,  
-			String shuffleInstructions, String[] outLabels, byte[] resultIndex,  
-			int numReducers, int replication)
-	{
-		setOutputs(resultIndex);
-
-		setMapperInstructions("");
-		setShuffleInstructions(shuffleInstructions);
-		setAggregateInstructionsInReducer("");
-		setOtherInstructionsInReducer("");
-
-		setInputOutputLabels(inLabels, outLabels);
-
-		setNumberOfReducers(numReducers);
-		setReplication(replication);
-	}	
-	
-	public void setCentralMomentInstructions(String[] inLabels, 
-			String mapperInstructions, String shuffleInstructions, 
-			String[] outLabels, byte [] resultIndex,  
-			int numReducers, int replication)
-	{
-		setOutputs(resultIndex);
-
-		setMapperInstructions(mapperInstructions);
-		setShuffleInstructions(shuffleInstructions);
-		setAggregateInstructionsInReducer("");
-		setOtherInstructionsInReducer("");
-
-		setInputOutputLabels(inLabels, outLabels);
-
-		setNumberOfReducers(numReducers);
-		setReplication(replication);
-	}	
-	
-	public void setGroupedAggInstructions(String[] inLabels, 
-			String shuffleInstructions, String otherInstructions, 
-			String[] outLabels, byte [] resultIndex,  
-			int numReducers, int replication)
-	{
-		setOutputs(resultIndex);
-
-		setMapperInstructions("");
-		setShuffleInstructions(shuffleInstructions);
-		setAggregateInstructionsInReducer("");
-		setOtherInstructionsInReducer(otherInstructions);
-
-		setInputOutputLabels(inLabels, outLabels);
-
-		setNumberOfReducers(numReducers);
-		setReplication(replication);
-	}	
-	
 	public void setReBlockInstructions(String[] inLabels, 
 			String mapperInstructions, String reblockInstructions, String otherInstructions, 
 			String[] outLabels, byte [] resultIndex,  
@@ -567,89 +437,6 @@ public class MRJobInstruction extends Instruction
 		return sb.toString();
 	}
 	
-	public String getString(long [] arr)
-	{
-		StringBuilder sb = new StringBuilder();
-		for(int i = 0; i < arr.length; i++) {
-			sb.append(",");
-			sb.append(Long.toString(arr[i]));
-		}
-		
-		return sb.toString();
-	}
-	
-	public String getString(int [] arr)
-	{
-		StringBuilder sb = new StringBuilder();
-		for(int i = 0; i < arr.length; i++) {
-			sb.append(",");
-			sb.append(Integer.toString(arr[i]));
-		}
-		
-		return sb.toString();
-	}
-	
-	public String getString(OutputInfo[] iv_outputs) 
-	{
-		StringBuilder sb = new StringBuilder();
-		for(int i = 0 ; i < iv_outputs.length; i++) {
-			if(iv_outputs[i] == OutputInfo.BinaryBlockOutputInfo){
-				sb.append(", "); 
-				sb.append("BinaryBlockOutputInfo");
-			}
-			else if(iv_outputs[i] == OutputInfo.BinaryCellOutputInfo){
-				sb.append(", ");
-				sb.append("BinaryCellOutputInfo");
-			}
-			else if(iv_outputs[i] == OutputInfo.TextCellOutputInfo){
-				sb.append(", ");
-				sb.append("TextCellOutputInfo");
-			}
-			else {
-				sb.append(", (");
-				sb.append(iv_outputs[i].outputFormatClass);
-				sb.append(",");
-				sb.append(iv_outputs[i].outputKeyClass);
-				sb.append(",");
-				sb.append(iv_outputs[i].outputValueClass);
-				sb.append(")");
-			}
-		}
-		
-		return sb.toString();
-	}
-	
-	public String getString(InputInfo[] iv_inputs) 
-	{
-		StringBuilder sb = new StringBuilder();
-		for(int i = 0 ; i < iv_inputs.length; i++) {
-			if(iv_inputs[i] == InputInfo.BinaryBlockInputInfo){
-				sb.append(", ");
-				sb.append("BinaryBlockInputInfo");
-			}
-			else if(iv_inputs[i] == InputInfo.BinaryCellInputInfo){
-				sb.append(", ");
-				sb.append("BinaryCellInputInfo");
-			}
-			else if(iv_inputs[i] == InputInfo.TextCellInputInfo) {
-				sb.append(", ");
-				sb.append("TextCellInputInfo");
-			}
-			else {
-				sb.append(", (");
-				sb.append(iv_inputs[i].inputFormatClass);
-				sb.append(",");
-				sb.append(iv_inputs[i].inputKeyClass);
-				sb.append(",");
-				sb.append(iv_inputs[i].inputValueClass);
-				sb.append(")");
-			}
-		}
-		
-		return sb.toString();
-	}
-	
-	
 	public String toString()
 	{
 		String instruction = "";

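Note on the removals above: the dropped set*Instructions variants were thin convenience wrappers over the same individual setters, which remain on MRJobInstruction. As a sketch, the effect of the removed setCombineInstructions can be reproduced at the call site with the retained setters (all method names appear in the removed bodies; job is the MRJobInstruction being configured and the remaining variables correspond to the removed method's parameters):

    // Sketch: inline replacement for the removed setCombineInstructions(...)
    job.setOutputs(resultIndex);
    job.setMapperInstructions("");
    job.setShuffleInstructions(shuffleInstructions);
    job.setAggregateInstructionsInReducer("");
    job.setOtherInstructionsInReducer("");
    job.setInputOutputLabels(inLabels, outLabels);
    job.setNumberOfReducers(numReducers);
    job.setReplication(replication);
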
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/instructions/cp/CPOperand.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/CPOperand.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/CPOperand.java
index 2f7fdb6..345aa7b 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/CPOperand.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/CPOperand.java
@@ -19,8 +19,8 @@
 
 package org.apache.sysml.runtime.instructions.cp;
 
-import org.apache.sysml.lops.Lop;
-import org.apache.sysml.parser.Expression.*;
+import org.apache.sysml.parser.Expression.DataType;
+import org.apache.sysml.parser.Expression.ValueType;
 import org.apache.sysml.runtime.instructions.Instruction;
 
 
@@ -83,12 +83,6 @@ public class CPOperand
 	public void setLiteral(boolean literal) {
 		_isLiteral = literal;
 	}
-	
-	public void split_by_value_type_prefix ( String str ) {
-		String[] opr = str.split(Lop.VALUETYPE_PREFIX);
-		_name = opr[0];
-		_valueType = ValueType.valueOf(opr[1]);
-	}
 
 	public void split(String str){
 		String[] opr = str.split(Instruction.VALUETYPE_PREFIX);
@@ -109,10 +103,5 @@ public class CPOperand
 			_valueType = ValueType.valueOf(opr[1]);
 		}
 	}
-	
-	public void copy(CPOperand o){
-		_name = o.getName();
-		_valueType = o.getValueType();
-		_dataType = o.getDataType();
-	}
+
 }

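Note on the removal above: with CPOperand.copy() gone, the same three fields can be transferred through the public getters used in the removed body. A sketch, assuming a CPOperand(name, valueType, dataType) constructor exists (that constructor is an assumption, not shown in this diff):

    // Sketch: copy the three fields the removed copy() transferred;
    // the three-argument constructor is assumed.
    CPOperand copy = new CPOperand(o.getName(), o.getValueType(), o.getDataType());
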
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
index e7dd21a..d5ce3f7 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
@@ -981,27 +981,7 @@ public class VariableCPInstruction extends CPInstruction
 	public static Instruction prepareCreateMatrixVariableInstruction(String varName, String fileName, boolean fNameOverride, String format) throws DMLRuntimeException {
 		return parseInstruction(getBasicCreateVarString(varName, fileName, fNameOverride, DataType.MATRIX, format));
 	}
-	
-	public static Instruction prepareCreateVariableInstruction(String varName, String fileName, boolean fNameOverride, DataType dt, String format, MatrixCharacteristics mc) throws DMLRuntimeException {
-		StringBuilder sb = new StringBuilder();
-		sb.append(getBasicCreateVarString(varName, fileName, fNameOverride, dt, format));
-		
-		sb.append(Lop.OPERAND_DELIMITOR);
-		sb.append(mc.getRows());
-		sb.append(Lop.OPERAND_DELIMITOR);
-		sb.append(mc.getCols());
-		sb.append(Lop.OPERAND_DELIMITOR);
-		sb.append(mc.getRowsPerBlock());
-		sb.append(Lop.OPERAND_DELIMITOR);
-		sb.append(mc.getColsPerBlock());
-		sb.append(Lop.OPERAND_DELIMITOR);
-		sb.append(mc.getNonZeros());
-		
-		String str = sb.toString();
 
-		return parseInstruction(str);
-	}	
-	
 	public static Instruction prepareCreateVariableInstruction(String varName, String fileName, boolean fNameOverride, DataType dt, String format, MatrixCharacteristics mc, UpdateType update) throws DMLRuntimeException {
 		StringBuilder sb = new StringBuilder();
 		sb.append(getBasicCreateVarString(varName, fileName, fNameOverride, dt, format));

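Note on the removal above: the dropped prepareCreateVariableInstruction overload differed from the retained one only in not taking an UpdateType. Callers can switch to the retained overload and pass the update type explicitly; a sketch, with UpdateType.COPY as an assumed default value:

    // Sketch: call the retained overload; UpdateType.COPY as the default is an assumption.
    Instruction inst = VariableCPInstruction.prepareCreateVariableInstruction(
        varName, fileName, fNameOverride, dt, format, mc, UpdateType.COPY);
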
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/instructions/spark/MultiReturnParameterizedBuiltinSPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/MultiReturnParameterizedBuiltinSPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/MultiReturnParameterizedBuiltinSPInstruction.java
index f039a53..319d833 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/MultiReturnParameterizedBuiltinSPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/MultiReturnParameterizedBuiltinSPInstruction.java
@@ -78,10 +78,6 @@ public class MultiReturnParameterizedBuiltinSPInstruction extends ComputationSPI
 		_sptype = SPINSTRUCTION_TYPE.MultiReturnBuiltin;
 		_outputs = outputs;
 	}
-	
-	public CPOperand getOutput(int i) {
-		return _outputs.get(i);
-	}
 
 	public static MultiReturnParameterizedBuiltinSPInstruction parseInstruction( String str ) 
 		throws DMLRuntimeException 

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
index a8cda44..4d2bbd2 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
@@ -20,11 +20,9 @@
 package org.apache.sysml.runtime.instructions.spark;
 
 import org.apache.sysml.runtime.DMLRuntimeException;
-import org.apache.sysml.runtime.functionobjects.Not;
 import org.apache.sysml.runtime.instructions.InstructionUtils;
 import org.apache.sysml.runtime.instructions.cp.CPOperand;
 import org.apache.sysml.runtime.matrix.operators.Operator;
-import org.apache.sysml.runtime.matrix.operators.SimpleOperator;
 
 public abstract class UnarySPInstruction extends ComputationSPInstruction
 {
@@ -90,12 +88,4 @@ public abstract class UnarySPInstruction extends ComputationSPInstruction
 		}
 		return opcode;
 	}
-	
-	static SimpleOperator getSimpleUnaryOperator(String opcode)
-			throws DMLRuntimeException {
-		if (opcode.equalsIgnoreCase("!"))
-			return new SimpleOperator(Not.getNotFnObject());
-
-		throw new DMLRuntimeException("Unknown unary operator " + opcode);
-	}
 }

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDAggregateUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDAggregateUtils.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDAggregateUtils.java
index 63fdb82..8038157 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDAggregateUtils.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDAggregateUtils.java
@@ -69,20 +69,6 @@ public class RDDAggregateUtils
 		}
 	}
 
-	public static JavaPairRDD<MatrixIndexes, MatrixBlock> sumByKey( JavaPairRDD<MatrixIndexes, MatrixBlock> in )
-	{
-		//sum of blocks per key, w/o exploitation of correction blocks
-		return in.reduceByKey(
-				new SumMultiBlockFunction());
-	}
-
-	public static JavaPairRDD<MatrixIndexes, Double> sumCellsByKey( JavaPairRDD<MatrixIndexes, Double> in )
-	{
-		//sum of blocks per key, w/o exploitation of corrections
-		return in.reduceByKey(
-				new SumDoubleCellsFunction());
-	}
-
 	public static JavaPairRDD<MatrixIndexes, MatrixBlock> sumByKeyStable( JavaPairRDD<MatrixIndexes, MatrixBlock> in )
 	{
 		//stable sum of blocks per key, by passing correction blocks along with aggregates 		
@@ -144,13 +130,6 @@ public class RDDAggregateUtils
 				new AggregateSingleBlockFunction(aop) );
 	}
 
-	public static JavaPairRDD<MatrixIndexes, MatrixBlock> aggByKey( JavaPairRDD<MatrixIndexes, MatrixBlock> in, AggregateOperator aop )
-	{
-		//aggregate of blocks per key, w/o exploitation of correction blocks
-		return in.reduceByKey(
-				new AggregateMultiBlockFunction(aop));
-	}
-
 	public static JavaPairRDD<MatrixIndexes, MatrixBlock> aggByKeyStable( JavaPairRDD<MatrixIndexes, MatrixBlock> in, AggregateOperator aop )
 	{
 		//stable sum of blocks per key, by passing correction blocks along with aggregates 		
@@ -507,41 +486,6 @@ public class RDDAggregateUtils
 			return out;
 		}
 	}
-	
-	/**
-	 * This aggregate function uses kahan+ with corrections to aggregate input blocks; it is meant for 
-	 * reducebykey operations where we CANNOT reuse the same correction block independent of the input
-	 * block indexes. Note that this aggregation function does not apply to embedded corrections.
-	 * 
-	 */
-	private static class SumMultiBlockFunction implements Function2<MatrixBlock, MatrixBlock, MatrixBlock> 
-	{
-		private static final long serialVersionUID = -4015979658416853324L;
-
-		private AggregateOperator _op = null;
-		private MatrixBlock _corr = null;
-		
-		public SumMultiBlockFunction()
-		{
-			_op = new AggregateOperator(0, KahanPlus.getKahanPlusFnObject(), true, CorrectionLocationType.NONE);	
-			_corr = new MatrixBlock();
-		}
-		
-		@Override
-		public MatrixBlock call(MatrixBlock arg0, MatrixBlock arg1)
-			throws Exception 
-		{
-			//copy one input to output
-			MatrixBlock out = new MatrixBlock(arg0);
-			
-			//aggregate other input
-			_corr.reset(out.getNumRows(), out.getNumColumns());
-			OperationsOnMatrixValues.incrementalAggregation(out, _corr, arg1, _op, false);
-			
-			return out;
-		}
-	}
-	
 
 	/**
 	 * Note: currently we always include the correction and use a subsequent maptopair to
@@ -584,46 +528,6 @@ public class RDDAggregateUtils
 			return arg0;
 		}
 	}
-	
-	/**
-	 * Note: currently we always include the correction and use a subsequent maptopair to
-	 * drop them at the end because during aggregation we dont know if we produce an
-	 * intermediate or the final aggregate. 
-	 */
-	private static class AggregateMultiBlockFunction implements Function2<MatrixBlock, MatrixBlock, MatrixBlock> 
-	{
-		private static final long serialVersionUID = -3672377410407066396L;
-
-		private AggregateOperator _op = null;
-		private MatrixBlock _corr = null;
-		
-		public AggregateMultiBlockFunction( AggregateOperator op )
-		{
-			_op = op;	
-			_corr = new MatrixBlock();
-		}
-		
-		@Override
-		public MatrixBlock call(MatrixBlock arg0, MatrixBlock arg1)
-			throws Exception 
-		{
-			//copy one first input
-			MatrixBlock out = new MatrixBlock(arg0); 
-			
-			//aggregate second input
-			_corr.reset(out.getNumRows(), out.getNumColumns());
-			if(_op.correctionExists) {
-				OperationsOnMatrixValues.incrementalAggregation(
-						out, _corr, arg1, _op, true);
-			}
-			else {
-				OperationsOnMatrixValues.incrementalAggregation(
-						out, null, arg1, _op, true);
-			}
-			
-			return out;
-		}
-	}
 
 	private static class MergeBlocksFunction implements Function2<MatrixBlock, MatrixBlock, MatrixBlock> 
 	{		
@@ -669,14 +573,4 @@ public class RDDAggregateUtils
 		}
 
 	}
-
-	private static class SumDoubleCellsFunction implements Function2<Double, Double, Double> 
-	{
-		private static final long serialVersionUID = -8167625566734873796L;
-
-		@Override
-		public Double call(Double v1, Double v2) throws Exception {
-			return v1 + v2;
-		}	
-	}
 }

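Note on the removals above: the dropped non-stable variants (sumByKey, sumCellsByKey, aggByKey) performed plain reduceByKey aggregation without carrying correction blocks; their stable counterparts remain. Where an uncorrected per-key cell sum is still wanted, it can be inlined directly with Spark's Java API; a sketch, where cells is the input JavaPairRDD<MatrixIndexes, Double>:

    // Sketch: inline replacement for the removed sumCellsByKey()
    // (plain per-key sum of Double values, no Kahan correction).
    JavaPairRDD<MatrixIndexes, Double> sums = cells.reduceByKey(
        new Function2<Double, Double, Double>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Double call(Double a, Double b) { return a + b; }
        });
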
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
index 6872b57..d53f3cf 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
@@ -21,7 +21,6 @@
 package org.apache.sysml.runtime.instructions.spark.utils;
 
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 
 import org.apache.spark.HashPartitioner;
@@ -30,9 +29,6 @@ import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
 import org.apache.spark.api.java.function.Function2;
 import org.apache.spark.storage.StorageLevel;
-
-import scala.Tuple2;
-
 import org.apache.sysml.lops.Checkpoint;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.instructions.spark.functions.CopyBinaryCellFunction;
@@ -47,6 +43,8 @@ import org.apache.sysml.runtime.matrix.data.Pair;
 import org.apache.sysml.runtime.matrix.mapred.IndexedMatrixValue;
 import org.apache.sysml.runtime.util.UtilFunctions;
 
+import scala.Tuple2;
+
 public class SparkUtils 
 {	
 	//internal configuration
@@ -110,10 +108,6 @@ public class SparkUtils
 		return new Pair<Long,FrameBlock>(in._1(), in._2());
 	}
 
-	public static Pair<Long,FrameBlock> toIndexedFrameBlock( Long ix, FrameBlock fb ) {
-		return new Pair<Long,FrameBlock>(ix, fb);
-	}
-	
 	/**
 	 * Indicates if the input RDD is hash partitioned, i.e., it has a partitioner
 	 * of type {@code org.apache.spark.HashPartitioner}.
@@ -155,42 +149,6 @@ public class SparkUtils
 			return in.mapPartitionsToPair(new CopyBlockPairFunction(deep), true);
 	}
 
-	public static MatrixBlock[] partitionIntoRowBlocks( MatrixBlock mb, int blen ) 
-		throws DMLRuntimeException
-	{
-		//in-memory rowblock partitioning (according to bclen of rdd)
-		int lrlen = mb.getNumRows();
-		int numBlocks = (int)Math.ceil((double)lrlen/blen);				
-		MatrixBlock[] partBlocks = new MatrixBlock[numBlocks];
-		for( int i=0; i<numBlocks; i++ )
-		{
-			MatrixBlock tmp = new MatrixBlock();
-			mb.sliceOperations(i*blen, Math.min((i+1)*blen, lrlen)-1, 
-					0, mb.getNumColumns()-1, tmp);
-			partBlocks[i] = tmp;
-		}			
-		
-		return partBlocks;
-	}
-
-	public static MatrixBlock[] partitionIntoColumnBlocks( MatrixBlock mb, int blen ) 
-		throws DMLRuntimeException
-	{
-		//in-memory colblock partitioning (according to brlen of rdd)
-		int lclen = mb.getNumColumns();
-		int numBlocks = (int)Math.ceil((double)lclen/blen);				
-		MatrixBlock[] partBlocks = new MatrixBlock[numBlocks];
-		for( int i=0; i<numBlocks; i++ )
-		{
-			MatrixBlock tmp = new MatrixBlock();
-			mb.sliceOperations(0, mb.getNumRows()-1, 
-					i*blen, Math.min((i+1)*blen, lclen)-1,  tmp);
-			partBlocks[i] = tmp;
-		}
-		
-		return partBlocks;
-	}
-	
 	// This returns RDD with identifier as well as location
 	public static String getStartLineFromSparkDebugInfo(String line) throws DMLRuntimeException {
 		// To remove: (2)  -- Assumption: At max, 9 RDDs as input to transformation/action
@@ -211,85 +169,6 @@ public class SparkUtils
 		else
 			return retVal + "|" + twoSpaces;
 	}
-			
-	
-	// len = {clen or rlen}, blen = {brlen or bclen}
-	public static long getStartGlobalIndex(long blockIndex, int blen, long len) {
-		return UtilFunctions.computeCellIndex(blockIndex, blen, 0);
-	}
-	
-	public static JavaPairRDD<MatrixIndexes, MatrixBlock> getRDDWithEmptyBlocks(JavaSparkContext sc, 
-			JavaPairRDD<MatrixIndexes, MatrixBlock> binaryBlocksWithoutEmptyBlocks,
-			long numRows, long numColumns, int brlen, int bclen) throws DMLRuntimeException {
-		JavaPairRDD<MatrixIndexes, MatrixBlock> binaryBlocksWithEmptyBlocks = null;
-		// ----------------------------------------------------------------------------
-		// Now take care of empty blocks
-		// This is done as non-rdd operation due to complexity involved in "not in" operations
-		// Since this deals only with keys and not blocks, it might not be that bad.
-		List<MatrixIndexes> indexes = binaryBlocksWithoutEmptyBlocks.keys().collect();
-		ArrayList<Tuple2<MatrixIndexes, MatrixBlock> > emptyBlocksList = getEmptyBlocks(indexes, numRows, numColumns, brlen, bclen);
-		if(emptyBlocksList != null && emptyBlocksList.size() > 0) {
-			// Empty blocks needs to be inserted
-			binaryBlocksWithEmptyBlocks = JavaPairRDD.fromJavaRDD(sc.parallelize(emptyBlocksList))
-					.union(binaryBlocksWithoutEmptyBlocks);
-		}
-		else {
-			binaryBlocksWithEmptyBlocks = binaryBlocksWithoutEmptyBlocks;
-		}
-		// ----------------------------------------------------------------------------
-		return binaryBlocksWithEmptyBlocks;
-	}
-	
-	private static ArrayList<Tuple2<MatrixIndexes, MatrixBlock>> getEmptyBlocks(List<MatrixIndexes> nonEmptyIndexes, long rlen, long clen, int brlen, int bclen) throws DMLRuntimeException {
-		long numBlocksPerRow = (long) Math.ceil((double)rlen / brlen);
-		long numBlocksPerCol = (long) Math.ceil((double)clen / bclen);
-		long expectedNumBlocks = numBlocksPerRow*numBlocksPerCol;
-		
-		if(expectedNumBlocks == nonEmptyIndexes.size()) {
-			return null; // no empty blocks required: sanity check
-		}
-		else if(expectedNumBlocks < nonEmptyIndexes.size()) {
-			throw new DMLRuntimeException("Error: Incorrect number of indexes in ReblockSPInstruction:" + nonEmptyIndexes.size());
-		}
-		
-		// ----------------------------------------------------------------------------
-		// Add empty blocks: Performs a "not-in" operation
-		Collections.sort(nonEmptyIndexes); // sort in ascending order first wrt rows and then wrt columns
-		ArrayList<Tuple2<MatrixIndexes, MatrixBlock>> retVal = new ArrayList<Tuple2<MatrixIndexes,MatrixBlock>>();
-		int index = 0;
-		for(long row = 1; row <=  Math.ceil((double)rlen / brlen); row++) {
-			for(long col = 1; col <=  Math.ceil((double)clen / bclen); col++) {
-				boolean matrixBlockExists = false;
-				if(nonEmptyIndexes.size() > index) {
-					matrixBlockExists = (nonEmptyIndexes.get(index).getRowIndex() == row) && (nonEmptyIndexes.get(index).getColumnIndex() == col);
-				}
-				if(matrixBlockExists) {
-					index++; // No need to add empty block
-				}
-				else {
-					// ------------------------------------------------------------------
-					//	Compute local block size: 
-					// Example: For matrix: 1500 X 1100 with block length 1000 X 1000
-					// We will have four local block sizes (1000X1000, 1000X100, 500X1000 and 500X1000)
-					long blockRowIndex = row;
-					long blockColIndex = col;
-					int emptyBlk_lrlen = UtilFunctions.computeBlockSize(rlen, blockRowIndex, brlen);
-					int emptyBlk_lclen = UtilFunctions.computeBlockSize(clen, blockColIndex, bclen);
-					// ------------------------------------------------------------------
-					
-					MatrixBlock emptyBlk = new MatrixBlock(emptyBlk_lrlen, emptyBlk_lclen, true);
-					retVal.add(new Tuple2<MatrixIndexes, MatrixBlock>(new MatrixIndexes(blockRowIndex, blockColIndex), emptyBlk));
-				}
-			}
-		}
-		// ----------------------------------------------------------------------------
-		
-		if(index != nonEmptyIndexes.size()) {
-			throw new DMLRuntimeException("Unexpected error while adding empty blocks");
-		}
-		
-		return retVal;
-	}
 
 	public static JavaPairRDD<MatrixIndexes, MatrixBlock> getEmptyBlockRDD( JavaSparkContext sc, MatrixCharacteristics mc )
 	{
@@ -323,18 +202,6 @@ public class SparkUtils
 		return ret;
 	}
 
-	public static JavaPairRDD<MatrixIndexes, MatrixBlock> cacheBinaryBlockRDD(JavaPairRDD<MatrixIndexes, MatrixBlock> input)
-	{
-		JavaPairRDD<MatrixIndexes, MatrixBlock> ret = null;
-		
-		if( !input.getStorageLevel().equals(DEFAULT_TMP) ) {
-			ret = SparkUtils.copyBinaryBlockMatrix(input, false)
-					.persist(DEFAULT_TMP);
-		}
-		
-		return ret;
-	}
-	
 	/**
 	 * Utility to compute dimensions and non-zeros in a given RDD of binary cells.
 	 * 
@@ -350,24 +217,6 @@ public class SparkUtils
 		
 		return ret;
 	}
-	
-	/**
-	 * Utility to compute dimensions and non-zeros in the given RDD of matrix blocks.
-	 * 
-	 * @param input matrix as {@code JavaPairRDD<MatrixIndexes, MatrixBlock>}
-	 * @param brlen number of rows in a block
-	 * @param bclen number of columns in a block
-	 * @return matrix characteristics
-	 */
-	public static MatrixCharacteristics computeMatrixCharacteristics(JavaPairRDD<MatrixIndexes, MatrixBlock> input, int brlen, int bclen) 
-	{
-		// compute dimensions and nnz in single pass
-		MatrixCharacteristics ret = input
-				.map(new AnalyzeBlockMatrixCharacteristics(brlen, bclen))
-				.reduce(new AggregateMatrixCharacteristics());
-		
-		return ret;
-	}
 
 	private static class AnalyzeCellMatrixCharacteristics implements Function<Tuple2<MatrixIndexes,MatrixCell>, MatrixCharacteristics> 
 	{
@@ -384,30 +233,6 @@ public class SparkUtils
 		}
 	}
 
-	private static class AnalyzeBlockMatrixCharacteristics implements Function<Tuple2<MatrixIndexes,MatrixBlock>, MatrixCharacteristics> 
-	{
-		private static final long serialVersionUID = -1857049501217936951L;
-		
-		private int _brlen = -1; 
-		private int _bclen = -1; 
-		
-		public AnalyzeBlockMatrixCharacteristics(int brlen, int bclen) {
-			_brlen = brlen;
-			_bclen = bclen;
-		}
-		
-		@Override
-		public MatrixCharacteristics call(Tuple2<MatrixIndexes, MatrixBlock> arg0) 
-			throws Exception 
-		{
-			MatrixBlock block = arg0._2();
-			long rlen = (arg0._1().getRowIndex()-1)*_brlen + block.getNumRows();
-			long clen = (arg0._1().getColumnIndex()-1)*_bclen + block.getNumColumns();
-			long nnz = block.getNonZeros();
-			return new MatrixCharacteristics(rlen, clen, _brlen, _bclen, nnz);
-		}
-	}
-
 	private static class AggregateMatrixCharacteristics implements Function2<MatrixCharacteristics, MatrixCharacteristics, MatrixCharacteristics> 
 	{
 		private static final long serialVersionUID = 4263886749699779994L;
@@ -424,28 +249,7 @@ public class SparkUtils
 					arg0.getNonZeros() + arg1.getNonZeros() ); //sum
 		}	
 	}
-	
-	////////////////////////////
-	//TODO MB: to be cleaned up but still used
-	
-	/**
-	 * Utility to compute number of non-zeros from the given RDD of MatrixCells
-	 * 
-	 * @param rdd matrix as {@code JavaPairRDD<MatrixIndexes, MatrixCell>}
-	 * @return number of non-zeros
-	 */
-	public static long computeNNZFromCells(JavaPairRDD<MatrixIndexes, MatrixCell> rdd) {
-		long nnz = rdd.values().filter(
-						new Function<MatrixCell,Boolean>() {
-							private static final long serialVersionUID = -6550193680630537857L;
-							@Override
-							public Boolean call(MatrixCell v1) throws Exception {
-								return (v1.getValue() != 0);
-							}
-						}).count();
-		return nnz;
-	}
-	
+
 	/**
 	 * Utility to compute number of non-zeros from the given RDD of MatrixBlocks
 	 * 

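Note on the removals above: the dropped cacheBinaryBlockRDD() only combined the retained copyBinaryBlockMatrix() with a persist call. A sketch of the same effect at a call site; the concrete storage level (MEMORY_AND_DISK below) is an assumption, since the class-internal DEFAULT_TMP level is not visible in this diff:

    // Sketch: shallow copy plus persist, as the removed cacheBinaryBlockRDD() did;
    // the storage level shown is an assumption.
    JavaPairRDD<MatrixIndexes, MatrixBlock> cached =
        SparkUtils.copyBinaryBlockMatrix(input, false)
                  .persist(StorageLevel.MEMORY_AND_DISK());
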
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/io/FrameReaderFactory.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/io/FrameReaderFactory.java b/src/main/java/org/apache/sysml/runtime/io/FrameReaderFactory.java
index afd1b60..6300b32 100644
--- a/src/main/java/org/apache/sysml/runtime/io/FrameReaderFactory.java
+++ b/src/main/java/org/apache/sysml/runtime/io/FrameReaderFactory.java
@@ -38,20 +38,6 @@ public class FrameReaderFactory
 		return createFrameReader(iinfo, props);
 	}
 
-	public static FrameReader createFrameReader( ReadProperties rprops ) 
-		throws DMLRuntimeException
-	{
-		//check valid read properties
-		if( rprops == null )
-			throw new DMLRuntimeException("Failed to create frame reader with empty properties.");
-		
-		InputInfo iinfo = rprops.inputInfo;
-		FileFormatProperties props = (iinfo==InputInfo.CSVInputInfo) ? ((rprops.formatProperties!=null) ? 
-			(CSVFileFormatProperties)rprops.formatProperties : new CSVFileFormatProperties()) : null;		
-			
-		return createFrameReader(iinfo, props);
-	}
-
 	public static FrameReader createFrameReader( InputInfo iinfo, FileFormatProperties props ) 
 		throws DMLRuntimeException
 	{

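Note on the removal above: the dropped ReadProperties-based factory overload can be replaced by extracting the input info and format properties and calling the retained createFrameReader(InputInfo, FileFormatProperties); a sketch, taken directly from the removed body (rprops is the ReadProperties instance):

    // Sketch: what the removed createFrameReader(ReadProperties) did internally.
    InputInfo iinfo = rprops.inputInfo;
    FileFormatProperties props = (iinfo == InputInfo.CSVInputInfo) ?
        ((rprops.formatProperties != null) ?
            (CSVFileFormatProperties) rprops.formatProperties :
            new CSVFileFormatProperties()) : null;
    FrameReader reader = FrameReaderFactory.createFrameReader(iinfo, props);
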
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/io/WriterTextCSV.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/io/WriterTextCSV.java b/src/main/java/org/apache/sysml/runtime/io/WriterTextCSV.java
index f3533af..479ce40 100644
--- a/src/main/java/org/apache/sysml/runtime/io/WriterTextCSV.java
+++ b/src/main/java/org/apache/sysml/runtime/io/WriterTextCSV.java
@@ -25,7 +25,6 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 
 import org.apache.hadoop.conf.Configuration;
@@ -238,91 +237,6 @@ public class WriterTextCSV extends MatrixWriter
 		}
 	}
 
-
-	
-	/**
-	 * Method to merge multiple CSV part files on HDFS into a single CSV file on HDFS. 
-	 * The part files are created by CSV_WRITE MR job. 
-	 * 
-	 * This method is invoked from CP-write instruction.
-	 * 
-	 * @param srcFileName source file name
-	 * @param destFileName destination file name
-	 * @param csvprop CSV file format properties
-	 * @param rlen number of rows
-	 * @param clen number of columns
-	 * @throws IOException if IOException occurs
-	 */
-	public final void mergeCSVPartFiles(String srcFileName, String destFileName, CSVFileFormatProperties csvprop, long rlen, long clen) 
-		throws IOException 
-	{	
-		Configuration conf = new Configuration(ConfigurationManager.getCachedJobConf());
-
-		Path srcFilePath = new Path(srcFileName);
-		Path mergedFilePath = new Path(destFileName);
-		FileSystem hdfs = FileSystem.get(conf);
-
-		if (hdfs.exists(mergedFilePath)) {
-			hdfs.delete(mergedFilePath, true);
-		}
-		OutputStream out = hdfs.create(mergedFilePath, true);
-
-		// write out the header, if needed
-		if (csvprop.hasHeader()) {
-			StringBuilder sb = new StringBuilder();
-			for (int i = 0; i < clen; i++) {
-				sb.append("C" + (i + 1));
-				if (i < clen - 1)
-					sb.append(csvprop.getDelim());
-			}
-			sb.append('\n');
-			out.write(sb.toString().getBytes());
-			sb.setLength(0);
-		}
-
-		// if the source is a directory
-		if (hdfs.isDirectory(srcFilePath)) {
-			try {
-				FileStatus[] contents = hdfs.listStatus(srcFilePath);
-				Path[] partPaths = new Path[contents.length];
-				int numPartFiles = 0;
-				for (int i = 0; i < contents.length; i++) {
-					if (!contents[i].isDirectory()) {
-						partPaths[i] = contents[i].getPath();
-						numPartFiles++;
-					}
-				}
-				Arrays.sort(partPaths);
-
-				for (int i = 0; i < numPartFiles; i++) {
-					InputStream in = hdfs.open(partPaths[i]);
-					try {
-						IOUtils.copyBytes(in, out, conf, false);
-						if(i<numPartFiles-1)
-							out.write('\n');
-					} 
-					finally {
-						IOUtilFunctions.closeSilently(in);
-					}
-				}
-			} finally {
-				IOUtilFunctions.closeSilently(out);
-			}
-		} else if (hdfs.isFile(srcFilePath)) {
-			InputStream in = null;
-			try {
-				in = hdfs.open(srcFilePath);
-				IOUtils.copyBytes(in, out, conf, true);
-			} finally {
-				IOUtilFunctions.closeSilently(in);
-				IOUtilFunctions.closeSilently(out);
-			}
-		} else {
-			throw new IOException(srcFilePath.toString()
-					+ ": No such file or directory");
-		}
-	}
-
 	@SuppressWarnings("unchecked")
 	public final void addHeaderToCSV(String srcFileName, String destFileName, long rlen, long clen) 
 		throws IOException 

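Note on the removal above: the dropped mergeCSVPartFiles() concatenated CSV part files on HDFS, optionally prepending a generated header. Where only the concatenation is needed, Hadoop's FileUtil.copyMerge is a close substitute, though it does not write the header line the removed method produced; a sketch under that assumption:

    // Sketch: merge part files with Hadoop's FileUtil.copyMerge;
    // unlike the removed method, no CSV header line is written.
    Configuration conf = new Configuration(ConfigurationManager.getCachedJobConf());
    FileSystem fs = FileSystem.get(conf);
    FileUtil.copyMerge(fs, new Path(srcFileName), fs, new Path(destFileName),
        false, conf, null);
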
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/1ab2d7e6/src/main/java/org/apache/sysml/runtime/matrix/data/LibMatrixReorg.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/matrix/data/LibMatrixReorg.java b/src/main/java/org/apache/sysml/runtime/matrix/data/LibMatrixReorg.java
index 5638dad..9c6af97 100644
--- a/src/main/java/org/apache/sysml/runtime/matrix/data/LibMatrixReorg.java
+++ b/src/main/java/org/apache/sysml/runtime/matrix/data/LibMatrixReorg.java
@@ -508,22 +508,7 @@ public class LibMatrixReorg
 		
 		return out;
 	}
-	
-	/**
-	 * CP rmempty operation (single input, single output matrix) 
-	 * 
-	 * @param in input matrix
-	 * @param ret output matrix
-	 * @param rows ?
-	 * @return matrix block
-	 * @throws DMLRuntimeException if DMLRuntimeException occurs
-	 */
-	public static MatrixBlock rmempty(MatrixBlock in, MatrixBlock ret, boolean rows) 
-		throws DMLRuntimeException
-	{
-		return rmempty(in, ret, rows, null);
-	}
-		
+
 	/**
 	 * CP rmempty operation (single input, single output matrix) 
 	 *