Posted to commits@systemml.apache.org by mb...@apache.org on 2017/11/10 06:06:52 UTC

[1/4] systemml git commit: [MINOR] Performance createvar instruction, cleanup data handles

Repository: systemml
Updated Branches:
  refs/heads/master d69686273 -> 6a11413b1


http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFixedsize.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFixedsize.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFixedsize.java
index d69837f..f283b69 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFixedsize.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFixedsize.java
@@ -60,7 +60,7 @@ public class TaskPartitionerFixedsize extends TaskPartitioner
 		for( long i = lFrom; i<=lTo;  )
 		{
 			//create new task and add to list of tasks
-			Task lTask = new Task( type );
+			Task lTask = new Task(_iterVarName, type);
 			tasks.addLast(lTask);
 			
 			int corr = (lfnp1-- > 0)? 1:0; //correction for static partitioner
@@ -71,9 +71,7 @@ public class TaskPartitionerFixedsize extends TaskPartitioner
 			{
 				//value based tasks
 				for( long j=0; j<_taskSize+corr && i<=lTo; j++, i+=lIncr )
-				{
-					lTask.addIteration(new IntObject(_iterVarName, i));				
-				}				
+					lTask.addIteration(new IntObject(i));
 			}
 			else 
 			{
@@ -81,9 +79,9 @@ public class TaskPartitionerFixedsize extends TaskPartitioner
 				long to = Math.min( i+(_taskSize-1+corr)*lIncr, lTo );
 				
 				//range based tasks
-				lTask.addIteration(new IntObject(_iterVarName, i));	    //from
-				lTask.addIteration(new IntObject(_iterVarName, to));    //to
-				lTask.addIteration(new IntObject(_iterVarName, lIncr));	//increment
+				lTask.addIteration(new IntObject(i));     //from
+				lTask.addIteration(new IntObject(to));    //to
+				lTask.addIteration(new IntObject(lIncr)); //increment
 				
 				i = to + lIncr;
 			}
@@ -112,7 +110,7 @@ public class TaskPartitionerFixedsize extends TaskPartitioner
 			for( long i = lFrom; i<=lTo;  )
 			{
 				//create new task and add to list of tasks
-				Task lTask = new Task( type );
+				Task lTask = new Task(_iterVarName, type);
 				
 				int corr = (lfnp1-- > 0)? 1:0; //correction for static partitioner
 				
@@ -122,9 +120,7 @@ public class TaskPartitionerFixedsize extends TaskPartitioner
 				{
 					//value based tasks
 					for( long j=0; j<_taskSize+corr && i<=lTo; j++, i+=lIncr )
-					{
-						lTask.addIteration(new IntObject(_iterVarName, i));				
-					}				
+						lTask.addIteration(new IntObject(i));
 				}
 				else 
 				{
@@ -132,9 +128,9 @@ public class TaskPartitionerFixedsize extends TaskPartitioner
 					long to = Math.min( i+(_taskSize-1+corr)*lIncr, lTo );
 					
 					//range based tasks
-					lTask.addIteration(new IntObject(_iterVarName, i));	    //from
-					lTask.addIteration(new IntObject(_iterVarName, to));    //to
-					lTask.addIteration(new IntObject(_iterVarName, lIncr));	//increment
+					lTask.addIteration(new IntObject(i));     //from
+					lTask.addIteration(new IntObject(to));    //to
+					lTask.addIteration(new IntObject(lIncr)); //increment
 					
 					i = to + lIncr;
 				}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/AggregateUnaryCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/AggregateUnaryCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/AggregateUnaryCPInstruction.java
index 132ec0a..d57a8dd 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/AggregateUnaryCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/AggregateUnaryCPInstruction.java
@@ -127,9 +127,9 @@ public class AggregateUnaryCPInstruction extends UnaryCPInstruction {
 			//create and set output scalar
 			ScalarObject ret = null;
 			switch( output.getValueType() ) {
-				case INT:	  ret = new IntObject(output_name, rval); break;
-				case DOUBLE:  ret = new DoubleObject(output_name, rval); break;
-				case STRING:  ret = new StringObject(output_name, String.valueOf(rval)); break;
+				case INT:	  ret = new IntObject(rval); break;
+				case DOUBLE:  ret = new DoubleObject(rval); break;
+				case STRING:  ret = new StringObject(String.valueOf(rval)); break;
 				
 				default: 
 					throw new DMLRuntimeException("Invalid output value type: "+output.getValueType());
@@ -148,7 +148,7 @@ public class AggregateUnaryCPInstruction extends UnaryCPInstruction {
 			ec.releaseMatrixInput(input1.getName(), getExtendedOpcode());
 			
 			if(output.getDataType() == DataType.SCALAR){
-				DoubleObject ret = new DoubleObject(output_name, resultBlock.getValue(0, 0));
+				DoubleObject ret = new DoubleObject(resultBlock.getValue(0, 0));
 				ec.setScalarOutput(output_name, ret);
 			} else{
 				// since the computed value is a scalar, allocate a "temp" output matrix

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/BooleanObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/BooleanObject.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/BooleanObject.java
index 757807f..31b8f8e 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/BooleanObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/BooleanObject.java
@@ -30,13 +30,9 @@ public class BooleanObject extends ScalarObject
 	private static final long serialVersionUID = -4506242165735516984L;
 
 	private final boolean _value;
-
+	
 	public BooleanObject(boolean val){
-		this(null,val);
-	}
-
-	public BooleanObject(String name,boolean val){
-		super(name, ValueType.BOOLEAN);
+		super(ValueType.BOOLEAN);
 		_value = val;
 	}
 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/CentralMomentCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/CentralMomentCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/CentralMomentCPInstruction.java
index aa3fd86..c13ca4c 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/CentralMomentCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/CentralMomentCPInstruction.java
@@ -123,7 +123,6 @@ public class CentralMomentCPInstruction extends AggregateUnaryCPInstruction {
 		ec.releaseMatrixInput(input1.getName(), getExtendedOpcode());
 		
 		double val = cmobj.getRequiredResult(cm_op);
-		DoubleObject ret = new DoubleObject(output_name, val);
-		ec.setScalarOutput(output_name, ret);
+		ec.setScalarOutput(output_name, new DoubleObject(val));
 	}
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/CovarianceCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/CovarianceCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/CovarianceCPInstruction.java
index 9457c65..699572f 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/CovarianceCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/CovarianceCPInstruction.java
@@ -104,8 +104,6 @@ public class CovarianceCPInstruction extends BinaryCPInstruction {
 		}
 		
 		double val = covobj.getRequiredResult(_optr);
-		DoubleObject ret = new DoubleObject(output_name, val);
-			
-		ec.setScalarOutput(output_name, ret);
+		ec.setScalarOutput(output_name, new DoubleObject(val));
 	}
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/Data.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/Data.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/Data.java
index d52583f..3b15b9e 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/Data.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/Data.java
@@ -30,15 +30,10 @@ import org.apache.sysml.runtime.matrix.MetaData;
 
 public abstract class Data implements Serializable 
 {
-
 	private static final long serialVersionUID = 9176228330268046168L;
 
-	protected DataType dataType;
-	protected ValueType valueType;
-	
-	public Data() {
-		//default constructor for serialize
-	}
+	protected final DataType dataType;
+	protected final ValueType valueType;
 	
 	protected Data(DataType dt, ValueType vt) {
 		dataType = dt;
@@ -52,18 +47,10 @@ public abstract class Data implements Serializable
 		return dataType;
 	}
 
-	public void setDataType(DataType dataType) {
-		this.dataType = dataType;
-	}
-
 	public ValueType getValueType() {
 		return valueType;
 	}
 
-	public void setValueType(ValueType valueType) {
-		this.valueType = valueType;
-	}
-
 	public void setMetaData(MetaData md) throws DMLRuntimeException {
 		throw new DMLRuntimeException("This method in the base class should never be invoked.");
 	}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/DoubleObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/DoubleObject.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/DoubleObject.java
index 4290788..2c7e89d 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/DoubleObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/DoubleObject.java
@@ -29,11 +29,7 @@ public class DoubleObject extends ScalarObject
 	private final double _value;
 
 	public DoubleObject(double val){
-		this(null,val);
-	}
-
-	public DoubleObject(String name, double val){
-		super(name, ValueType.DOUBLE);
+		super(ValueType.DOUBLE);
 		_value = val;
 	}
 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java
index 1c001f0..b36365c 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java
@@ -201,8 +201,6 @@ public class FunctionCallCPInstruction extends CPInstruction {
 			}
 			
 			//add/replace data in symbol table
-			if( boundValue instanceof MatrixObject )
-				((MatrixObject) boundValue).setVarName(boundVarName);
 			ec.setVariable(boundVarName, boundValue);
 		}
 	}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/IntObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/IntObject.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/IntObject.java
index d4d44a9..6816c6b 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/IntObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/IntObject.java
@@ -30,14 +30,10 @@ public class IntObject extends ScalarObject
 	private final long _value;
 
 	public IntObject(long val) {
-		this(null,val);
-	}
-
-	public IntObject(String name, long val) {
-		super(name, ValueType.INT);
+		super(ValueType.INT);
 		_value = val;
 	}
-
+	
 	@Override
 	public boolean getBooleanValue(){
 		return (_value!=0);

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/ScalarObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/ScalarObject.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/ScalarObject.java
index 1529dc1..7872ac9 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/ScalarObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/ScalarObject.java
@@ -25,16 +25,9 @@ import org.apache.sysml.parser.Expression.ValueType;
 public abstract class ScalarObject extends Data
 {
 	private static final long serialVersionUID = 6994413375932824892L;
-
-	private String _name;
 	
-	public ScalarObject(String name, ValueType vt) {
+	public ScalarObject(ValueType vt) {
 		super(DataType.SCALAR, vt);
-		_name = name;
-	}
-
-	public String getName() {
-		return _name;
 	}
 
 	public abstract boolean getBooleanValue();
@@ -49,7 +42,7 @@ public abstract class ScalarObject extends Data
 		return getStringValue();
 	}
 	
-	public abstract Object getValue();	
+	public abstract Object getValue();
 	
 	@Override
 	public String toString() { 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/StringObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/StringObject.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/StringObject.java
index 844d8a6..e099a3c 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/StringObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/StringObject.java
@@ -30,12 +30,8 @@ public class StringObject extends ScalarObject
 	
 	private final String _value;
 
-	public StringObject (String val){
-		this(null,val);
-	}
-
-	public StringObject(String name, String val){
-		super(name, ValueType.STRING);
+	public StringObject(String val){
+		super(ValueType.STRING);
 		_value = val;
 	}
 	

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/UaggOuterChainCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/UaggOuterChainCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/UaggOuterChainCPInstruction.java
index e48f396..d6fb334 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/UaggOuterChainCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/UaggOuterChainCPInstruction.java
@@ -105,7 +105,7 @@ public class UaggOuterChainCPInstruction extends UnaryCPInstruction {
 			//create and set output scalar
 			ScalarObject ret = null;
 			switch( output.getValueType() ) {
-				case DOUBLE:  ret = new DoubleObject(output_name, mbOut.quickGetValue(0, 0)); break;
+				case DOUBLE:  ret = new DoubleObject(mbOut.quickGetValue(0, 0)); break;
 				
 				default: 
 					throw new DMLRuntimeException("Invalid output value type: "+output.getValueType());

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
index 9ddcdcc..659924c 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
@@ -316,7 +316,7 @@ public class VariableCPInstruction extends CPInstruction {
 			in1 = new CPOperand(parts[1], vt, dt);
 			// file name
 			in2 = new CPOperand(parts[2], ValueType.STRING, DataType.SCALAR);
-			// file name override flag
+			// file name override flag (always literal)
 			in3 = new CPOperand(parts[3], ValueType.BOOLEAN, DataType.SCALAR);
 			
 			// format 
@@ -477,11 +477,11 @@ public class VariableCPInstruction extends CPInstruction {
 				//(existing objects gets cleared through rmvar instructions)
 				String fname = getInput2().getName();
 				// check if unique filename needs to be generated
-				if( Boolean.parseBoolean(getInput3().getName()) )
-					fname = fname + "_" + _uniqueVarID.getNextID();
-				
+				if( Boolean.parseBoolean(getInput3().getName()) ) {
+					fname = new StringBuilder(fname.length()+16).append(fname)
+						.append('_').append(_uniqueVarID.getNextID()).toString();
+				}
 				MatrixObject mobj = new MatrixObject(getInput1().getValueType(), fname );
-				mobj.setVarName(getInput1().getName());
 				//clone meta data because it is updated on copy-on-write, otherwise there
 				//is potential for hidden side effects between variables.
 				mobj.setMetaData((MetaData)metadata.clone());
@@ -494,7 +494,6 @@ public class VariableCPInstruction extends CPInstruction {
 			else if( getInput1().getDataType() == DataType.FRAME ) {
 				String fname = getInput2().getName();
 				FrameObject fobj = new FrameObject(fname);
-				fobj.setVarName(getInput1().getName());
 				fobj.setMetaData((MetaData)metadata.clone());
 				fobj.setFileFormatProperties(_formatProperties);
 				if( _schema != null )

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cpfile/MatrixIndexingCPFileInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cpfile/MatrixIndexingCPFileInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cpfile/MatrixIndexingCPFileInstruction.java
index 0b35837..6ebdbf6 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cpfile/MatrixIndexingCPFileInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cpfile/MatrixIndexingCPFileInstruction.java
@@ -21,7 +21,6 @@ package org.apache.sysml.runtime.instructions.cpfile;
 
 import org.apache.sysml.lops.LeftIndex;
 import org.apache.sysml.lops.RightIndex;
-import org.apache.sysml.parser.Expression.DataType;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
@@ -98,12 +97,8 @@ public final class MatrixIndexingCPFileInstruction extends IndexingCPInstruction
 			
 			if( MapReduceTool.existsFileOnHDFS(pfname) )
 			{
-				MatrixObject out = ec.getMatrixObject(output.getName());
-				
-				//create output matrix object				
+				//create output matrix object
 				MatrixObject mobj = new MatrixObject(mo.getValueType(), pfname );
-				mobj.setDataType( DataType.MATRIX );
-				mobj.setVarName( out.getVarName() );
 				MatrixCharacteristics mcNew = null;
 				switch( mo.getPartitionFormat() )
 				{

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/cpfile/ParameterizedBuiltinCPFileInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cpfile/ParameterizedBuiltinCPFileInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cpfile/ParameterizedBuiltinCPFileInstruction.java
index fed3ad5..3cd19f5 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cpfile/ParameterizedBuiltinCPFileInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cpfile/ParameterizedBuiltinCPFileInstruction.java
@@ -42,8 +42,6 @@ import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.sysml.conf.ConfigurationManager;
-import org.apache.sysml.parser.Expression.DataType;
-import org.apache.sysml.parser.Expression.ValueType;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.CacheException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
@@ -225,14 +223,9 @@ public class ParameterizedBuiltinCPFileInstruction extends ParameterizedBuiltinC
 		private static MatrixObject createNewOutputObject( MatrixObject src, MatrixObject out, long rows, long cols ) 
 			throws DMLRuntimeException
 		{
-			String varName = out.getVarName();
 			String fName = out.getFileName();
-			ValueType vt = src.getValueType();
 			MetaDataFormat metadata = (MetaDataFormat) src.getMetaData();
-			
-			MatrixObject moNew = new MatrixObject( vt, fName );
-			moNew.setVarName( varName );
-			moNew.setDataType( DataType.MATRIX );
+			MatrixObject moNew = new MatrixObject(src.getValueType(), fName);
 			
 			//handle empty output block (ensure valid dimensions)
 			if( rows==0 || cols ==0 ){
@@ -251,8 +244,8 @@ public class ParameterizedBuiltinCPFileInstruction extends ParameterizedBuiltinC
 			MatrixCharacteristics mcOld = metadata.getMatrixCharacteristics();
 			OutputInfo oiOld = metadata.getOutputInfo();
 			InputInfo iiOld = metadata.getInputInfo();
-			MatrixCharacteristics mc = new MatrixCharacteristics( rows, cols, mcOld.getRowsPerBlock(),
-					                                              mcOld.getColsPerBlock(), mcOld.getNonZeros());
+			MatrixCharacteristics mc = new MatrixCharacteristics( rows, cols,
+				mcOld.getRowsPerBlock(), mcOld.getColsPerBlock(), mcOld.getNonZeros());
 			MetaDataFormat meta = new MetaDataFormat(mc,oiOld,iiOld);
 			moNew.setMetaData( meta );
 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/spark/CentralMomentSPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/CentralMomentSPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/CentralMomentSPInstruction.java
index 82edf32..4e7b3f8 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/CentralMomentSPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/CentralMomentSPInstruction.java
@@ -132,8 +132,7 @@ public class CentralMomentSPInstruction extends UnarySPInstruction {
 
 		//create scalar output (no lineage information required)
 		double val = cmobj.getRequiredResult(_optr);
-		DoubleObject ret = new DoubleObject(output.getName(), val);
-		ec.setScalarOutput(output.getName(), ret);
+		ec.setScalarOutput(output.getName(), new DoubleObject(val));
 	}
 
 	private static class RDDCMFunction implements Function<MatrixBlock, CM_COV_Object> 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/spark/CheckpointSPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/CheckpointSPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/CheckpointSPInstruction.java
index 484cdaa..1db615d 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/CheckpointSPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/CheckpointSPInstruction.java
@@ -157,7 +157,7 @@ public class CheckpointSPInstruction extends UnarySPInstruction {
 		CacheableData<?> cd = sec.getCacheableData( input1.getName() );
 		if( out != in ) {                         //prevent unnecessary lineage info
 			RDDObject inro =  cd.getRDDHandle();  //guaranteed to exist (see above)
-			RDDObject outro = new RDDObject(out, output.getName()); //create new rdd object
+			RDDObject outro = new RDDObject(out); //create new rdd object
 			outro.setCheckpointRDD(true);         //mark as checkpointed
 			outro.addLineageChild(inro);          //keep lineage to prevent cycles on cleanup
 			cd.setRDDHandle(outro);

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/spark/CovarianceSPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/CovarianceSPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/CovarianceSPInstruction.java
index 07cf40d..dbc5695 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/CovarianceSPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/CovarianceSPInstruction.java
@@ -112,8 +112,7 @@ public class CovarianceSPInstruction extends BinarySPInstruction {
 
 		//create scalar output (no lineage information required)
 		double val = cmobj.getRequiredResult(_optr);
-		DoubleObject ret = new DoubleObject(output.getName(), val);
-		ec.setScalarOutput(output.getName(), ret);
+		ec.setScalarOutput(output.getName(), new DoubleObject(val));
 	}
 
 	private static class RDDCOVFunction implements Function<Tuple2<MatrixBlock,MatrixBlock>, CM_COV_Object>

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/spark/data/BroadcastObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/BroadcastObject.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/BroadcastObject.java
index 53b9d34..ff1ac4f 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/BroadcastObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/BroadcastObject.java
@@ -30,8 +30,8 @@ public class BroadcastObject<T extends CacheBlock> extends LineageObject
 	protected final SoftReference<PartitionedBroadcast<T>> _bcHandle;
 	private final long _size;
 	
-	public BroadcastObject( PartitionedBroadcast<T> bvar, String varName, long size ) {
-		super(varName);
+	public BroadcastObject( PartitionedBroadcast<T> bvar, long size ) {
+		super();
 		_bcHandle = new SoftReference<>(bvar);
 		_size = size;
 	}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/spark/data/DatasetObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/DatasetObject.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/DatasetObject.java
index 5030136..333a7cf 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/DatasetObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/DatasetObject.java
@@ -28,12 +28,12 @@ public class DatasetObject extends LineageObject
 	private final boolean _isVector;
 	private final boolean _containsID;
 	
-	public DatasetObject( Dataset<Row> dsvar, String varName) {
-		this(dsvar, varName, true, true);
+	public DatasetObject( Dataset<Row> dsvar) {
+		this(dsvar, true, true);
 	}
 	
-	public DatasetObject( Dataset<Row> dsvar, String varName, boolean isVector, boolean containsID) {
-		super(varName);
+	public DatasetObject( Dataset<Row> dsvar, boolean isVector, boolean containsID) {
+		super();
 		_dsHandle = dsvar;
 		_isVector = isVector;
 		_containsID = containsID;

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/spark/data/LineageObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/LineageObject.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/LineageObject.java
index 4d6f0c6..cd7c2ae 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/LineageObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/LineageObject.java
@@ -28,22 +28,16 @@ public abstract class LineageObject
 {
 	//basic lineage information
 	protected int _numRef = -1;
-	protected List<LineageObject> _childs = null;
-	protected final String _varName;
+	protected final List<LineageObject> _childs;
 	
 	//N:1 back reference to matrix/frame object
 	protected CacheableData<?> _cd = null;
 	
-	protected LineageObject(String varName) {
-		_varName = varName;
+	protected LineageObject() {
 		_numRef = 0;
 		_childs = new ArrayList<>();
 	}
 	
-	public String getVarName() {
-		return _varName;
-	}
-	
 	public int getNumReferences() {
 		return _numRef;
 	}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/instructions/spark/data/RDDObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/RDDObject.java b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/RDDObject.java
index 0a52323..f2ff064 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/RDDObject.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/RDDObject.java
@@ -32,8 +32,8 @@ public class RDDObject extends LineageObject
 	private boolean _parRDD = false;       //is a parallelized rdd at driver
 	private boolean _pending = true;       //is a pending rdd operation
 	
-	public RDDObject( JavaPairRDD<?,?> rddvar, String varName) {
-		super(varName);
+	public RDDObject( JavaPairRDD<?,?> rddvar) {
+		super();
 		_rddHandle = rddvar;
 	}
 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/udf/ExternalFunctionInvocationInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/udf/ExternalFunctionInvocationInstruction.java b/src/main/java/org/apache/sysml/udf/ExternalFunctionInvocationInstruction.java
index 70eb4bf..196ca37 100644
--- a/src/main/java/org/apache/sysml/udf/ExternalFunctionInvocationInstruction.java
+++ b/src/main/java/org/apache/sysml/udf/ExternalFunctionInvocationInstruction.java
@@ -130,7 +130,6 @@ public class ExternalFunctionInvocationInstruction extends Instruction
 				case Matrix:
 					Matrix m = (Matrix) fun.getFunctionOutput(i);
 					MatrixObject newVar = createOutputMatrixObject( m );
-					newVar.setVarName(output.getName());
 					ec.setVariable(output.getName(), newVar);
 					break;
 				case Scalar:
@@ -138,19 +137,16 @@ public class ExternalFunctionInvocationInstruction extends Instruction
 					ScalarObject scalarObject = null;
 					switch( s.getScalarType() ) {
 						case Integer:
-							scalarObject = new IntObject(output.getName(),
-									Long.parseLong(s.getValue()));
+							scalarObject = new IntObject(Long.parseLong(s.getValue()));
 							break;
 						case Double:
-							scalarObject = new DoubleObject(output.getName(),
-									Double.parseDouble(s.getValue()));
+							scalarObject = new DoubleObject(Double.parseDouble(s.getValue()));
 							break;
 						case Boolean:
-							scalarObject = new BooleanObject(output.getName(),
-									Boolean.parseBoolean(s.getValue()));
+							scalarObject = new BooleanObject(Boolean.parseBoolean(s.getValue()));
 							break;
 						case Text:
-							scalarObject = new StringObject(output.getName(), s.getValue());
+							scalarObject = new StringObject(s.getValue());
 							break;
 						default:
 							throw new DMLRuntimeException("Unknown scalar value type '"


[3/4] systemml git commit: [SYSTEMML-1990] Generalized ctable rewrites (seq-table, const inputs)

Posted by mb...@apache.org.
[SYSTEMML-1990] Generalized ctable rewrites (seq-table, const inputs)

This patch generalizes the existing rewrite of table(seq(),X,...) into
rexpand(X,...) to handle cases with unknown dimensions, including common
scenarios with column indexing on X. Additionally, it introduces a new
rewrite for table with constant matrix inputs (e.g.,
table(X, matrix(7)) -> table(X,7)).
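
For readers unfamiliar with the pattern, here is a minimal, self-contained Java
sketch (not part of the patch) of the equivalence the seq-table rewrite exploits:
table(seq(1,nrow(v)), v, nrow(v), m) with weight 1 produces the same row-wise
indicator (one-hot) matrix that rexpand(v, max=m, dir=row, ignore=false, cast=true)
computes directly. Class and method names are illustrative, and the crop/cast
handling is simplified.

import java.util.Arrays;

public class SeqTableVsRExpandSketch {
  // table(seq(1,n), v, n, m) with weight 1: each row i gets a single 1 at column v[i]
  static double[][] seqTable(double[] v, int m) {
    int n = v.length;
    double[][] out = new double[n][m];
    for (int i = 0; i < n; i++) {
      int col = (int) v[i];              // simplified cast-to-int semantics
      if (col >= 1 && col <= m)          // entries beyond m are cropped
        out[i][col - 1] += 1;
    }
    return out;
  }

  // rexpand(v, max=m, dir=row): directly expand each value into an indicator row
  static double[][] rexpandRows(double[] v, int m) {
    double[][] out = new double[v.length][m];
    for (int i = 0; i < v.length; i++) {
      int col = (int) v[i];
      if (col >= 1 && col <= m)
        out[i][col - 1] = 1;
    }
    return out;
  }

  public static void main(String[] args) {
    double[] v = {3, 1, 4, 2};
    // both paths produce the same 4x5 indicator matrix
    System.out.println(Arrays.deepEquals(seqTable(v, 5), rexpandRows(v, 5))); // true
  }
}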


Project: http://git-wip-us.apache.org/repos/asf/systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/systemml/commit/c9614324
Tree: http://git-wip-us.apache.org/repos/asf/systemml/tree/c9614324
Diff: http://git-wip-us.apache.org/repos/asf/systemml/diff/c9614324

Branch: refs/heads/master
Commit: c96143248349b6c68253ef9b3777afd5e5ed62f2
Parents: d696862
Author: Matthias Boehm <mb...@gmail.com>
Authored: Thu Nov 9 16:31:58 2017 -0800
Committer: Matthias Boehm <mb...@gmail.com>
Committed: Thu Nov 9 22:08:02 2017 -0800

----------------------------------------------------------------------
 .../sysml/hops/rewrite/HopRewriteUtils.java     | 27 ++++++-
 .../RewriteAlgebraicSimplificationDynamic.java  | 11 ++-
 .../RewriteAlgebraicSimplificationStatic.java   | 22 +++++-
 .../misc/RewriteCTableToRExpandTest.java        | 83 ++++++++++++++------
 .../RewriteCTableToRExpandLeftUnknownPos.dml    | 28 +++++++
 .../RewriteCTableToRExpandRightUnknownPos.dml   | 28 +++++++
 6 files changed, 167 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/systemml/blob/c9614324/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java b/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java
index 28b2189..66f4fc7 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java
@@ -965,6 +965,15 @@ public class HopRewriteUtils
 			|| isLiteralOfValue(hop.getInput().get(1), val));
 	}
 	
+	public static boolean isTernary(Hop hop, OpOp3 type) {
+		return hop instanceof TernaryOp && ((TernaryOp)hop).getOp()==type;
+	}
+	
+	public static boolean isTernary(Hop hop, OpOp3... types) {
+		return ( hop instanceof TernaryOp 
+			&& ArrayUtils.contains(types, ((TernaryOp) hop).getOp()));
+	}
+	
 	public static boolean containsInput(Hop current, Hop probe) {
 		return rContainsInput(current, probe, new HashSet<Long>());	
 	}
@@ -1052,6 +1061,15 @@ public class HopRewriteUtils
 		return true;
 	}
 	
+	public static boolean isColumnRightIndexing(Hop hop) {
+		return hop instanceof IndexingOp
+			&& ((IndexingOp) hop).isColLowerEqualsUpper()
+			&& ((hop.dimsKnown() && hop.getDim1() == hop.getInput().get(0).getDim1())
+			|| (isLiteralOfValue(hop.getInput().get(1), 1) 
+				&& isUnary(hop.getInput().get(2), OpOp1.NROW) 
+				&& hop.getInput().get(2).getInput().get(0)==hop.getInput().get(0)));
+	}
+	
 	public static boolean isFullColumnIndexing(LeftIndexingOp hop) {
 		return hop.isColLowerEqualsUpper()
 			&& isLiteralOfValue(hop.getInput().get(2), 1)
@@ -1112,9 +1130,7 @@ public class HopRewriteUtils
 			Hop to = dgop.getInput().get(dgop.getParamIndex(Statement.SEQ_TO));
 			Hop incr = dgop.getInput().get(dgop.getParamIndex(Statement.SEQ_INCR));
 			return isLiteralOfValue(from, 1) && isLiteralOfValue(incr, 1)
-				&& (isLiteralOfValue(to, row?input.getDim1():input.getDim2())
-					|| (to instanceof UnaryOp && ((UnaryOp)to).getOp()==(row?
-						OpOp1.NROW:OpOp1.NCOL) && to.getInput().get(0)==input));
+				&& isSizeExpressionOf(to, input, row);
 		}
 		return false;
 	}
@@ -1149,6 +1165,11 @@ public class HopRewriteUtils
 		throw new HopsException("Failed to retrieve 'to' argument from basic 1-N sequence.");
 	}
 	
+	public static boolean isSizeExpressionOf(Hop size, Hop input, boolean row) {
+		return (input.dimsKnown() && isLiteralOfValue(size, row?input.getDim1():input.getDim2()))
+			|| ((row ? isUnary(size, OpOp1.NROW) : isUnary(size, OpOp1.NCOL)) && (size.getInput().get(0)==input 
+			|| (isColumnRightIndexing(input) && size.getInput().get(0)==input.getInput().get(0))));
+	}
 	
 	public static boolean hasOnlyWriteParents( Hop hop, boolean inclTransient, boolean inclPersistent )
 	{

http://git-wip-us.apache.org/repos/asf/systemml/blob/c9614324/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java
index 0fa1aed..e07f97c 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java
@@ -2540,15 +2540,14 @@ public class RewriteAlgebraicSimplificationDynamic extends HopRewriteRule
 		//pattern: table(seq(1,nrow(v)), v, nrow(v), m) -> rexpand(v, max=m, dir=row, ignore=false, cast=true)
 		//note: this rewrite supports both left/right sequence 
 		if(    hi instanceof TernaryOp && hi.getInput().size()==5 //table without weights 
-			&& HopRewriteUtils.isLiteralOfValue(hi.getInput().get(2), 1) //i.e., weight of 1
-			&& hi.getInput().get(3) instanceof LiteralOp && hi.getInput().get(4) instanceof LiteralOp)
+			&& HopRewriteUtils.isLiteralOfValue(hi.getInput().get(2), 1) ) //i.e., weight of 1
 		{
 			Hop first = hi.getInput().get(0);
 			Hop second = hi.getInput().get(1);
 			
 			//pattern a: table(seq(1,nrow(v)), v, nrow(v), m, 1)
-			if( HopRewriteUtils.isBasic1NSequence(first, second, true) && second.dimsKnown() 
-				&& HopRewriteUtils.isLiteralOfValue(hi.getInput().get(3), second.getDim1()) )
+			if( HopRewriteUtils.isBasic1NSequence(first, second, true) 
+				&& HopRewriteUtils.isSizeExpressionOf(hi.getInput().get(3), second, true) )
 			{
 				//setup input parameter hops
 				HashMap<String,Hop> args = new HashMap<>();
@@ -2568,8 +2567,8 @@ public class RewriteAlgebraicSimplificationDynamic extends HopRewriteRule
 				LOG.debug("Applied simplifyTableSeqExpand1 (line "+hi.getBeginLine()+")");	
 			}
 			//pattern b: table(v, seq(1,nrow(v)), m, nrow(v))
-			else if( HopRewriteUtils.isBasic1NSequence(second, first, true) && first.dimsKnown() 
-				&& HopRewriteUtils.isLiteralOfValue(hi.getInput().get(4), first.getDim1()) )
+			else if( HopRewriteUtils.isBasic1NSequence(second, first, true)
+				&& HopRewriteUtils.isSizeExpressionOf(hi.getInput().get(4), first, true) )
 			{
 				//setup input parameter hops
 				HashMap<String,Hop> args = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/systemml/blob/c9614324/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java
index 4c68fe2..cbfb527 100644
--- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java
+++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java
@@ -152,6 +152,7 @@ public class RewriteAlgebraicSimplificationStatic extends HopRewriteRule
 			hi = foldMultipleAppendOperations(hi);               //e.g., cbind(X,cbind(Y,Z)) -> cbind(X,Y,Z)
 			hi = simplifyBinaryToUnaryOperation(hop, hi, i);     //e.g., X*X -> X^2 (pow2), X+X -> X*2, (X>0)-(X<0) -> sign(X)
 			hi = canonicalizeMatrixMultScalarAdd(hi);            //e.g., eps+U%*%t(V) -> U%*%t(V)+eps, U%*%t(V)-eps -> U%*%t(V)+(-eps) 
+			hi = simplifyCTableWithConstMatrixInputs(hi);        //e.g., table(X, matrix(1,...)) -> table(X, 1)
 			hi = simplifyReverseOperation(hop, hi, i);           //e.g., table(seq(1,nrow(X),1),seq(nrow(X),1,-1)) %*% X -> rev(X)
 			if(OptimizerUtils.ALLOW_OPERATOR_FUSION)
 				hi = simplifyMultiBinaryToBinaryOperation(hi);       //e.g., 1-X*Y -> X 1-* Y
@@ -664,13 +665,32 @@ public class RewriteAlgebraicSimplificationStatic extends HopRewriteRule
 			{
 				bop.setOp(OpOp2.PLUS);
 				HopRewriteUtils.replaceChildReference(bop,  right,
-						HopRewriteUtils.createBinaryMinus(right), 1);				
+						HopRewriteUtils.createBinaryMinus(right), 1);
 				LOG.debug("Applied canonicalizeMatrixMultScalarAdd2 (line "+hi.getBeginLine()+").");
 			}
 		}
 		
 		return hi;
 	}
+	
+	private static Hop simplifyCTableWithConstMatrixInputs( Hop hi ) 
+		throws HopsException
+	{
+		//pattern: table(X, matrix(1,...), matrix(7, ...)) -> table(X, 1, 7)
+		if( HopRewriteUtils.isTernary(hi, OpOp3.CTABLE) ) {
+			//note: the first input always expected to be a matrix
+			for( int i=1; i<hi.getInput().size(); i++ ) {
+				Hop inCurr = hi.getInput().get(i);
+				if( HopRewriteUtils.isDataGenOpWithConstantValue(inCurr) ) {
+					Hop inNew = ((DataGenOp)inCurr).getInput(DataExpression.RAND_MIN);
+					HopRewriteUtils.replaceChildReference(hi, inCurr, inNew, i);
+					LOG.debug("Applied simplifyCTableWithConstMatrixInputs"
+						+ i + " (line "+hi.getBeginLine()+").");
+				}
+			}
+		}
+		return hi;
+	}
 
 	/**
 	 * NOTE: this would be by definition a dynamic rewrite; however, we apply it as a static

http://git-wip-us.apache.org/repos/asf/systemml/blob/c9614324/src/test/java/org/apache/sysml/test/integration/functions/misc/RewriteCTableToRExpandTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/sysml/test/integration/functions/misc/RewriteCTableToRExpandTest.java b/src/test/java/org/apache/sysml/test/integration/functions/misc/RewriteCTableToRExpandTest.java
index b42a978..838fbb1 100644
--- a/src/test/java/org/apache/sysml/test/integration/functions/misc/RewriteCTableToRExpandTest.java
+++ b/src/test/java/org/apache/sysml/test/integration/functions/misc/RewriteCTableToRExpandTest.java
@@ -22,6 +22,7 @@ package org.apache.sysml.test.integration.functions.misc;
 import org.junit.Test;
 
 import org.junit.Assert;
+import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
 import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
 import org.apache.sysml.test.integration.AutomatedTestBase;
 import org.apache.sysml.test.integration.TestConfiguration;
@@ -33,6 +34,8 @@ public class RewriteCTableToRExpandTest extends AutomatedTestBase
 	private static final String TEST_NAME2 = "RewriteCTableToRExpandRightPos"; 
 	private static final String TEST_NAME3 = "RewriteCTableToRExpandLeftNeg"; 
 	private static final String TEST_NAME4 = "RewriteCTableToRExpandRightNeg"; 
+	private static final String TEST_NAME5 = "RewriteCTableToRExpandLeftUnknownPos";
+	private static final String TEST_NAME6 = "RewriteCTableToRExpandRightUnknownPos";
 	
 	private static final String TEST_DIR = "functions/misc/";
 	private static final String TEST_CLASS_DIR = TEST_DIR + RewriteCTableToRExpandTest.class.getSimpleName() + "/";
@@ -52,6 +55,8 @@ public class RewriteCTableToRExpandTest extends AutomatedTestBase
 		addTestConfiguration( TEST_NAME2, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME2, new String[] { "R" }) );
 		addTestConfiguration( TEST_NAME3, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME3, new String[] { "R" }) );
 		addTestConfiguration( TEST_NAME4, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME4, new String[] { "R" }) );
+		addTestConfiguration( TEST_NAME5, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME5, new String[] { "R" }) );
+		addTestConfiguration( TEST_NAME6, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME6, new String[] { "R" }) );
 	}
 
 	@Test
@@ -94,6 +99,25 @@ public class RewriteCTableToRExpandTest extends AutomatedTestBase
 		testRewriteCTableRExpand( TEST_NAME4, CropType.PAD );
 	}
 	
+	@Test
+	public void testRewriteCTableRExpandLeftUnknownDenseCrop()  {
+		testRewriteCTableRExpand( TEST_NAME5, CropType.CROP );
+	}
+	
+	@Test
+	public void testRewriteCTableRExpandLeftUnknownDensePad()  {
+		testRewriteCTableRExpand( TEST_NAME5, CropType.PAD );
+	}
+	
+	@Test
+	public void testRewriteCTableRExpandRightUnknownDenseCrop()  {
+		testRewriteCTableRExpand( TEST_NAME6, CropType.CROP );
+	}
+	
+	@Test
+	public void testRewriteCTableRExpandRightUnknownDensePad()  {
+		testRewriteCTableRExpand( TEST_NAME6, CropType.PAD );
+	}
 	
 	private void testRewriteCTableRExpand( String testname, CropType type )
 	{	
@@ -101,30 +125,45 @@ public class RewriteCTableToRExpandTest extends AutomatedTestBase
 		loadTestConfiguration(config);
 
 		int outDim = maxVal + ((type==CropType.CROP) ? -7 : 7);
+		boolean unknownTests = ( testname.equals(TEST_NAME5) || testname.equals(TEST_NAME6) );
+			
 		
-		String HOME = SCRIPT_DIR + TEST_DIR;
-		fullDMLScriptName = HOME + testname + ".dml";
-		programArgs = new String[]{ "-stats","-args", 
-			input("A"), String.valueOf(outDim), output("R") };
-		
-		fullRScriptName = HOME + testname + ".R";
-		rCmd = getRCmd(inputDir(), String.valueOf(outDim), expectedDir());			
-
-		double[][] A = getRandomMatrix(rows, 1, 1, 10, 1.0, 7);
-		writeInputMatrixWithMTD("A", A, false);
-		
-		//run performance tests
-		runTest(true, false, null, -1); 
+		RUNTIME_PLATFORM platformOld = rtplatform;
+		if( unknownTests )
+			rtplatform = RUNTIME_PLATFORM.SINGLE_NODE;
 		
-		//compare output meta data
-		boolean left = (testname.equals(TEST_NAME1) || testname.equals(TEST_NAME3));
-		boolean pos = (testname.equals(TEST_NAME1) || testname.equals(TEST_NAME2));
-		int rrows = (left && pos) ? rows : outDim;
-		int rcols = (!left && pos) ? rows : outDim;
-		checkDMLMetaDataFile("R", new MatrixCharacteristics(rrows, rcols, 1, 1));
-		
-		//check for applied rewrite
-		Assert.assertEquals(Boolean.valueOf(testname.equals(TEST_NAME1) || testname.equals(TEST_NAME2)),
+		try 
+		{
+			String HOME = SCRIPT_DIR + TEST_DIR;
+			fullDMLScriptName = HOME + testname + ".dml";
+			programArgs = new String[]{ "-explain","-stats","-args", 
+				input("A"), String.valueOf(outDim), output("R") };
+			
+			fullRScriptName = HOME + testname + ".R";
+			rCmd = getRCmd(inputDir(), String.valueOf(outDim), expectedDir());
+	
+			double[][] A = getRandomMatrix(rows, 1, 1, 10, 1.0, 7);
+			writeInputMatrixWithMTD("A", A, false);
+			
+			//run performance tests
+			runTest(true, false, null, -1); 
+			
+			//compare output meta data
+			boolean left = (testname.equals(TEST_NAME1) || testname.equals(TEST_NAME3) 
+				|| testname.equals(TEST_NAME5) || testname.equals(TEST_NAME6));
+			boolean pos = (testname.equals(TEST_NAME1) || testname.equals(TEST_NAME2));
+			int rrows = (left && pos) ? rows : outDim;
+			int rcols = (!left && pos) ? rows : outDim;
+			if( !unknownTests )
+				checkDMLMetaDataFile("R", new MatrixCharacteristics(rrows, rcols, 1, 1));
+			
+			//check for applied rewrite
+			Assert.assertEquals(Boolean.valueOf(testname.equals(TEST_NAME1) 
+				|| testname.equals(TEST_NAME2) || unknownTests),
 				Boolean.valueOf(heavyHittersContainsSubString("rexpand")));
+		}
+		finally {
+			rtplatform = platformOld;
+		}
 	}
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c9614324/src/test/scripts/functions/misc/RewriteCTableToRExpandLeftUnknownPos.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/misc/RewriteCTableToRExpandLeftUnknownPos.dml b/src/test/scripts/functions/misc/RewriteCTableToRExpandLeftUnknownPos.dml
new file mode 100644
index 0000000..4b07462
--- /dev/null
+++ b/src/test/scripts/functions/misc/RewriteCTableToRExpandLeftUnknownPos.dml
@@ -0,0 +1,28 @@
+#-------------------------------------------------------------
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#-------------------------------------------------------------
+
+A = read($1);
+
+T = matrix(1, nrow(A), 2);
+A2 = rand(rows=sum(T)/2, cols=100, min=1, max=10);
+R = table(seq(1,nrow(A2)), A2[,1], nrow(A2), $2);
+
+write(R, $3);

http://git-wip-us.apache.org/repos/asf/systemml/blob/c9614324/src/test/scripts/functions/misc/RewriteCTableToRExpandRightUnknownPos.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/misc/RewriteCTableToRExpandRightUnknownPos.dml b/src/test/scripts/functions/misc/RewriteCTableToRExpandRightUnknownPos.dml
new file mode 100644
index 0000000..68d2860
--- /dev/null
+++ b/src/test/scripts/functions/misc/RewriteCTableToRExpandRightUnknownPos.dml
@@ -0,0 +1,28 @@
+#-------------------------------------------------------------
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#-------------------------------------------------------------
+
+A = read($1);
+
+T = matrix(1, nrow(A), 2);
+A2 = rand(rows=sum(T)/2, cols=100, min=1, max=10);
+R = table(A2[,1], seq(1,nrow(A2)), $2, nrow(A2));
+
+write(R, $3);


[4/4] systemml git commit: [SYSTEMML-2009] Performance multi-threaded JMLC scoring (contention)

Posted by mb...@apache.org.
[SYSTEMML-2009] Performance multi-threaded JMLC scoring (contention)

This patch makes a major performance improvement to multi-threaded JMLC
scoring and script execution. For scripts with many small intermediates,
concurrent JMLC prepared scripts contended on sequence ID generators for
unique matrix IDs, which is unnecessary because these IDs are only used
by the bufferpool, which is disabled in JMLC. On an end-to-end
application with these characteristics, this change improved performance
from 487s to 94s (in a test environment with 24 vcores).
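
For context, a minimal Java sketch (not part of the patch) of the kind of
concurrent JMLC scoring workload this patch targets, assuming the standard JMLC
API (Connection, PreparedScript); the DML script, input data, variable names,
and thread count are illustrative. Each thread prepares and executes its own
script, so before this patch the concurrent executions contended on the shared
matrix-ID sequence generator.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.apache.sysml.api.jmlc.Connection;
import org.apache.sysml.api.jmlc.PreparedScript;

public class JmlcScoringSketch {
  public static void main(String[] args) throws Exception {
    // toy scoring script with a few small intermediates (illustrative)
    final String dml =
        "X = read(\"./tmp/X\", rows=-1, cols=-1);"
      + "W = read(\"./tmp/W\", rows=-1, cols=-1);"
      + "Yhat = (X %*% W) * 2 + 1;"
      + "write(Yhat, \"./tmp/Yhat\", format=\"text\");";

    try (Connection conn = new Connection()) {
      ExecutorService pool = Executors.newFixedThreadPool(24);
      for (int t = 0; t < 24; t++) {
        pool.submit(() -> {
          try {
            // one prepared script per thread; executions run concurrently
            PreparedScript ps = conn.prepareScript(dml,
              new String[]{"X", "W"}, new String[]{"Yhat"});
            ps.setMatrix("X", new double[][]{{1, 2}, {3, 4}});
            ps.setMatrix("W", new double[][]{{0.5}, {0.25}});
            double[][] yhat = ps.executeScript().getMatrix("Yhat");
            System.out.println(Thread.currentThread().getName()
              + " scored " + yhat.length + " rows");
          } catch (Exception e) {
            e.printStackTrace();
          }
        });
      }
      pool.shutdown();
      pool.awaitTermination(1, TimeUnit.MINUTES); // finish before closing the connection
    }
  }
}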
 

Project: http://git-wip-us.apache.org/repos/asf/systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/systemml/commit/6a11413b
Tree: http://git-wip-us.apache.org/repos/asf/systemml/tree/6a11413b
Diff: http://git-wip-us.apache.org/repos/asf/systemml/diff/6a11413b

Branch: refs/heads/master
Commit: 6a11413b13d88712745ba88a7f58f5544a1018e4
Parents: ffefd8e
Author: Matthias Boehm <mb...@gmail.com>
Authored: Thu Nov 9 21:46:06 2017 -0800
Committer: Matthias Boehm <mb...@gmail.com>
Committed: Thu Nov 9 22:08:03 2017 -0800

----------------------------------------------------------------------
 .../org/apache/sysml/api/jmlc/Connection.java   |  1 +
 .../org/apache/sysml/conf/CompilerConfig.java   |  2 ++
 .../controlprogram/caching/CacheableData.java   | 20 ++++++++------------
 .../instructions/cp/VariableCPInstruction.java  |  9 ++++++++-
 4 files changed, 19 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/systemml/blob/6a11413b/src/main/java/org/apache/sysml/api/jmlc/Connection.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/api/jmlc/Connection.java b/src/main/java/org/apache/sysml/api/jmlc/Connection.java
index 705d784..be75a95 100644
--- a/src/main/java/org/apache/sysml/api/jmlc/Connection.java
+++ b/src/main/java/org/apache/sysml/api/jmlc/Connection.java
@@ -112,6 +112,7 @@ public class Connection implements Closeable
 		CompilerConfig cconf = new CompilerConfig();
 		cconf.set(ConfigType.IGNORE_UNSPECIFIED_ARGS, true);
 		cconf.set(ConfigType.IGNORE_READ_WRITE_METADATA, true);
+		cconf.set(ConfigType.IGNORE_TEMPORARY_FILENAMES, true);
 		cconf.set(ConfigType.REJECT_READ_WRITE_UNKNOWNS, false);
 		cconf.set(ConfigType.PARALLEL_CP_READ_TEXTFORMATS, false);
 		cconf.set(ConfigType.PARALLEL_CP_WRITE_TEXTFORMATS, false);

http://git-wip-us.apache.org/repos/asf/systemml/blob/6a11413b/src/main/java/org/apache/sysml/conf/CompilerConfig.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/conf/CompilerConfig.java b/src/main/java/org/apache/sysml/conf/CompilerConfig.java
index bafe0ce..8c2c672 100644
--- a/src/main/java/org/apache/sysml/conf/CompilerConfig.java
+++ b/src/main/java/org/apache/sysml/conf/CompilerConfig.java
@@ -70,6 +70,7 @@ public class CompilerConfig
 		//Data expression configuration (modified by mlcontext, jmlc apis); no read of meta 
 		//data on mlcontext (local) /jmlc (global); ignore unknowns on jmlc
 		IGNORE_READ_WRITE_METADATA, // global skip meta data reads
+		IGNORE_TEMPORARY_FILENAMES, // global skip temporary filename modifications
 		REJECT_READ_WRITE_UNKNOWNS, // ignore missing meta data	
 		MLCONTEXT, // execution via new MLContext
 		
@@ -99,6 +100,7 @@ public class CompilerConfig
 		_bmap.put(ConfigType.ALLOW_CSE_PERSISTENT_READS, true);
 		_bmap.put(ConfigType.IGNORE_UNSPECIFIED_ARGS, false);
 		_bmap.put(ConfigType.IGNORE_READ_WRITE_METADATA, false);
+		_bmap.put(ConfigType.IGNORE_TEMPORARY_FILENAMES, false);
 		_bmap.put(ConfigType.REJECT_READ_WRITE_UNKNOWNS, true);
 		_bmap.put(ConfigType.MLCONTEXT, false);
 		_bmap.put(ConfigType.CODEGEN_ENABLED, false);

http://git-wip-us.apache.org/repos/asf/systemml/blob/6a11413b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
index 885fedb..9e787de 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
@@ -110,7 +110,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
     }
 	
 	/** Global flag indicating if caching is enabled (controls eviction) */
-	private static boolean _activeFlag = false;
+	private static volatile boolean _activeFlag = false;
 	
 	/** Global sequence for generating unique ids. */
 	private static IDSequence _seq = null;   
@@ -140,7 +140,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	 * The unique (JVM-wide) ID of a cacheable data object; to ensure unique IDs across JVMs, we
 	 * concatenate filenames with a unique prefix (map task ID). 
 	 */
-	private final int _uniqueID;
+	private final long _uniqueID;
 	
 	/** The cache status of the data blob (whether it can be or is evicted, etc. */
 	private CacheStatus _cacheStatus = null;
@@ -205,7 +205,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	 */
 	protected CacheableData(DataType dt, ValueType vt) {
 		super (dt, vt);
-		_uniqueID = (int)_seq.getNextID();
+		_uniqueID = isCachingActive() ? _seq.getNextID() : -1;
 		_cacheStatus = CacheStatus.EMPTY;
 		_numReadThreads = 0;
 		_gpuObjects = new HashMap<>();
@@ -1047,17 +1047,13 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	
 	// ------------- IMPLEMENTED CACHE LOGIC METHODS --------------	
 	
-	protected int getUniqueCacheID() {
-		return _uniqueID;
-	}
-
 	protected String getCacheFilePathAndName () {
 		if( _cacheFileName==null ) {
 			StringBuilder sb = new StringBuilder();
 			sb.append(CacheableData.cacheEvictionLocalFilePath); 
 			sb.append(CacheableData.cacheEvictionLocalFilePrefix);
-			sb.append(String.format ("%09d", getUniqueCacheID()));
-			sb.append(CacheableData.CACHING_EVICTION_FILEEXTENSION);			
+			sb.append(String.format ("%09d", _uniqueID));
+			sb.append(CacheableData.CACHING_EVICTION_FILEEXTENSION);
 			_cacheFileName = sb.toString();
 		}
 		
@@ -1361,15 +1357,15 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		_activeFlag = true; //turn on caching
 	}
 	
-	public static synchronized boolean isCachingActive() {
+	public static boolean isCachingActive() {
 		return _activeFlag;
 	}
 	
-	public static synchronized void disableCaching() {
+	public static void disableCaching() {
 		_activeFlag = false;
 	}
 	
-	public static synchronized void enableCaching() {
+	public static void enableCaching() {
 		_activeFlag = true;
 	}
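
The change above makes the global caching flag volatile and drops the synchronized keyword from its accessors; for a boolean that is only read or written in a single step, volatile already guarantees cross-thread visibility. A standalone sketch of the same pattern (generic Java, not SystemML code):

public class CachingFlag {
	//volatile provides visibility across threads for simple reads/writes,
	//so plain accessors no longer need synchronization; compound
	//check-then-act sequences would still require explicit locking
	private static volatile boolean active = false;

	public static boolean isActive() { return active; }
	public static void enable()      { active = true; }
	public static void disable()     { active = false; }
}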
 

http://git-wip-us.apache.org/repos/asf/systemml/blob/6a11413b/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
index 659924c..92750f7 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/VariableCPInstruction.java
@@ -28,6 +28,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.conf.CompilerConfig.ConfigType;
+import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.lops.Lop;
 import org.apache.sysml.lops.UnaryCP;
 import org.apache.sysml.parser.Expression.DataType;
@@ -1006,6 +1008,11 @@ public class VariableCPInstruction extends CPInstruction {
 	}
 	
 	private static String getBasicCreateVarString(String varName, String fileName, boolean fNameOverride, DataType dt, String format) {
+		//note: the filename override property leads to concatenation of unique ids in order to
+		//ensure non-conflicting filenames for objects that originate from the same instruction
+		boolean lfNameOverride = fNameOverride && !ConfigurationManager
+			.getCompilerConfigFlag(ConfigType.IGNORE_TEMPORARY_FILENAMES);
+		
 		StringBuilder sb = new StringBuilder();
 		sb.append("CP");
 		sb.append(Lop.OPERAND_DELIMITOR);
@@ -1016,7 +1023,7 @@ public class VariableCPInstruction extends CPInstruction {
 		sb.append(fileName);		// Constant CREATEVAR_FILE_NAME_VAR_POS is used to find a position of filename within a string generated through this function.
 									// If this position of filename within this string changes then constant CREATEVAR_FILE_NAME_VAR_POS to be updated.
 		sb.append(Lop.OPERAND_DELIMITOR);
-		sb.append(fNameOverride);
+		sb.append(lfNameOverride);
 		sb.append(Lop.OPERAND_DELIMITOR);
 		sb.append(dt.toString());
 		sb.append(Lop.OPERAND_DELIMITOR);
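
The note in the hunk above says the filename-override flag leads to unique ids being appended to temporary filenames so that objects created by the same instruction end up with distinct files. The helper below is a hypothetical, self-contained sketch of that effect and is not part of SystemML.

import java.util.concurrent.atomic.AtomicLong;

public class TempFileNames {
	private static final AtomicLong SEQ = new AtomicLong(0);

	//when the override flag is set, append a unique suffix so that two
	//objects created from the same base name get distinct files
	public static String uniquify(String baseFileName, boolean fNameOverride) {
		return fNameOverride ? baseFileName + "_" + SEQ.incrementAndGet() : baseFileName;
	}
}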


[2/4] systemml git commit: [MINOR] Performance createvar instruction, cleanup data handles

Posted by mb...@apache.org.
[MINOR] Performance createvar instruction, cleanup data handles

This patch improves the performance of string concatenation for
createvar instructions with unique filenames and removes the unnecessary
variable name from all data objects (matrices, frames, scalars).
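
This cleanup is visible throughout the diffs below: scalar data objects are now constructed from the value alone, without a variable name. A minimal sketch of the new constructor shapes, assuming the post-commit signatures shown in the hunks; the demo class itself is hypothetical.

import org.apache.sysml.runtime.instructions.cp.BooleanObject;
import org.apache.sysml.runtime.instructions.cp.DoubleObject;
import org.apache.sysml.runtime.instructions.cp.IntObject;
import org.apache.sysml.runtime.instructions.cp.StringObject;

public class ScalarObjectDemo {
	public static void main(String[] args) {
		//previously each constructor also took a variable name, e.g. new IntObject("i", 7)
		IntObject i = new IntObject(7);
		DoubleObject d = new DoubleObject(3.14);
		BooleanObject b = new BooleanObject(true);
		StringObject s = new StringObject("hello");
		System.out.println(i.getLongValue() + " " + d.getDoubleValue()
			+ " " + b.getBooleanValue() + " " + s.getStringValue());
	}
}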
 

Project: http://git-wip-us.apache.org/repos/asf/systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/systemml/commit/ffefd8e6
Tree: http://git-wip-us.apache.org/repos/asf/systemml/tree/ffefd8e6
Diff: http://git-wip-us.apache.org/repos/asf/systemml/diff/ffefd8e6

Branch: refs/heads/master
Commit: ffefd8e68defb7eb5412f92ddfd8e48046e6532c
Parents: c961432
Author: Matthias Boehm <mb...@gmail.com>
Authored: Thu Nov 9 20:03:47 2017 -0800
Committer: Matthias Boehm <mb...@gmail.com>
Committed: Thu Nov 9 22:08:02 2017 -0800

----------------------------------------------------------------------
 .../apache/sysml/api/jmlc/PreparedScript.java   |   8 +-
 .../api/mlcontext/MLContextConversionUtil.java  | 138 +++++++------------
 .../sysml/api/mlcontext/MLContextUtil.java      |  38 ++---
 .../runtime/controlprogram/ForProgramBlock.java |  12 +-
 .../controlprogram/ParForProgramBlock.java      |  16 +--
 .../runtime/controlprogram/ProgramBlock.java    |   8 +-
 .../controlprogram/caching/CacheableData.java   |  42 +++---
 .../controlprogram/caching/FrameObject.java     |   7 +-
 .../controlprogram/caching/MatrixObject.java    |  13 +-
 .../context/ExecutionContext.java               |   5 +-
 .../context/SparkExecutionContext.java          |  22 +--
 .../controlprogram/parfor/DataPartitioner.java  |   9 +-
 .../controlprogram/parfor/ParWorker.java        |  11 +-
 .../controlprogram/parfor/ProgramConverter.java |  12 +-
 .../parfor/RemoteDPParForSparkWorker.java       |   4 +-
 .../parfor/RemoteDPParWorkerReducer.java        |   4 +-
 .../parfor/ResultMergeLocalFile.java            |  59 ++++----
 .../parfor/ResultMergeLocalMemory.java          |  26 ++--
 .../parfor/ResultMergeRemoteMR.java             |  10 +-
 .../parfor/ResultMergeRemoteSpark.java          |  20 ++-
 .../runtime/controlprogram/parfor/Task.java     |  31 +++--
 .../parfor/TaskPartitionerFactoring.java        |  22 ++-
 .../parfor/TaskPartitionerFixedsize.java        |  24 ++--
 .../cp/AggregateUnaryCPInstruction.java         |   8 +-
 .../runtime/instructions/cp/BooleanObject.java  |   8 +-
 .../cp/CentralMomentCPInstruction.java          |   3 +-
 .../cp/CovarianceCPInstruction.java             |   4 +-
 .../sysml/runtime/instructions/cp/Data.java     |  17 +--
 .../runtime/instructions/cp/DoubleObject.java   |   6 +-
 .../cp/FunctionCallCPInstruction.java           |   2 -
 .../runtime/instructions/cp/IntObject.java      |   8 +-
 .../runtime/instructions/cp/ScalarObject.java   |  11 +-
 .../runtime/instructions/cp/StringObject.java   |   8 +-
 .../cp/UaggOuterChainCPInstruction.java         |   2 +-
 .../instructions/cp/VariableCPInstruction.java  |  11 +-
 .../cpfile/MatrixIndexingCPFileInstruction.java |   7 +-
 .../ParameterizedBuiltinCPFileInstruction.java  |  13 +-
 .../spark/CentralMomentSPInstruction.java       |   3 +-
 .../spark/CheckpointSPInstruction.java          |   2 +-
 .../spark/CovarianceSPInstruction.java          |   3 +-
 .../spark/data/BroadcastObject.java             |   4 +-
 .../instructions/spark/data/DatasetObject.java  |   8 +-
 .../instructions/spark/data/LineageObject.java  |  10 +-
 .../instructions/spark/data/RDDObject.java      |   4 +-
 .../ExternalFunctionInvocationInstruction.java  |  12 +-
 45 files changed, 275 insertions(+), 420 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/api/jmlc/PreparedScript.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/api/jmlc/PreparedScript.java b/src/main/java/org/apache/sysml/api/jmlc/PreparedScript.java
index 3dc0db7..dac211e 100644
--- a/src/main/java/org/apache/sysml/api/jmlc/PreparedScript.java
+++ b/src/main/java/org/apache/sysml/api/jmlc/PreparedScript.java
@@ -142,7 +142,7 @@ public class PreparedScript
 	 * @throws DMLException if DMLException occurs
 	 */
 	public void setScalar(String varname, boolean scalar, boolean reuse) throws DMLException {
-		setScalar(varname, new BooleanObject(varname, scalar), reuse);
+		setScalar(varname, new BooleanObject(scalar), reuse);
 	}
 	
 	/**
@@ -165,7 +165,7 @@ public class PreparedScript
 	 * @throws DMLException if DMLException occurs
 	 */
 	public void setScalar(String varname, long scalar, boolean reuse) throws DMLException {
-		setScalar(varname, new IntObject(varname, scalar), reuse);
+		setScalar(varname, new IntObject(scalar), reuse);
 	}
 	
 	/** Binds a scalar double to a registered input variable.
@@ -187,7 +187,7 @@ public class PreparedScript
 	 * @throws DMLException if DMLException occurs
 	 */
 	public void setScalar(String varname, double scalar, boolean reuse) throws DMLException {
-		setScalar(varname, new DoubleObject(varname, scalar), reuse);
+		setScalar(varname, new DoubleObject(scalar), reuse);
 	}
 	
 	/**
@@ -210,7 +210,7 @@ public class PreparedScript
 	 * @throws DMLException if DMLException occurs
 	 */
 	public void setScalar(String varname, String scalar, boolean reuse) throws DMLException {
-		setScalar(varname, new StringObject(varname, scalar), reuse);
+		setScalar(varname, new StringObject(scalar), reuse);
 	}
 
 	/**

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java b/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java
index 41e8551..1536f8f 100644
--- a/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java
+++ b/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java
@@ -124,23 +124,21 @@ public class MLContextConversionUtil {
 	/**
 	 * Convert a matrix at a URL to a {@code MatrixObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param url
 	 *            the URL to a matrix (in CSV or IJV format)
 	 * @param matrixMetadata
 	 *            the matrix metadata
 	 * @return the matrix at a URL converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject urlToMatrixObject(String variableName, URL url, MatrixMetadata matrixMetadata) {
+	public static MatrixObject urlToMatrixObject(URL url, MatrixMetadata matrixMetadata) {
 		try {
 			InputStream is = url.openStream();
 			List<String> lines = IOUtils.readLines(is);
 			JavaRDD<String> javaRDD = jsc().parallelize(lines);
 			if ((matrixMetadata == null) || (matrixMetadata.getMatrixFormat() == MatrixFormat.CSV)) {
-				return javaRDDStringCSVToMatrixObject(variableName, javaRDD, matrixMetadata);
+				return javaRDDStringCSVToMatrixObject(javaRDD, matrixMetadata);
 			} else if (matrixMetadata.getMatrixFormat() == MatrixFormat.IJV) {
-				return javaRDDStringIJVToMatrixObject(variableName, javaRDD, matrixMetadata);
+				return javaRDDStringIJVToMatrixObject(javaRDD, matrixMetadata);
 			}
 			return null;
 		} catch (Exception e) {
@@ -206,25 +204,21 @@ public class MLContextConversionUtil {
 	 * Convert a {@code JavaPairRDD<MatrixIndexes, MatrixBlock>} to a
 	 * {@code MatrixObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param binaryBlocks
 	 *            {@code JavaPairRDD<MatrixIndexes, MatrixBlock>} representation
 	 *            of a binary-block matrix
 	 * @return the {@code JavaPairRDD<MatrixIndexes, MatrixBlock>} matrix
 	 *         converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject binaryBlocksToMatrixObject(String variableName,
+	public static MatrixObject binaryBlocksToMatrixObject(
 			JavaPairRDD<MatrixIndexes, MatrixBlock> binaryBlocks) {
-		return binaryBlocksToMatrixObject(variableName, binaryBlocks, null);
+		return binaryBlocksToMatrixObject(binaryBlocks, null);
 	}
 
 	/**
 	 * Convert a {@code JavaPairRDD<MatrixIndexes, MatrixBlock>} to a
 	 * {@code MatrixObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param binaryBlocks
 	 *            {@code JavaPairRDD<MatrixIndexes, MatrixBlock>} representation
 	 *            of a binary-block matrix
@@ -233,21 +227,20 @@ public class MLContextConversionUtil {
 	 * @return the {@code JavaPairRDD<MatrixIndexes, MatrixBlock>} matrix
 	 *         converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject binaryBlocksToMatrixObject(String variableName,
+	public static MatrixObject binaryBlocksToMatrixObject(
 			JavaPairRDD<MatrixIndexes, MatrixBlock> binaryBlocks, MatrixMetadata matrixMetadata) {
-		return binaryBlocksToMatrixObject(variableName, binaryBlocks, matrixMetadata, true);
+		return binaryBlocksToMatrixObject(binaryBlocks, matrixMetadata, true);
 	}
 
-	private static MatrixObject binaryBlocksToMatrixObject(String variableName,
-			JavaPairRDD<MatrixIndexes, MatrixBlock> binaryBlocks, MatrixMetadata matrixMetadata, boolean copy) {
+	private static MatrixObject binaryBlocksToMatrixObject(JavaPairRDD<MatrixIndexes, MatrixBlock> binaryBlocks,
+		MatrixMetadata matrixMetadata, boolean copy) {
 
 		MatrixCharacteristics mc = (matrixMetadata != null) ? matrixMetadata.asMatrixCharacteristics()
 				: new MatrixCharacteristics();
 		JavaPairRDD<MatrixIndexes, MatrixBlock> javaPairRdd = SparkUtils.copyBinaryBlockMatrix(binaryBlocks, copy);
-
 		MatrixObject matrixObject = new MatrixObject(ValueType.DOUBLE, OptimizerUtils.getUniqueTempFileName(),
 				new MetaDataFormat(mc, OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo));
-		matrixObject.setRDDHandle(new RDDObject(javaPairRdd, variableName));
+		matrixObject.setRDDHandle(new RDDObject(javaPairRdd));
 		return matrixObject;
 	}
 
@@ -279,24 +272,19 @@ public class MLContextConversionUtil {
 	/**
 	 * Convert a {@code JavaPairRDD<Long, FrameBlock>} to a {@code FrameObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param binaryBlocks
 	 *            {@code JavaPairRDD<Long, FrameBlock>} representation of a
 	 *            binary-block frame
 	 * @return the {@code JavaPairRDD<Long, FrameBlock>} frame converted to a
 	 *         {@code FrameObject}
 	 */
-	public static FrameObject binaryBlocksToFrameObject(String variableName,
-			JavaPairRDD<Long, FrameBlock> binaryBlocks) {
-		return binaryBlocksToFrameObject(variableName, binaryBlocks, null);
+	public static FrameObject binaryBlocksToFrameObject(JavaPairRDD<Long, FrameBlock> binaryBlocks) {
+		return binaryBlocksToFrameObject(binaryBlocks, null);
 	}
 
 	/**
 	 * Convert a {@code JavaPairRDD<Long, FrameBlock>} to a {@code FrameObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param binaryBlocks
 	 *            {@code JavaPairRDD<Long, FrameBlock>} representation of a
 	 *            binary-block frame
@@ -305,7 +293,7 @@ public class MLContextConversionUtil {
 	 * @return the {@code JavaPairRDD<Long, FrameBlock>} frame converted to a
 	 *         {@code FrameObject}
 	 */
-	public static FrameObject binaryBlocksToFrameObject(String variableName, JavaPairRDD<Long, FrameBlock> binaryBlocks,
+	public static FrameObject binaryBlocksToFrameObject(JavaPairRDD<Long, FrameBlock> binaryBlocks,
 			FrameMetadata frameMetadata) {
 
 		MatrixCharacteristics mc = (frameMetadata != null) ? 
@@ -316,29 +304,25 @@ public class MLContextConversionUtil {
 		
 		FrameObject frameObject = new FrameObject(OptimizerUtils.getUniqueTempFileName(),
 			new MetaDataFormat(mc, OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo), schema);
-		frameObject.setRDDHandle(new RDDObject(binaryBlocks, variableName));
+		frameObject.setRDDHandle(new RDDObject(binaryBlocks));
 		return frameObject;
 	}
 
 	/**
 	 * Convert a {@code DataFrame} to a {@code MatrixObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param dataFrame
 	 *            the Spark {@code DataFrame}
 	 * @return the {@code DataFrame} matrix converted to a
 	 *         {@code MatrixObject}
 	 */
-	public static MatrixObject dataFrameToMatrixObject(String variableName, Dataset<Row> dataFrame) {
-		return dataFrameToMatrixObject(variableName, dataFrame, null);
+	public static MatrixObject dataFrameToMatrixObject(Dataset<Row> dataFrame) {
+		return dataFrameToMatrixObject(dataFrame, null);
 	}
 
 	/**
 	 * Convert a {@code DataFrame} to a {@code MatrixObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param dataFrame
 	 *            the Spark {@code DataFrame}
 	 * @param matrixMetadata
@@ -346,14 +330,14 @@ public class MLContextConversionUtil {
 	 * @return the {@code DataFrame} matrix converted to a
 	 *         {@code MatrixObject}
 	 */
-	public static MatrixObject dataFrameToMatrixObject(String variableName, Dataset<Row> dataFrame,
+	public static MatrixObject dataFrameToMatrixObject(Dataset<Row> dataFrame,
 			MatrixMetadata matrixMetadata) {
 		matrixMetadata = (matrixMetadata != null) ? matrixMetadata : new MatrixMetadata();
 		JavaPairRDD<MatrixIndexes, MatrixBlock> binaryBlock = dataFrameToMatrixBinaryBlocks(dataFrame, matrixMetadata);
-		MatrixObject mo = binaryBlocksToMatrixObject(variableName, binaryBlock, matrixMetadata, false);
+		MatrixObject mo = binaryBlocksToMatrixObject(binaryBlock, matrixMetadata, false);
 		// keep lineage of original dataset to allow bypassing binary block
 		// conversion if possible
-		mo.getRDDHandle().addLineageChild(new DatasetObject(dataFrame, variableName,
+		mo.getRDDHandle().addLineageChild(new DatasetObject(dataFrame,
 				isDataFrameWithIDColumn(matrixMetadata), isVectorBasedDataFrame(matrixMetadata)));
 		return mo;
 	}
@@ -361,22 +345,18 @@ public class MLContextConversionUtil {
 	/**
 	 * Convert a {@code DataFrame} to a {@code FrameObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param dataFrame
 	 *            the Spark {@code DataFrame}
 	 * @return the {@code DataFrame} matrix converted to a
 	 *         {@code FrameObject}
 	 */
-	public static FrameObject dataFrameToFrameObject(String variableName, Dataset<Row> dataFrame) {
-		return dataFrameToFrameObject(variableName, dataFrame, null);
+	public static FrameObject dataFrameToFrameObject(Dataset<Row> dataFrame) {
+		return dataFrameToFrameObject(dataFrame, null);
 	}
 
 	/**
 	 * Convert a {@code DataFrame} to a {@code FrameObject}.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param dataFrame
 	 *            the Spark {@code DataFrame}
 	 * @param frameMetadata
@@ -384,7 +364,7 @@ public class MLContextConversionUtil {
 	 * @return the {@code DataFrame} frame converted to a
 	 *         {@code FrameObject}
 	 */
-	public static FrameObject dataFrameToFrameObject(String variableName, Dataset<Row> dataFrame,
+	public static FrameObject dataFrameToFrameObject(Dataset<Row> dataFrame,
 			FrameMetadata frameMetadata) {
 		try {
 			// setup meta data and java spark context
@@ -405,7 +385,7 @@ public class MLContextConversionUtil {
 			frameMetadata.setMatrixCharacteristics(mc); // required due to meta
 														// data copy
 
-			return MLContextConversionUtil.binaryBlocksToFrameObject(variableName, binaryBlock, frameMetadata);
+			return MLContextConversionUtil.binaryBlocksToFrameObject(binaryBlock, frameMetadata);
 		} catch (DMLRuntimeException e) {
 			throw new MLContextException("Exception converting DataFrame to FrameObject", e);
 		}
@@ -590,28 +570,24 @@ public class MLContextConversionUtil {
 	/**
 	 * Convert a {@code JavaRDD<String>} in CSV format to a {@code MatrixObject}
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param javaRDD
 	 *            the Java RDD of strings
 	 * @return the {@code JavaRDD<String>} converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject javaRDDStringCSVToMatrixObject(String variableName, JavaRDD<String> javaRDD) {
-		return javaRDDStringCSVToMatrixObject(variableName, javaRDD, null);
+	public static MatrixObject javaRDDStringCSVToMatrixObject(JavaRDD<String> javaRDD) {
+		return javaRDDStringCSVToMatrixObject(javaRDD, null);
 	}
 
 	/**
 	 * Convert a {@code JavaRDD<String>} in CSV format to a {@code MatrixObject}
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param javaRDD
 	 *            the Java RDD of strings
 	 * @param matrixMetadata
 	 *            matrix metadata
 	 * @return the {@code JavaRDD<String>} converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject javaRDDStringCSVToMatrixObject(String variableName, JavaRDD<String> javaRDD,
+	public static MatrixObject javaRDDStringCSVToMatrixObject(JavaRDD<String> javaRDD,
 			MatrixMetadata matrixMetadata) {
 		JavaPairRDD<LongWritable, Text> javaPairRDD = javaRDD.mapToPair(new ConvertStringToLongTextPair());
 		MatrixCharacteristics mc = (matrixMetadata != null) ? matrixMetadata.asMatrixCharacteristics()
@@ -620,35 +596,31 @@ public class MLContextConversionUtil {
 		MatrixObject matrixObject = new MatrixObject(ValueType.DOUBLE, OptimizerUtils.getUniqueTempFileName(),
 				new MetaDataFormat(mc, OutputInfo.CSVOutputInfo, InputInfo.CSVInputInfo));
 		JavaPairRDD<LongWritable, Text> javaPairRDD2 = javaPairRDD.mapToPair(new CopyTextInputFunction());
-		matrixObject.setRDDHandle(new RDDObject(javaPairRDD2, variableName));
+		matrixObject.setRDDHandle(new RDDObject(javaPairRDD2));
 		return matrixObject;
 	}
 
 	/**
 	 * Convert a {@code JavaRDD<String>} in CSV format to a {@code FrameObject}
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param javaRDD
 	 *            the Java RDD of strings
 	 * @return the {@code JavaRDD<String>} converted to a {@code FrameObject}
 	 */
-	public static FrameObject javaRDDStringCSVToFrameObject(String variableName, JavaRDD<String> javaRDD) {
-		return javaRDDStringCSVToFrameObject(variableName, javaRDD, null);
+	public static FrameObject javaRDDStringCSVToFrameObject(JavaRDD<String> javaRDD) {
+		return javaRDDStringCSVToFrameObject(javaRDD, null);
 	}
 
 	/**
 	 * Convert a {@code JavaRDD<String>} in CSV format to a {@code FrameObject}
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param javaRDD
 	 *            the Java RDD of strings
 	 * @param frameMetadata
 	 *            frame metadata
 	 * @return the {@code JavaRDD<String>} converted to a {@code FrameObject}
 	 */
-	public static FrameObject javaRDDStringCSVToFrameObject(String variableName, JavaRDD<String> javaRDD,
+	public static FrameObject javaRDDStringCSVToFrameObject(JavaRDD<String> javaRDD,
 			FrameMetadata frameMetadata) {
 		JavaPairRDD<LongWritable, Text> javaPairRDD = javaRDD.mapToPair(new ConvertStringToLongTextPair());
 		MatrixCharacteristics mc = (frameMetadata != null) ? frameMetadata.asMatrixCharacteristics()
@@ -666,7 +638,7 @@ public class MLContextConversionUtil {
 			e.printStackTrace();
 			return null;
 		}
-		frameObject.setRDDHandle(new RDDObject(rdd, variableName));
+		frameObject.setRDDHandle(new RDDObject(rdd));
 		return frameObject;
 	}
 
@@ -674,15 +646,13 @@ public class MLContextConversionUtil {
 	 * Convert a {@code JavaRDD<String>} in IJV format to a {@code MatrixObject}
 	 * . Note that metadata is required for IJV format.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param javaRDD
 	 *            the Java RDD of strings
 	 * @param matrixMetadata
 	 *            matrix metadata
 	 * @return the {@code JavaRDD<String>} converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject javaRDDStringIJVToMatrixObject(String variableName, JavaRDD<String> javaRDD,
+	public static MatrixObject javaRDDStringIJVToMatrixObject(JavaRDD<String> javaRDD,
 			MatrixMetadata matrixMetadata) {
 		JavaPairRDD<LongWritable, Text> javaPairRDD = javaRDD.mapToPair(new ConvertStringToLongTextPair());
 		MatrixCharacteristics mc = (matrixMetadata != null) ? matrixMetadata.asMatrixCharacteristics()
@@ -691,7 +661,7 @@ public class MLContextConversionUtil {
 		MatrixObject matrixObject = new MatrixObject(ValueType.DOUBLE, OptimizerUtils.getUniqueTempFileName(),
 				new MetaDataFormat(mc, OutputInfo.TextCellOutputInfo, InputInfo.TextCellInputInfo));
 		JavaPairRDD<LongWritable, Text> javaPairRDD2 = javaPairRDD.mapToPair(new CopyTextInputFunction());
-		matrixObject.setRDDHandle(new RDDObject(javaPairRDD2, variableName));
+		matrixObject.setRDDHandle(new RDDObject(javaPairRDD2));
 		return matrixObject;
 	}
 
@@ -699,15 +669,13 @@ public class MLContextConversionUtil {
 	 * Convert a {@code JavaRDD<String>} in IJV format to a {@code FrameObject}
 	 * . Note that metadata is required for IJV format.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param javaRDD
 	 *            the Java RDD of strings
 	 * @param frameMetadata
 	 *            frame metadata
 	 * @return the {@code JavaRDD<String>} converted to a {@code FrameObject}
 	 */
-	public static FrameObject javaRDDStringIJVToFrameObject(String variableName, JavaRDD<String> javaRDD,
+	public static FrameObject javaRDDStringIJVToFrameObject(JavaRDD<String> javaRDD,
 			FrameMetadata frameMetadata) {
 		JavaPairRDD<LongWritable, Text> javaPairRDD = javaRDD.mapToPair(new ConvertStringToLongTextPair());
 		MatrixCharacteristics mc = (frameMetadata != null) ? frameMetadata.asMatrixCharacteristics()
@@ -728,108 +696,96 @@ public class MLContextConversionUtil {
 			e.printStackTrace();
 			return null;
 		}
-		frameObject.setRDDHandle(new RDDObject(rdd, variableName));
+		frameObject.setRDDHandle(new RDDObject(rdd));
 		return frameObject;
 	}
 
 	/**
 	 * Convert a {@code RDD<String>} in CSV format to a {@code MatrixObject}
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param rdd
 	 *            the RDD of strings
 	 * @return the {@code RDD<String>} converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject rddStringCSVToMatrixObject(String variableName, RDD<String> rdd) {
-		return rddStringCSVToMatrixObject(variableName, rdd, null);
+	public static MatrixObject rddStringCSVToMatrixObject(RDD<String> rdd) {
+		return rddStringCSVToMatrixObject(rdd, null);
 	}
 
 	/**
 	 * Convert a {@code RDD<String>} in CSV format to a {@code MatrixObject}
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param rdd
 	 *            the RDD of strings
 	 * @param matrixMetadata
 	 *            matrix metadata
 	 * @return the {@code RDD<String>} converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject rddStringCSVToMatrixObject(String variableName, RDD<String> rdd,
+	public static MatrixObject rddStringCSVToMatrixObject(RDD<String> rdd,
 			MatrixMetadata matrixMetadata) {
 		ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
 		JavaRDD<String> javaRDD = JavaRDD.fromRDD(rdd, tag);
-		return javaRDDStringCSVToMatrixObject(variableName, javaRDD, matrixMetadata);
+		return javaRDDStringCSVToMatrixObject(javaRDD, matrixMetadata);
 	}
 
 	/**
 	 * Convert a {@code RDD<String>} in CSV format to a {@code FrameObject}
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param rdd
 	 *            the RDD of strings
 	 * @return the {@code RDD<String>} converted to a {@code FrameObject}
 	 */
-	public static FrameObject rddStringCSVToFrameObject(String variableName, RDD<String> rdd) {
-		return rddStringCSVToFrameObject(variableName, rdd, null);
+	public static FrameObject rddStringCSVToFrameObject(RDD<String> rdd) {
+		return rddStringCSVToFrameObject(rdd, null);
 	}
 
 	/**
 	 * Convert a {@code RDD<String>} in CSV format to a {@code FrameObject}
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param rdd
 	 *            the RDD of strings
 	 * @param frameMetadata
 	 *            frame metadata
 	 * @return the {@code RDD<String>} converted to a {@code FrameObject}
 	 */
-	public static FrameObject rddStringCSVToFrameObject(String variableName, RDD<String> rdd,
+	public static FrameObject rddStringCSVToFrameObject(RDD<String> rdd,
 			FrameMetadata frameMetadata) {
 		ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
 		JavaRDD<String> javaRDD = JavaRDD.fromRDD(rdd, tag);
-		return javaRDDStringCSVToFrameObject(variableName, javaRDD, frameMetadata);
+		return javaRDDStringCSVToFrameObject(javaRDD, frameMetadata);
 	}
 
 	/**
 	 * Convert a {@code RDD<String>} in IJV format to a {@code MatrixObject}.
 	 * Note that metadata is required for IJV format.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the matrix
 	 * @param rdd
 	 *            the RDD of strings
 	 * @param matrixMetadata
 	 *            matrix metadata
 	 * @return the {@code RDD<String>} converted to a {@code MatrixObject}
 	 */
-	public static MatrixObject rddStringIJVToMatrixObject(String variableName, RDD<String> rdd,
+	public static MatrixObject rddStringIJVToMatrixObject(RDD<String> rdd,
 			MatrixMetadata matrixMetadata) {
 		ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
 		JavaRDD<String> javaRDD = JavaRDD.fromRDD(rdd, tag);
-		return javaRDDStringIJVToMatrixObject(variableName, javaRDD, matrixMetadata);
+		return javaRDDStringIJVToMatrixObject(javaRDD, matrixMetadata);
 	}
 
 	/**
 	 * Convert a {@code RDD<String>} in IJV format to a {@code FrameObject}.
 	 * Note that metadata is required for IJV format.
 	 *
-	 * @param variableName
-	 *            name of the variable associated with the frame
 	 * @param rdd
 	 *            the RDD of strings
 	 * @param frameMetadata
 	 *            frame metadata
 	 * @return the {@code RDD<String>} converted to a {@code FrameObject}
 	 */
-	public static FrameObject rddStringIJVToFrameObject(String variableName, RDD<String> rdd,
+	public static FrameObject rddStringIJVToFrameObject(RDD<String> rdd,
 			FrameMetadata frameMetadata) {
 		ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
 		JavaRDD<String> javaRDD = JavaRDD.fromRDD(rdd, tag);
-		return javaRDDStringIJVToFrameObject(variableName, javaRDD, frameMetadata);
+		return javaRDDStringIJVToFrameObject(javaRDD, frameMetadata);
 	}
 
 	/**

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/api/mlcontext/MLContextUtil.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/api/mlcontext/MLContextUtil.java b/src/main/java/org/apache/sysml/api/mlcontext/MLContextUtil.java
index 6c1788a..af99ab1 100644
--- a/src/main/java/org/apache/sysml/api/mlcontext/MLContextUtil.java
+++ b/src/main/java/org/apache/sysml/api/mlcontext/MLContextUtil.java
@@ -533,24 +533,24 @@ public final class MLContextUtil {
 			if (hasMatrixMetadata) {
 				MatrixMetadata matrixMetadata = (MatrixMetadata) metadata;
 				if (matrixMetadata.getMatrixFormat() == MatrixFormat.IJV) {
-					return MLContextConversionUtil.javaRDDStringIJVToMatrixObject(name, javaRDD, matrixMetadata);
+					return MLContextConversionUtil.javaRDDStringIJVToMatrixObject(javaRDD, matrixMetadata);
 				} else {
-					return MLContextConversionUtil.javaRDDStringCSVToMatrixObject(name, javaRDD, matrixMetadata);
+					return MLContextConversionUtil.javaRDDStringCSVToMatrixObject(javaRDD, matrixMetadata);
 				}
 			} else if (hasFrameMetadata) {
 				FrameMetadata frameMetadata = (FrameMetadata) metadata;
 				if (frameMetadata.getFrameFormat() == FrameFormat.IJV) {
-					return MLContextConversionUtil.javaRDDStringIJVToFrameObject(name, javaRDD, frameMetadata);
+					return MLContextConversionUtil.javaRDDStringIJVToFrameObject(javaRDD, frameMetadata);
 				} else {
-					return MLContextConversionUtil.javaRDDStringCSVToFrameObject(name, javaRDD, frameMetadata);
+					return MLContextConversionUtil.javaRDDStringCSVToFrameObject(javaRDD, frameMetadata);
 				}
 			} else if (!hasMetadata) {
 				String firstLine = javaRDD.first();
 				boolean isAllNumbers = isCSVLineAllNumbers(firstLine);
 				if (isAllNumbers) {
-					return MLContextConversionUtil.javaRDDStringCSVToMatrixObject(name, javaRDD);
+					return MLContextConversionUtil.javaRDDStringCSVToMatrixObject(javaRDD);
 				} else {
-					return MLContextConversionUtil.javaRDDStringCSVToFrameObject(name, javaRDD);
+					return MLContextConversionUtil.javaRDDStringCSVToFrameObject(javaRDD);
 				}
 			}
 
@@ -561,24 +561,24 @@ public final class MLContextUtil {
 			if (hasMatrixMetadata) {
 				MatrixMetadata matrixMetadata = (MatrixMetadata) metadata;
 				if (matrixMetadata.getMatrixFormat() == MatrixFormat.IJV) {
-					return MLContextConversionUtil.rddStringIJVToMatrixObject(name, rdd, matrixMetadata);
+					return MLContextConversionUtil.rddStringIJVToMatrixObject(rdd, matrixMetadata);
 				} else {
-					return MLContextConversionUtil.rddStringCSVToMatrixObject(name, rdd, matrixMetadata);
+					return MLContextConversionUtil.rddStringCSVToMatrixObject(rdd, matrixMetadata);
 				}
 			} else if (hasFrameMetadata) {
 				FrameMetadata frameMetadata = (FrameMetadata) metadata;
 				if (frameMetadata.getFrameFormat() == FrameFormat.IJV) {
-					return MLContextConversionUtil.rddStringIJVToFrameObject(name, rdd, frameMetadata);
+					return MLContextConversionUtil.rddStringIJVToFrameObject(rdd, frameMetadata);
 				} else {
-					return MLContextConversionUtil.rddStringCSVToFrameObject(name, rdd, frameMetadata);
+					return MLContextConversionUtil.rddStringCSVToFrameObject(rdd, frameMetadata);
 				}
 			} else if (!hasMetadata) {
 				String firstLine = rdd.first();
 				boolean isAllNumbers = isCSVLineAllNumbers(firstLine);
 				if (isAllNumbers) {
-					return MLContextConversionUtil.rddStringCSVToMatrixObject(name, rdd);
+					return MLContextConversionUtil.rddStringCSVToMatrixObject(rdd);
 				} else {
-					return MLContextConversionUtil.rddStringCSVToFrameObject(name, rdd);
+					return MLContextConversionUtil.rddStringCSVToFrameObject(rdd);
 				}
 			}
 		} else if (value instanceof MatrixBlock) {
@@ -593,15 +593,15 @@ public final class MLContextUtil {
 
 			dataFrame = MLUtils.convertVectorColumnsToML(dataFrame);
 			if (hasMatrixMetadata) {
-				return MLContextConversionUtil.dataFrameToMatrixObject(name, dataFrame, (MatrixMetadata) metadata);
+				return MLContextConversionUtil.dataFrameToMatrixObject(dataFrame, (MatrixMetadata) metadata);
 			} else if (hasFrameMetadata) {
-				return MLContextConversionUtil.dataFrameToFrameObject(name, dataFrame, (FrameMetadata) metadata);
+				return MLContextConversionUtil.dataFrameToFrameObject(dataFrame, (FrameMetadata) metadata);
 			} else if (!hasMetadata) {
 				boolean looksLikeMatrix = doesDataFrameLookLikeMatrix(dataFrame);
 				if (looksLikeMatrix) {
-					return MLContextConversionUtil.dataFrameToMatrixObject(name, dataFrame);
+					return MLContextConversionUtil.dataFrameToMatrixObject(dataFrame);
 				} else {
-					return MLContextConversionUtil.dataFrameToFrameObject(name, dataFrame);
+					return MLContextConversionUtil.dataFrameToFrameObject(dataFrame);
 				}
 			}
 		} else if (value instanceof Matrix) {
@@ -611,7 +611,7 @@ public final class MLContextUtil {
 					metadata = matrix.getMatrixMetadata();
 				}
 				JavaPairRDD<MatrixIndexes, MatrixBlock> binaryBlocks = matrix.toBinaryBlocks();
-				return MLContextConversionUtil.binaryBlocksToMatrixObject(name, binaryBlocks,
+				return MLContextConversionUtil.binaryBlocksToMatrixObject(binaryBlocks,
 						(MatrixMetadata) metadata);
 			} else {
 				return matrix.toMatrixObject();
@@ -623,7 +623,7 @@ public final class MLContextUtil {
 					metadata = frame.getFrameMetadata();
 				}
 				JavaPairRDD<Long, FrameBlock> binaryBlocks = frame.toBinaryBlocks();
-				return MLContextConversionUtil.binaryBlocksToFrameObject(name, binaryBlocks, (FrameMetadata) metadata);
+				return MLContextConversionUtil.binaryBlocksToFrameObject(binaryBlocks, (FrameMetadata) metadata);
 			} else {
 				return frame.toFrameObject();
 			}
@@ -632,7 +632,7 @@ public final class MLContextUtil {
 			return MLContextConversionUtil.doubleMatrixToMatrixObject(name, doubleMatrix, (MatrixMetadata) metadata);
 		} else if (value instanceof URL) {
 			URL url = (URL) value;
-			return MLContextConversionUtil.urlToMatrixObject(name, url, (MatrixMetadata) metadata);
+			return MLContextConversionUtil.urlToMatrixObject(url, (MatrixMetadata) metadata);
 		} else if (value instanceof Integer) {
 			return new IntObject((Integer) value);
 		} else if (value instanceof Double) {

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/ForProgramBlock.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/ForProgramBlock.java b/src/main/java/org/apache/sysml/runtime/controlprogram/ForProgramBlock.java
index ec7a16f..c28825e 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/ForProgramBlock.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/ForProgramBlock.java
@@ -121,7 +121,7 @@ public class ForProgramBlock extends ProgramBlock
 			UpdateType[] flags = prepareUpdateInPlaceVariables(ec, _tid);
 			
 			// run for loop body for each instance of predicate sequence 
-			SequenceIterator seqIter = new SequenceIterator(_iterPredVar, from, to, incr);
+			SequenceIterator seqIter = new SequenceIterator(from, to, incr);
 			for( IntObject iterVar : seqIter ) 
 			{
 				//set iteration variable
@@ -131,7 +131,7 @@ public class ForProgramBlock extends ProgramBlock
 				for(int i=0 ; i < this._childBlocks.size() ; i++) {
 					ec.updateDebugState( i );
 					_childBlocks.get(i).execute(ec);
-				}				
+				}
 			}
 			
 			// reset update-in-place variables
@@ -201,7 +201,7 @@ public class ForProgramBlock extends ProgramBlock
 		if( tmp instanceof IntObject )
 			ret = (IntObject)tmp;
 		else //downcast to int if necessary
-			ret = new IntObject(tmp.getName(),tmp.getLongValue()); 
+			ret = new IntObject(tmp.getLongValue()); 
 		
 		return ret;
 	}
@@ -216,14 +216,12 @@ public class ForProgramBlock extends ProgramBlock
 	 */
 	protected class SequenceIterator implements Iterator<IntObject>, Iterable<IntObject>
 	{
-		private String _varName = null;
 		private long _cur = -1;
 		private long _to = -1;
 		private long _incr = -1;
 		private boolean _inuse = false;
 		
-		protected SequenceIterator(String varName, IntObject from, IntObject to, IntObject incr) {
-			_varName = varName;
+		protected SequenceIterator(IntObject from, IntObject to, IntObject incr) {
 			_cur = from.getLongValue();
 			_to = to.getLongValue();
 			_incr = incr.getLongValue();
@@ -236,7 +234,7 @@ public class ForProgramBlock extends ProgramBlock
 
 		@Override
 		public IntObject next() {
-			IntObject ret = new IntObject( _varName, _cur );
+			IntObject ret = new IntObject(_cur);
 			_cur += _incr; //update current val
 			return ret;
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java b/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
index 760ddff..4775494 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
@@ -616,7 +616,7 @@ public class ParForProgramBlock extends ForProgramBlock
 			StatisticMonitor.putPFStat(_ID, Stat.PARFOR_INIT_DATA_T, time.stop());
 		
 		// initialize iter var to form value
-		IntObject iterVar = new IntObject(_iterPredVar, from.getLongValue() );
+		IntObject iterVar = new IntObject(from.getLongValue());
 		
 		///////
 		//begin PARALLEL EXECUTION of (PAR)FOR body
@@ -674,7 +674,7 @@ public class ParForProgramBlock extends ForProgramBlock
 		cleanupSharedVariables(ec, varState);
 		
 		//set iteration var to TO value (+ increment) for FOR equivalence
-		iterVar = new IntObject(_iterPredVar, to.getLongValue()); //consistent with for
+		iterVar = new IntObject(to.getLongValue()); //consistent with for
 		ec.setVariable(_iterPredVar, iterVar);
 		
 		//ensure that subsequent program blocks never see partitioned data (invalid plans!)
@@ -982,7 +982,7 @@ public class ParForProgramBlock extends ForProgramBlock
 		OutputInfo inputOI = ((inputMatrix.getSparsity()<0.1 && inputDPF==PartitionFormat.COLUMN_WISE)
 			|| (inputMatrix.getSparsity()<0.001 && inputDPF==PartitionFormat.ROW_WISE)) ?
 			OutputInfo.BinaryCellOutputInfo : OutputInfo.BinaryBlockOutputInfo;
-		RemoteParForJobReturn ret = RemoteDPParForMR.runJob(_ID, itervar.getName(), _colocatedDPMatrix, program, resultFile, 
+		RemoteParForJobReturn ret = RemoteDPParForMR.runJob(_ID, _iterPredVar, _colocatedDPMatrix, program, resultFile, 
 			inputMatrix, inputDPF, inputOI, _tSparseCol, _enableCPCaching, _numThreads, _replicationDP );
 		
 		if( _monitor )
@@ -1107,7 +1107,7 @@ public class ParForProgramBlock extends ForProgramBlock
 		// Step 4) submit MR job (wait for finished work)
 		//TODO runtime support for binary cell partitioning 
 		OutputInfo inputOI = OutputInfo.BinaryBlockOutputInfo;
-		RemoteParForJobReturn ret = RemoteDPParForSpark.runJob(_ID, itervar.getName(), _colocatedDPMatrix, program,
+		RemoteParForJobReturn ret = RemoteDPParForSpark.runJob(_ID, _iterPredVar, _colocatedDPMatrix, program,
 			clsMap, resultFile, inputMatrix, ec, inputDPF, inputOI, _tSparseCol, _enableCPCaching, _numThreads );
 		
 		if( _monitor ) 
@@ -1273,10 +1273,10 @@ public class ParForProgramBlock extends ForProgramBlock
 				{
 					case SCALAR:
 						switch( valuetype ) {
-							case BOOLEAN: dataObj = new BooleanObject(var,false); break;
-							case INT:     dataObj = new IntObject(var,-1);        break;
-							case DOUBLE:  dataObj = new DoubleObject(var,-1d);    break;
-							case STRING:  dataObj = new StringObject(var,"-1");   break;
+							case BOOLEAN: dataObj = new BooleanObject(false); break;
+							case INT:     dataObj = new IntObject(-1);        break;
+							case DOUBLE:  dataObj = new DoubleObject(-1d);    break;
+							case STRING:  dataObj = new StringObject("-1");   break;
 							default:
 								throw new DMLRuntimeException("Value type not supported: "+valuetype);
 						}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java b/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java
index 4ac9af0..34ba2fc 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/ProgramBlock.java
@@ -235,10 +235,10 @@ public class ProgramBlock implements ParseInfo
 		//check and correct scalar ret type (incl save double to int)
 		if( ret.getValueType() != retType )
 			switch( retType ) {
-				case BOOLEAN: ret = new BooleanObject(ret.getName(),ret.getBooleanValue()); break;
-				case INT:	  ret = new IntObject(ret.getName(),ret.getLongValue()); break;
-				case DOUBLE:  ret = new DoubleObject(ret.getName(),ret.getDoubleValue()); break;
-				case STRING:  ret = new StringObject(ret.getName(),ret.getStringValue()); break;
+				case BOOLEAN: ret = new BooleanObject(ret.getBooleanValue()); break;
+				case INT:	  ret = new IntObject(ret.getLongValue()); break;
+				case DOUBLE:  ret = new DoubleObject(ret.getDoubleValue()); break;
+				case STRING:  ret = new StringObject(ret.getStringValue()); break;
 				default:
 					//do nothing
 			}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
index e41b06c..885fedb 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java
@@ -186,7 +186,6 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	// additional private flags and meta data
 	private int     _numReadThreads = 0;   //number of threads for read from HDFS
 	private boolean _cleanupFlag = true;   //flag if obj unpinned (cleanup enabled)	
-	private String  _varName = "";         //plan variable name
 	private String  _cacheFileName = null; //local eviction file name
 	private boolean _requiresLocalWrite = false; //flag if local write for read obj
 	private boolean _isAcquireFromEmpty = false; //flag if read from status empty 
@@ -205,8 +204,8 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 	 * @param vt value type
 	 */
 	protected CacheableData(DataType dt, ValueType vt) {
-		super (dt, vt);		
-		_uniqueID = (int)_seq.getNextID();		
+		super (dt, vt);
+		_uniqueID = (int)_seq.getNextID();
 		_cacheStatus = CacheStatus.EMPTY;
 		_numReadThreads = 0;
 		_gpuObjects = new HashMap<>();
@@ -222,7 +221,6 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		_cleanupFlag = that._cleanupFlag;
 		_hdfsFileName = that._hdfsFileName;
 		_hdfsFileExists = that._hdfsFileExists; 
-		_varName = that._varName;
 		_gpuObjects = that._gpuObjects;
 	}
 
@@ -247,14 +245,6 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		return _cleanupFlag;
 	}
 
-	public void setVarName(String s) {
-		_varName = s;
-	}
-
-	public String getVarName() {
-		return _varName;
-	}
-
 	public boolean isHDFSFileExists() {
 		return _hdfsFileExists;
 	}
@@ -384,7 +374,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		throws CacheException
 	{
 		if( LOG.isTraceEnabled() )
-			LOG.trace("Acquire read "+getVarName());
+			LOG.trace("Acquire read "+hashCode());
 		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
 		
 		if ( !isAvailableToRead() )
@@ -443,7 +433,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 				setDirty(false);
 			}
 			catch (IOException e) {
-				throw new CacheException("Reading of " + _hdfsFileName + " ("+getVarName()+") failed.", e);
+				throw new CacheException("Reading of " + _hdfsFileName + " ("+hashCode()+") failed.", e);
 			}
 			
 			_isAcquireFromEmpty = true;
@@ -481,7 +471,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		throws CacheException
 	{
 		if( LOG.isTraceEnabled() )
-			LOG.trace("Acquire modify "+getVarName());
+			LOG.trace("Acquire modify "+hashCode());
 		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
 		
 		if ( !isAvailableToModify() )
@@ -505,7 +495,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 			}
 			catch (IOException e)
 			{
-				throw new CacheException("Reading of " + _hdfsFileName + " ("+getVarName()+") failed.", e);
+				throw new CacheException("Reading of " + _hdfsFileName + " ("+hashCode()+") failed.", e);
 			}
 		}
 
@@ -544,7 +534,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		throws DMLRuntimeException
 	{
 		if( LOG.isTraceEnabled() )
-			LOG.trace("Acquire modify newdata "+getVarName());
+			LOG.trace("Acquire modify newdata "+hashCode());
 		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
 		
 		if (! isAvailableToModify ())
@@ -602,7 +592,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		throws CacheException
 	{
 		if( LOG.isTraceEnabled() )
-			LOG.trace("Release "+getVarName());
+			LOG.trace("Release "+hashCode());
 		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
 		
 		boolean write = false;
@@ -642,7 +632,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 					}
 				}
 				catch (Exception e) {
-					throw new CacheException("Eviction to local path " + filePath + " ("+getVarName()+") failed.", e);
+					throw new CacheException("Eviction to local path " + filePath + " ("+hashCode()+") failed.", e);
 				}
 				_requiresLocalWrite = false;
 			}
@@ -652,7 +642,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 			_data = null;			
 		}
 		else if( LOG.isTraceEnabled() ){
-			LOG.trace("Var "+getVarName()+" not subject to caching, state="+getStatusAsString());
+			LOG.trace("Var "+hashCode()+" not subject to caching, state="+getStatusAsString());
 		}
 
 		if( DMLScript.STATISTICS ){
@@ -677,7 +667,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		throws DMLRuntimeException
 	{
 		if( LOG.isTraceEnabled() )
-			LOG.trace("Clear data "+getVarName());
+			LOG.trace("Clear data "+hashCode());
 		
 		// check if cleanup enabled and possible 
 		if( !isCleanupEnabled() ) 
@@ -774,7 +764,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		throws CacheException
 	{
 		if( LOG.isTraceEnabled() )
-			LOG.trace("Export data "+getVarName()+" "+fName);
+			LOG.trace("Export data "+hashCode()+" "+fName);
 		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
 		
 		//prevent concurrent modifications
@@ -826,7 +816,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 				}
 				catch (IOException e)
 				{
-				    throw new CacheException("Reading of " + _hdfsFileName + " ("+getVarName()+") failed.", e);
+				    throw new CacheException("Reading of " + _hdfsFileName + " ("+hashCode()+") failed.", e);
 				}
 			}
 			//get object from cache
@@ -926,7 +916,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		long begin = LOG.isTraceEnabled() ? System.currentTimeMillis() : 0;
 		
 		if( LOG.isTraceEnabled() )
-			LOG.trace ("CACHE: Restoring matrix...  " + getVarName() + "  HDFS path: " + 
+			LOG.trace ("CACHE: Restoring matrix...  " + hashCode() + "  HDFS path: " + 
 						(_hdfsFileName == null ? "null" : _hdfsFileName) + ", Restore from path: " + cacheFilePathAndName);
 				
 		if (_data != null)
@@ -959,7 +949,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		String cacheFilePathAndName = getCacheFilePathAndName();
 		long begin = LOG.isTraceEnabled() ? System.currentTimeMillis() : 0;
 		if( LOG.isTraceEnabled() )
-			LOG.trace("CACHE: Freeing evicted matrix...  " + getVarName() + "  HDFS path: " + 
+			LOG.trace("CACHE: Freeing evicted matrix...  " + hashCode() + "  HDFS path: " + 
 						(_hdfsFileName == null ? "null" : _hdfsFileName) + " Eviction path: " + cacheFilePathAndName);
 		
 		LazyWriteBuffer.deleteBlock(cacheFilePathAndName);
@@ -982,7 +972,7 @@ public abstract class CacheableData<T extends CacheBlock> extends Data
 		String debugNameEnding = (_hdfsFileName == null ? "null" : 
 			(_hdfsFileName.length() < maxLength ? _hdfsFileName : "..." + 
 				_hdfsFileName.substring (_hdfsFileName.length() - maxLength + 3)));
-		return getVarName() + " " + debugNameEnding;
+		return hashCode() + " " + debugNameEnding;
 	}
 
 	protected T readBlobFromHDFS(String fname) 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
index b594777..10e924f 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
@@ -223,11 +223,10 @@ public class FrameObject extends CacheableData<FrameBlock>
 		catch(DMLRuntimeException ex) {
 			throw new IOException(ex);
 		}
-				
+		
 		//sanity check correct output
-		if( fb == null ) {
-			throw new IOException("Unable to load frame from rdd: "+lrdd.getVarName());
-		}
+		if( fb == null )
+			throw new IOException("Unable to load frame from rdd.");
 		
 		return fb;
 	}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
index 6486b02..0aaca69 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
@@ -99,7 +99,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 	public MatrixObject( ValueType vt, String file, MetaData mtd ) {
 		super (DataType.MATRIX, vt);
 		_metaData = mtd; 
-		_hdfsFileName = file;		
+		_hdfsFileName = file;
 		_cache = null;
 		_data = null;
 	}
@@ -239,7 +239,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 		throws CacheException
 	{
 		if( LOG.isTraceEnabled() )
-			LOG.trace("Acquire partition "+getVarName()+" "+pred);
+			LOG.trace("Acquire partition "+hashCode()+" "+pred);
 		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
 		
 		if ( !_partitioned )
@@ -424,7 +424,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 		long begin = 0;
 		
 		if( LOG.isTraceEnabled() ) {
-			LOG.trace("Reading matrix from HDFS...  " + getVarName() + "  Path: " + fname 
+			LOG.trace("Reading matrix from HDFS...  " + hashCode() + "  Path: " + fname 
 					+ ", dimensions: [" + mc.getRows() + ", " + mc.getCols() + ", " + mc.getNonZeros() + "]");
 			begin = System.currentTimeMillis();
 		}
@@ -504,9 +504,8 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 		}
 		
 		//sanity check correct output
-		if( mb == null ) {
-			throw new IOException("Unable to load matrix from rdd: "+lrdd.getVarName());
-		}
+		if( mb == null )
+			throw new IOException("Unable to load matrix from rdd.");
 		
 		return mb;
 	}
@@ -520,7 +519,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
 	{
 		long begin = 0;
 		if( LOG.isTraceEnabled() ){
-			LOG.trace (" Writing matrix to HDFS...  " + getVarName() + "  Path: " + fname + ", Format: " +
+			LOG.trace (" Writing matrix to HDFS...  " + hashCode() + "  Path: " + fname + ", Format: " +
 						(ofmt != null ? ofmt : "inferred from metadata"));
 			begin = System.currentTimeMillis();
 		}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java b/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
index 3b9c075..67e91b0 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
@@ -518,12 +518,9 @@ public class ExecutionContext {
 		throws DMLRuntimeException 
 	{
 		FrameObject fo = getFrameObject(varName);
-		if( outputData.getNumColumns()>0 && outputData.getSchema()!=null )
-			fo.setValueType(outputData.getSchema()[0]);
 		fo.acquireModify(outputData);
 		fo.release();
-		    
-	    setVariable(varName, fo);
+		setVariable(varName, fo);
 	}
 	
 	/**

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java b/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
index be95164..467b6fc 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
@@ -369,7 +369,7 @@ public class SparkExecutionContext extends ExecutionContext
 			}
 
 			//keep rdd handle for future operations on it
-			RDDObject rddhandle = new RDDObject(rdd, mo.getVarName());
+			RDDObject rddhandle = new RDDObject(rdd);
 			rddhandle.setHDFSFile(fromFile);
 			mo.setRDDHandle(rddhandle);
 		}
@@ -397,7 +397,7 @@ public class SparkExecutionContext extends ExecutionContext
 			}
 
 			//keep rdd handle for future operations on it
-			RDDObject rddhandle = new RDDObject(rdd, mo.getVarName());
+			RDDObject rddhandle = new RDDObject(rdd);
 			rddhandle.setHDFSFile(true);
 			mo.setRDDHandle(rddhandle);
 		}
@@ -461,7 +461,7 @@ public class SparkExecutionContext extends ExecutionContext
 			}
 
 			//keep rdd handle for future operations on it
-			RDDObject rddhandle = new RDDObject(rdd, fo.getVarName());
+			RDDObject rddhandle = new RDDObject(rdd);
 			rddhandle.setHDFSFile(fromFile);
 			fo.setRDDHandle(rddhandle);
 		}
@@ -488,7 +488,7 @@ public class SparkExecutionContext extends ExecutionContext
 			}
 
 			//keep rdd handle for future operations on it
-			RDDObject rddhandle = new RDDObject(rdd, fo.getVarName());
+			RDDObject rddhandle = new RDDObject(rdd);
 			rddhandle.setHDFSFile(true);
 			fo.setRDDHandle(rddhandle);
 		}
@@ -560,7 +560,7 @@ public class SparkExecutionContext extends ExecutionContext
 			}
 			
 			bret = new PartitionedBroadcast<>(ret);
-			BroadcastObject<MatrixBlock> bchandle = new BroadcastObject<>(bret, varname,
+			BroadcastObject<MatrixBlock> bchandle = new BroadcastObject<>(bret,
 					OptimizerUtils.estimatePartitionedSizeExactSparsity(mo.getMatrixCharacteristics()));
 			mo.setBroadcastHandle(bchandle);
 			CacheableData.addBroadcastSize(bchandle.getSize());
@@ -630,7 +630,7 @@ public class SparkExecutionContext extends ExecutionContext
 			}
 
 			bret = new PartitionedBroadcast<>(ret);
-			BroadcastObject<FrameBlock> bchandle = new BroadcastObject<>(bret, varname,
+			BroadcastObject<FrameBlock> bchandle = new BroadcastObject<>(bret,
 					OptimizerUtils.estimatePartitionedSizeExactSparsity(fo.getMatrixCharacteristics()));
 			fo.setBroadcastHandle(bchandle);
 			CacheableData.addBroadcastSize(bchandle.getSize());
@@ -656,7 +656,7 @@ public class SparkExecutionContext extends ExecutionContext
 		throws DMLRuntimeException
 	{
 		CacheableData<?> obj = getCacheableData(varname);
-		RDDObject rddhandle = new RDDObject(rdd, varname);
+		RDDObject rddhandle = new RDDObject(rdd);
 		obj.setRDDHandle( rddhandle );
 	}
 
@@ -1236,10 +1236,10 @@ public class SparkExecutionContext extends ExecutionContext
 		   .count(); //trigger caching to prevent contention
 
 		//create new rdd handle, in-place of current matrix object
-		RDDObject inro =  mo.getRDDHandle();       //guaranteed to exist (see above)
-		RDDObject outro = new RDDObject(out, var); //create new rdd object
-		outro.setCheckpointRDD(true);              //mark as checkpointed
-		outro.addLineageChild(inro);               //keep lineage to prevent cycles on cleanup
+		RDDObject inro =  mo.getRDDHandle();  //guaranteed to exist (see above)
+		RDDObject outro = new RDDObject(out); //create new rdd object
+		outro.setCheckpointRDD(true);         //mark as checkpointed
+		outro.addLineageChild(inro);          //keep lineage to prevent cycles on cleanup
 		mo.setRDDHandle(outro);
 	}
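
For illustration only, a minimal sketch (not part of this patch; the import locations are inferred from the file paths above) of the simplified rdd-handle pattern after the variable name was dropped from RDDObject: a checkpoint handle is built directly from the rdd and chained to the previous handle for lineage.

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.instructions.spark.data.RDDObject;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.data.MatrixIndexes;

public class RddHandleSketch {
	public static void attachCheckpoint(MatrixObject mo,
		JavaPairRDD<MatrixIndexes, MatrixBlock> out)
	{
		RDDObject inro  = mo.getRDDHandle();  //existing input handle (assumed non-null)
		RDDObject outro = new RDDObject(out); //new handle, no variable name required anymore
		outro.setCheckpointRDD(true);         //mark as checkpointed
		outro.addLineageChild(inro);          //keep lineage to prevent cycles on cleanup
		mo.setRDDHandle(outro);
	}
}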
 

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
index 69d2a70..afb2c71 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
@@ -23,8 +23,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.sysml.hops.Hop;
-import org.apache.sysml.parser.Expression.DataType;
-import org.apache.sysml.parser.Expression.ValueType;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PDataPartitionFormat;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
@@ -71,12 +69,7 @@ public abstract class DataPartitioner
 	public MatrixObject createPartitionedMatrixObject( MatrixObject in, String fnameNew, boolean force )
 		throws DMLRuntimeException
 	{
-		ValueType vt = in.getValueType();
-		String varname = in.getVarName();
-		MatrixObject out = new MatrixObject(vt, fnameNew );
-		out.setDataType( DataType.MATRIX );
-		out.setVarName( varname+NAME_SUFFIX );		
-		
+		MatrixObject out = new MatrixObject(in.getValueType(), fnameNew);
 		return createPartitionedMatrixObject(in, out, force);
 	}
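
A hedged sketch (hypothetical helper, not part of the patch; package paths assumed from the surrounding diffs) of the simplified MatrixObject creation this commit adopts here and in the ResultMerge classes: only value type and file name are passed, the explicit setDataType/setVarName calls are gone, and metadata is copied from the source object where needed.

import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.runtime.matrix.MetaDataFormat;

public class MatrixObjectSketch {
	public static MatrixObject createOutput(MatrixObject in, String fnameNew) {
		//new output object; no setDataType/setVarName calls needed anymore
		MatrixObject out = new MatrixObject(in.getValueType(), fnameNew);
		//deep copy of metadata (characteristics plus input/output format info)
		MetaDataFormat meta = (MetaDataFormat) in.getMetaData();
		MatrixCharacteristics mc = meta.getMatrixCharacteristics();
		out.setMetaData(new MetaDataFormat(
			new MatrixCharacteristics(mc.getRows(), mc.getCols(),
				mc.getRowsPerBlock(), mc.getColsPerBlock(), mc.getNonZeros()),
			meta.getOutputInfo(), meta.getInputInfo()));
		return out;
	}
}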
 	

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java
index 05872b1..281ce07 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ParWorker.java
@@ -145,7 +145,7 @@ public abstract class ParWorker
 		throws DMLRuntimeException 
 	{
 		//monitoring start
-		Timing time1 = null, time2 = null;		
+		Timing time1 = null, time2 = null;
 		if( _monitor )
 		{
 			time1 = new Timing(true); 
@@ -155,12 +155,13 @@ public abstract class ParWorker
 		//core execution
 
 		//foreach iteration in task, execute iteration body
+		String lVarName = task.getVarName();
 		for( IntObject indexVal : task.getIterations() )
 		{
 			//System.out.println(" EXECUTE ITERATION: "+indexVal.getName()+"="+indexVal.getIntValue());
 			
 			//set index values
-			_ec.setVariable(indexVal.getName(), indexVal);
+			_ec.setVariable(lVarName, indexVal);
 			
 			// for each program block
 			for (ProgramBlock pb : _childBlocks)
@@ -186,7 +187,7 @@ public abstract class ParWorker
 		throws DMLRuntimeException 
 	{
 		//monitoring start
-		Timing time1 = null, time2 = null;		
+		Timing time1 = null, time2 = null;
 		if( _monitor )
 		{
 			time1 = new Timing(true); 
@@ -195,7 +196,7 @@ public abstract class ParWorker
 		
 		//core execution
 		List<IntObject> tmp = task.getIterations();
-		String lVarName = tmp.get(0).getName();
+		String lVarName = task.getVarName();
 		long lFrom      = tmp.get(0).getLongValue();
 		long lTo        = tmp.get(1).getLongValue();
 		long lIncr      = tmp.get(2).getLongValue();
@@ -203,7 +204,7 @@ public abstract class ParWorker
 		for( long i=lFrom; i<=lTo; i+=lIncr )
 		{
 			//set index values
-			_ec.setVariable(lVarName, new IntObject(lVarName,i));
+			_ec.setVariable(lVarName, new IntObject(i));
 			
 			// for each program block
 			for (ProgramBlock pb : _childBlocks)
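
To illustrate the new division of labor between Task and IntObject shown above, a small sketch (hypothetical helper, not part of the patch; imports assumed): the loop-variable name now comes from the task, while the iterations carry only the numeric from/to/increment values.

import java.util.List;
import org.apache.sysml.runtime.controlprogram.parfor.Task;
import org.apache.sysml.runtime.instructions.cp.IntObject;

public class RangeTaskSketch {
	/** Enumerates the index values of a range task, assuming a positive increment. */
	public static long[] expand(Task task) {
		List<IntObject> tmp = task.getIterations();
		String lVarName = task.getVarName();   //loop variable name, e.g. "i"
		long lFrom = tmp.get(0).getLongValue();
		long lTo   = tmp.get(1).getLongValue();
		long lIncr = tmp.get(2).getLongValue();
		int n = (int) ((lTo - lFrom) / lIncr + 1);
		long[] vals = new long[n];
		for( int j = 0; j < n; j++ )
			vals[j] = lFrom + j * lIncr;
		System.out.println("expanded "+lVarName+" into "+n+" iterations");
		return vals;
	}
}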

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java
index 860c7b6..3e4568b 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ProgramConverter.java
@@ -1718,19 +1718,16 @@ public class ProgramConverter
 				switch ( valuetype )
 				{
 					case INT:
-						long value1 = Long.parseLong(valString);
-						dat = new IntObject(name,value1);
+						dat = new IntObject(Long.parseLong(valString));
 						break;
 					case DOUBLE:
-						double value2 = Double.parseDouble(valString);
-						dat = new DoubleObject(name,value2);
+						dat = new DoubleObject(Double.parseDouble(valString));
 						break;
 					case BOOLEAN:
-						boolean value3 = Boolean.parseBoolean(valString);
-						dat = new BooleanObject(name,value3);
+						dat = new BooleanObject(Boolean.parseBoolean(valString));
 						break;
 					case STRING:
-						dat = new StringObject(name,valString);
+						dat = new StringObject(valString);
 						break;
 					default:
 						throw new DMLRuntimeException("Unable to parse valuetype "+valuetype);
@@ -1752,7 +1749,6 @@ public class ProgramConverter
 				MatrixCharacteristics mc = new MatrixCharacteristics(rows, cols, brows, bcols, nnz); 
 				MetaDataFormat md = new MetaDataFormat( mc, oin, iin );
 				mo.setMetaData( md );
-				mo.setVarName( name );
 				if( partFormat._dpf != PDataPartitionFormat.NONE )
 					mo.setPartitioned( partFormat._dpf, partFormat._N );
 				mo.setUpdateType(inplace);
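
As a compact reference for the scalar-parsing change above, a hedged sketch (hypothetical helper, not part of the patch; imports assumed from the SystemML package layout) that builds the now name-less scalar objects from a value string:

import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.instructions.cp.BooleanObject;
import org.apache.sysml.runtime.instructions.cp.Data;
import org.apache.sysml.runtime.instructions.cp.DoubleObject;
import org.apache.sysml.runtime.instructions.cp.IntObject;
import org.apache.sysml.runtime.instructions.cp.StringObject;

public class ScalarParseSketch {
	public static Data parseScalar(ValueType vt, String valString)
		throws DMLRuntimeException
	{
		switch( vt ) {
			case INT:     return new IntObject(Long.parseLong(valString));
			case DOUBLE:  return new DoubleObject(Double.parseDouble(valString));
			case BOOLEAN: return new BooleanObject(Boolean.parseBoolean(valString));
			case STRING:  return new StringObject(valString);
			default:
				throw new DMLRuntimeException("Unable to parse valuetype "+vt);
		}
	}
}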

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
index 367cc8b..9f2658c 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
@@ -121,8 +121,8 @@ public class RemoteDPParForSparkWorker extends ParWorker implements PairFlatMapF
 			mo.setInMemoryPartition( partition );
 			
 			//create tasks for input data
-			Task lTask = new Task(TaskType.SET);
-			lTask.addIteration( new IntObject(_iterVar, larg._1()) );
+			Task lTask = new Task(_iterVar, TaskType.SET);
+			lTask.addIteration( new IntObject(larg._1()) );
 			
 			//execute program
 			long numIter = getExecutedIterations();

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java
index 3dda64c..4c05791 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParWorkerReducer.java
@@ -99,8 +99,8 @@ public class RemoteDPParWorkerReducer extends ParWorker
 			mo.setInMemoryPartition( _partition );
 			
 			//create tasks for input data
-			Task lTask = new Task(TaskType.SET);
-			lTask.addIteration( new IntObject(_iterVar,key.get()) );
+			Task lTask = new Task(_iterVar, TaskType.SET);
+			lTask.addIteration( new IntObject(key.get()) );
 			
 			//execute program
 			executeTask( lTask );

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalFile.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalFile.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalFile.java
index 253883b..f245cbf 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalFile.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalFile.java
@@ -41,8 +41,6 @@ import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.sysml.conf.ConfigurationManager;
-import org.apache.sysml.parser.Expression.DataType;
-import org.apache.sysml.parser.Expression.ValueType;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.CacheException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
@@ -92,17 +90,13 @@ public class ResultMergeLocalFile extends ResultMerge
 		throws DMLRuntimeException 
 	{
 		MatrixObject moNew = null; //always create new matrix object (required for nested parallelism)
-
-		//Timing time = null;
-		LOG.trace("ResultMerge (local, file): Execute serial merge for output "+_output.getVarName()+" (fname="+_output.getFileName()+")");
-		//	time = new Timing();
-		//	time.start();
-
+		
+		if( LOG.isTraceEnabled() )
+			LOG.trace("ResultMerge (local, file): Execute serial merge for output "
+				+_output.hashCode()+" (fname="+_output.getFileName()+")");
 		
 		try
 		{
-			
-			
 			//collect all relevant inputs
 			ArrayList<MatrixObject> inMO = new ArrayList<>();
 			for( MatrixObject in : _inputs )
@@ -155,13 +149,8 @@ public class ResultMergeLocalFile extends ResultMerge
 	private MatrixObject createNewMatrixObject(MatrixObject output, ArrayList<MatrixObject> inMO ) 
 		throws DMLRuntimeException
 	{
-		String varName = _output.getVarName();
-		ValueType vt = _output.getValueType();
 		MetaDataFormat metadata = (MetaDataFormat) _output.getMetaData();
-		
-		MatrixObject moNew = new MatrixObject( vt, _outputFName );
-		moNew.setVarName( varName.contains(NAME_SUFFIX) ? varName : varName+NAME_SUFFIX );
-		moNew.setDataType( DataType.MATRIX );
+		MatrixObject moNew = new MatrixObject( _output.getValueType(), _outputFName );
 		
 		//create deep copy of metadata obj
 		MatrixCharacteristics mcOld = metadata.getMatrixCharacteristics();
@@ -231,7 +220,9 @@ public class ResultMergeLocalFile extends ResultMerge
 			{
 				for( MatrixObject in : inMO ) //read/write all inputs
 				{
-					LOG.trace("ResultMerge (local, file): Merge input "+in.getVarName()+" (fname="+in.getFileName()+") via stream merge");
+					if( LOG.isTraceEnabled() )
+						LOG.trace("ResultMerge (local, file): Merge input "+in.hashCode()+" (fname="
+							+in.getFileName()+") via stream merge");
 					
 					JobConf tmpJob = new JobConf(ConfigurationManager.getCachedJobConf());
 					Path tmpPath = new Path(in.getFileName());
@@ -282,13 +273,16 @@ public class ResultMergeLocalFile extends ResultMerge
 			MapReduceTool.deleteFileIfExistOnHDFS(fnameNew);
 			
 			//Step 0) write compare blocks to staging area (if necessary)
-			LOG.trace("ResultMerge (local, file): Create merge compare matrix for output "+outMo.getVarName()+" (fname="+outMo.getFileName()+")");
+			if( LOG.isTraceEnabled() )
+				LOG.trace("ResultMerge (local, file): Create merge compare matrix for output "
+					+outMo.hashCode()+" (fname="+outMo.getFileName()+")");
 			createTextCellStagingFile(fnameStagingCompare, outMo, 0);
 			
 			//Step 1) read and write blocks to staging area
 			for( MatrixObject in : inMO )
 			{
-				LOG.trace("ResultMerge (local, file): Merge input "+in.getVarName()+" (fname="+in.getFileName()+")");
+				if( LOG.isTraceEnabled() )
+					LOG.trace("ResultMerge (local, file): Merge input "+in.hashCode()+" (fname="+in.getFileName()+")");
 				
 				long ID = _seq.getNextID();
 				createTextCellStagingFile( fnameStaging, in, ID );
@@ -334,7 +328,9 @@ public class ResultMergeLocalFile extends ResultMerge
 			{
 				for( MatrixObject in : inMO ) //read/write all inputs
 				{
-					LOG.trace("ResultMerge (local, file): Merge input "+in.getVarName()+" (fname="+in.getFileName()+") via stream merge");
+					if( LOG.isTraceEnabled() )
+						LOG.trace("ResultMerge (local, file): Merge input "
+							+in.hashCode()+" (fname="+in.getFileName()+") via stream merge");
 					
 					JobConf tmpJob = new JobConf(ConfigurationManager.getCachedJobConf());
 					Path tmpPath = new Path(in.getFileName());
@@ -377,13 +373,16 @@ public class ResultMergeLocalFile extends ResultMerge
 			MapReduceTool.deleteFileIfExistOnHDFS(fnameNew);
 			
 			//Step 0) write compare blocks to staging area (if necessary)
-			LOG.trace("ResultMerge (local, file): Create merge compare matrix for output "+outMo.getVarName()+" (fname="+outMo.getFileName()+")");
+			if( LOG.isTraceEnabled() )
+				LOG.trace("ResultMerge (local, file): Create merge compare matrix for output "
+					+outMo.hashCode()+" (fname="+outMo.getFileName()+")");
 			createBinaryCellStagingFile(fnameStagingCompare, outMo, 0);
 			
 			//Step 1) read and write blocks to staging area
 			for( MatrixObject in : inMO )
 			{
-				LOG.trace("ResultMerge (local, file): Merge input "+in.getVarName()+" (fname="+in.getFileName()+")");
+				if( LOG.isTraceEnabled() )
+					LOG.trace("ResultMerge (local, file): Merge input "+in.hashCode()+" (fname="+in.getFileName()+")");
 				
 				long ID = _seq.getNextID();
 				createBinaryCellStagingFile( fnameStaging, in, ID );
@@ -414,7 +413,8 @@ public class ResultMergeLocalFile extends ResultMerge
 			//Step 1) read and write blocks to staging area
 			for( MatrixObject in : inMO )
 			{
-				LOG.trace("ResultMerge (local, file): Merge input "+in.getVarName()+" (fname="+in.getFileName()+")");				
+				if( LOG.isTraceEnabled() )
+					LOG.trace("ResultMerge (local, file): Merge input "+in.hashCode()+" (fname="+in.getFileName()+")");
 				
 				createBinaryBlockStagingFile( fnameStaging, in );
 			}
@@ -442,14 +442,17 @@ public class ResultMergeLocalFile extends ResultMerge
 			MapReduceTool.deleteFileIfExistOnHDFS(fnameNew);
 			
 			//Step 0) write compare blocks to staging area (if necessary)
-			LOG.trace("ResultMerge (local, file): Create merge compare matrix for output "+outMo.getVarName()+" (fname="+outMo.getFileName()+")");			
+			if( LOG.isTraceEnabled() )
+				LOG.trace("ResultMerge (local, file): Create merge compare matrix for output "
+					+outMo.hashCode()+" (fname="+outMo.getFileName()+")");
 			
 			createBinaryBlockStagingFile(fnameStagingCompare, outMo);
 			
 			//Step 1) read and write blocks to staging area
 			for( MatrixObject in : inMO )
 			{
-				LOG.trace("ResultMerge (local, file): Merge input "+in.getVarName()+" (fname="+in.getFileName()+")");		
+				if( LOG.isTraceEnabled() )
+					LOG.trace("ResultMerge (local, file): Merge input "+in.hashCode()+" (fname="+in.getFileName()+")");
 				createBinaryBlockStagingFile( fnameStaging, in );
 			}
 	
@@ -1017,8 +1020,10 @@ public class ResultMergeLocalFile extends ResultMerge
 		//merge in all input matrix objects
 		IDSequence seq = new IDSequence();
 		for( MatrixObject in : inMO )
-		{			
-			LOG.trace("ResultMerge (local, file): Merge input "+in.getVarName()+" (fname="+in.getFileName()+") via file rename.");
+		{
+			if( LOG.isTraceEnabled() )
+				LOG.trace("ResultMerge (local, file): Merge input "+in.hashCode()
+					+" (fname="+in.getFileName()+") via file rename.");
 			
 			//copy over files (just rename file or entire dir)
 			Path tmpPath = new Path(in.getFileName());

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalMemory.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalMemory.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalMemory.java
index 5fcfb26..a31294e 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalMemory.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeLocalMemory.java
@@ -21,7 +21,6 @@ package org.apache.sysml.runtime.controlprogram.parfor;
 
 import java.util.ArrayList;
 
-import org.apache.sysml.parser.Expression.DataType;
 import org.apache.sysml.parser.Expression.ValueType;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
@@ -56,9 +55,11 @@ public class ResultMergeLocalMemory extends ResultMerge
 		throws DMLRuntimeException
 	{
 		MatrixObject moNew = null; //always create new matrix object (required for nested parallelism)
-
-		LOG.trace("ResultMerge (local, in-memory): Execute serial merge for output "+_output.getVarName()+" (fname="+_output.getFileName()+")");
-				
+		
+		if( LOG.isTraceEnabled() )
+			LOG.trace("ResultMerge (local, in-memory): Execute serial merge for output "
+				+_output.hashCode()+" (fname="+_output.getFileName()+")");
+		
 		try
 		{
 			//get old output matrix from cache for compare
@@ -83,7 +84,8 @@ public class ResultMergeLocalMemory extends ResultMerge
 				//check for empty inputs (no iterations executed)
 				if( in != null && in != _output ) 
 				{
-					LOG.trace("ResultMerge (local, in-memory): Merge input "+in.getVarName()+" (fname="+in.getFileName()+")");
+					if( LOG.isTraceEnabled() )
+						LOG.trace("ResultMerge (local, in-memory): Merge input "+in.hashCode()+" (fname="+in.getFileName()+")");
 					
 					//read/pin input_i
 					MatrixBlock inMB = in.acquireRead();	
@@ -145,7 +147,9 @@ public class ResultMergeLocalMemory extends ResultMerge
 	{		
 		MatrixObject moNew = null; //always create new matrix object (required for nested parallelism)
 		
-		LOG.trace("ResultMerge (local, in-memory): Execute parallel (par="+par+") merge for output "+_output.getVarName()+" (fname="+_output.getFileName()+")");
+		if( LOG.isTraceEnabled() )
+			LOG.trace("ResultMerge (local, in-memory): Execute parallel (par="+par+") "
+				+ "merge for output "+_output.hashCode()+" (fname="+_output.getFileName()+")");
 		
 		try
 		{
@@ -205,7 +209,7 @@ public class ResultMergeLocalMemory extends ResultMerge
 			}
 			
 			//release old output, and all inputs
-			_output.release();			
+			_output.release();
 		}
 		catch(Exception ex) {
 			throw new DMLRuntimeException(ex);
@@ -213,7 +217,7 @@ public class ResultMergeLocalMemory extends ResultMerge
 		
 		//LOG.trace("ResultMerge (local, in-memory): Executed parallel (par="+par+") merge for output "+_output.getVarName()+" (fname="+_output.getFileName()+") in "+time.stop()+"ms");
 
-		return moNew;		
+		return moNew;
 	}
 
 	private static double[][] createCompareMatrix( MatrixBlock output ) {
@@ -226,13 +230,9 @@ public class ResultMergeLocalMemory extends ResultMerge
 	private MatrixObject createNewMatrixObject( MatrixBlock data ) 
 		throws DMLRuntimeException
 	{
-		String varName = _output.getVarName();
 		ValueType vt = _output.getValueType();
 		MetaDataFormat metadata = (MetaDataFormat) _output.getMetaData();
-		
 		MatrixObject moNew = new MatrixObject( vt, _outputFName );
-		moNew.setVarName( varName.contains(NAME_SUFFIX) ? varName : varName+NAME_SUFFIX );
-		moNew.setDataType( DataType.MATRIX );
 		
 		//create deep copy of metadata obj
 		MatrixCharacteristics mcOld = metadata.getMatrixCharacteristics();
@@ -315,7 +315,7 @@ public class ResultMergeLocalMemory extends ResultMerge
 			//read each input if required
 			try
 			{
-				LOG.trace("ResultMerge (local, in-memory): Merge input "+_inMO.getVarName()+" (fname="+_inMO.getFileName()+")");
+				LOG.trace("ResultMerge (local, in-memory): Merge input "+_inMO.hashCode()+" (fname="+_inMO.getFileName()+")");
 				
 				MatrixBlock inMB = _inMO.acquireRead(); //incl. implicit read from HDFS
 				merge( _outMB, inMB, false );

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java
index 76cbd40..7ea1543 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteMR.java
@@ -34,8 +34,6 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.sysml.api.DMLScript;
 import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
-import org.apache.sysml.parser.Expression.DataType;
-import org.apache.sysml.parser.Expression.ValueType;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
 import org.apache.sysml.runtime.controlprogram.parfor.util.StagingFileUtils;
@@ -99,7 +97,7 @@ public class ResultMergeRemoteMR extends ResultMerge
 		MatrixObject moNew = null; //always create new matrix object (required for nested parallelism)
 		if( LOG.isTraceEnabled() )
 			LOG.trace("ResultMerge (remote, mr): Execute serial merge for output "
-				+_output.getVarName()+" (fname="+_output.getFileName()+")");
+				+_output.hashCode()+" (fname="+_output.getFileName()+")");
 		
 		try
 		{
@@ -135,11 +133,7 @@ public class ResultMergeRemoteMR extends ResultMerge
 						     mcOld.getRowsPerBlock(), mcOld.getColsPerBlock());
 				
 				//create new output matrix (e.g., to prevent potential export<->read file access conflict
-				String varName = _output.getVarName();
-				ValueType vt = _output.getValueType();
-				moNew = new MatrixObject( vt, _outputFName );
-				moNew.setVarName( varName.contains(NAME_SUFFIX) ? varName : varName+NAME_SUFFIX );
-				moNew.setDataType( DataType.MATRIX );
+				moNew = new MatrixObject(_output.getValueType(), _outputFName);
 				OutputInfo oiOld = metadata.getOutputInfo();
 				InputInfo iiOld = metadata.getInputInfo();
 				MatrixCharacteristics mc = new MatrixCharacteristics(mcOld.getRows(),mcOld.getCols(),

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java
index 9accfea..2b64bb2 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/ResultMergeRemoteSpark.java
@@ -28,8 +28,6 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.sysml.api.DMLScript;
-import org.apache.sysml.parser.Expression.DataType;
-import org.apache.sysml.parser.Expression.ValueType;
 import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
@@ -79,7 +77,9 @@ public class ResultMergeRemoteSpark extends ResultMerge
 	{
 		MatrixObject moNew = null; //always create new matrix object (required for nested parallelism)
 
-		LOG.trace("ResultMerge (remote, spark): Execute serial merge for output "+_output.getVarName()+" (fname="+_output.getFileName()+")");
+		if( LOG.isTraceEnabled() )
+			LOG.trace("ResultMerge (remote, spark): Execute serial merge for output "
+				+_output.hashCode()+" (fname="+_output.getFileName()+")");
 
 		try
 		{
@@ -91,14 +91,10 @@ public class ResultMergeRemoteSpark extends ResultMerge
 				MatrixObject compare = (mcOld.getNonZeros()==0) ? null : _output;
 				
 				//actual merge
-				RDDObject ro = executeMerge(compare, _inputs, _output.getVarName(), mcOld.getRows(), mcOld.getCols(), mcOld.getRowsPerBlock(), mcOld.getColsPerBlock());
+				RDDObject ro = executeMerge(compare, _inputs, mcOld.getRows(), mcOld.getCols(), mcOld.getRowsPerBlock(), mcOld.getColsPerBlock());
 				
 				//create new output matrix (e.g., to prevent potential export<->read file access conflict
-				String varName = _output.getVarName();
-				ValueType vt = _output.getValueType();
-				moNew = new MatrixObject( vt, _outputFName );
-				moNew.setVarName( varName.contains(NAME_SUFFIX) ? varName : varName+NAME_SUFFIX );
-				moNew.setDataType( DataType.MATRIX );
+				moNew = new MatrixObject(_output.getValueType(), _outputFName);
 				OutputInfo oiOld = metadata.getOutputInfo();
 				InputInfo iiOld = metadata.getInputInfo();
 				MatrixCharacteristics mc = new MatrixCharacteristics(mcOld.getRows(),mcOld.getCols(),
@@ -122,7 +118,7 @@ public class ResultMergeRemoteSpark extends ResultMerge
 	}
 
 	@SuppressWarnings("unchecked")
-	protected RDDObject executeMerge(MatrixObject compare, MatrixObject[] inputs, String varname, long rlen, long clen, int brlen, int bclen)
+	protected RDDObject executeMerge(MatrixObject compare, MatrixObject[] inputs, long rlen, long clen, int brlen, int bclen)
 		throws DMLRuntimeException 
 	{
 		String jobname = "ParFor-RMSP";
@@ -187,7 +183,7 @@ public class ResultMergeRemoteSpark extends ResultMerge
 			}
 		    
 			//Step 3: create output rdd handle w/ lineage
-			ret = new RDDObject(out, varname);
+			ret = new RDDObject(out);
 			for(int i=0; i<paths.length; i++)
 				ret.addLineageChild(inputs[i].getRDDHandle());
 			if( withCompare )
@@ -221,7 +217,7 @@ public class ResultMergeRemoteSpark extends ResultMerge
 		JavaSparkContext sc = sec.getSparkContext();
 		JavaPairRDD<MatrixIndexes,MatrixBlock> rdd = (JavaPairRDD<MatrixIndexes,MatrixBlock>) 
 			sc.hadoopFile( mo.getFileName(), iinfo.inputFormatClass, iinfo.inputKeyClass, iinfo.inputValueClass);
-		RDDObject rddhandle = new RDDObject(rdd, mo.getVarName());
+		RDDObject rddhandle = new RDDObject(rdd);
 		rddhandle.setHDFSFile(true);
 		mo.setRDDHandle(rddhandle);
 	}

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java
index 7edb3f2..d09fb89 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/Task.java
@@ -47,21 +47,23 @@ public class Task implements Serializable
 	public static final int MAX_VARNAME_SIZE  = 256;
 	public static final int MAX_TASK_SIZE     = Integer.MAX_VALUE-1; 
 	
-	private TaskType           	  _type;
+	private String _iterVar;
+	private TaskType _type;
 	private LinkedList<IntObject> _iterations; //each iteration is specified as an ordered set of index values
 	
 	public Task() {
 		//default constructor for serialize
 	}
 	
-	public Task( TaskType type ) {
+	public Task( String iterVar, TaskType type ) {
+		if( iterVar.length() > MAX_VARNAME_SIZE )
+			throw new RuntimeException("Cannot create task, MAX_VARNAME_SIZE exceeded.");
+		_iterVar = iterVar;
 		_type = type;
 		_iterations = new LinkedList<>();
 	}
 	
 	public void addIteration( IntObject indexVal )  {
-		if( indexVal.getName().length() > MAX_VARNAME_SIZE )
-			throw new RuntimeException("Cannot add iteration, MAX_VARNAME_SIZE exceeded.");
 		if( size() >= MAX_TASK_SIZE )
 			throw new RuntimeException("Cannot add iteration, MAX_TASK_SIZE reached.");
 		_iterations.addLast( indexVal );
@@ -75,6 +77,10 @@ public class Task implements Serializable
 		return _type;
 	}
 	
+	public String getVarName() {
+		return _iterVar;
+	}
+	
 	public int size() {
 		return _iterations.size();
 	}
@@ -96,7 +102,7 @@ public class Task implements Serializable
 			if( count!=0 ) 
 				sb.append(";");
 			sb.append("[");
-			sb.append(dat.getName());
+			sb.append(_iterVar);
 			sb.append("=");
 			sb.append(dat.getLongValue());
 			sb.append("]");
@@ -115,8 +121,7 @@ public class Task implements Serializable
 		if( size() > 0 )
 		{
 			sb.append(".");
-			IntObject dat0 = _iterations.getFirst();
-			sb.append(dat0.getName());
+			sb.append(_iterVar);
 			sb.append(".{");
 		
 			int count = 0;
@@ -142,8 +147,7 @@ public class Task implements Serializable
 		if( size() > 0 )
 		{
 			sb.append(".");
-			IntObject dat0 = _iterations.getFirst();
-			sb.append(dat0.getName());
+			sb.append(_iterVar);
 			sb.append(".{");
 		
 			int count = 0;
@@ -168,19 +172,18 @@ public class Task implements Serializable
 	public static Task parseCompactString( String stask )
 	{
 		StringTokenizer st = new StringTokenizer( stask.trim(), "." );
-		
-		Task newTask = new Task( TaskType.valueOf(st.nextToken()) );
+		TaskType type = TaskType.valueOf(st.nextToken());
 		String meta = st.nextToken();
+		Task newTask = new Task(meta, type);
 		
 		//iteration data
 		String sdata = st.nextToken();
 		sdata = sdata.substring(1,sdata.length()-1); // remove brackets
 		StringTokenizer st2 = new StringTokenizer(sdata, ",");
-		while( st2.hasMoreTokens() )
-		{
+		while( st2.hasMoreTokens() ) {
 			//create new iteration
 			String lsdata = st2.nextToken();
-			IntObject ldata = new IntObject(meta,Integer.parseInt( lsdata ) );
+			IntObject ldata = new IntObject(Integer.parseInt(lsdata));
 			newTask.addIteration(ldata);
 		}
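
A minimal usage sketch (not part of the patch; the nested TaskType import path is an assumption) of the refactored Task API: the iteration-variable name is stored once per task, validated against MAX_VARNAME_SIZE at construction, and the IntObject index values no longer carry a name.

import org.apache.sysml.runtime.controlprogram.parfor.Task;
import org.apache.sysml.runtime.controlprogram.parfor.Task.TaskType;
import org.apache.sysml.runtime.instructions.cp.IntObject;

public class TaskApiSketch {
	public static void main(String[] args) {
		//value-based (SET) task over iterations 1..3 of loop variable "i"
		Task lTask = new Task("i", TaskType.SET);
		for( long v = 1; v <= 3; v++ )
			lTask.addIteration(new IntObject(v)); //index values carry only the value
		
		//the variable name is now obtained from the task itself
		System.out.println(lTask.getVarName()+" has "+lTask.size()+" iterations");
	}
}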
 		

http://git-wip-us.apache.org/repos/asf/systemml/blob/ffefd8e6/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFactoring.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFactoring.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFactoring.java
index 4a00037..547e607 100644
--- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFactoring.java
+++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/TaskPartitionerFactoring.java
@@ -80,23 +80,23 @@ public class TaskPartitionerFactoring extends TaskPartitioner
 					break;
 				
 				//create new task and add to list of tasks
-				Task lTask = new Task( type );
+				Task lTask = new Task(_iterVarName, type);
 				tasks.addLast(lTask);
 				
 				// add iterations to task 
 				if( type == TaskType.SET ) {
 					//value based tasks
 					for( long k=0; k<K && i<=lTo; k++, i+=lIncr )
-						lTask.addIteration(new IntObject(_iterVarName, i));
+						lTask.addIteration(new IntObject(i));
 				}
 				else {
 					//determine end of task
 					long to = Math.min( i+(K-1)*lIncr, lTo );
 					
 					//range based tasks
-					lTask.addIteration(new IntObject(_iterVarName, i));	    //from
-					lTask.addIteration(new IntObject(_iterVarName, to));    //to
-					lTask.addIteration(new IntObject(_iterVarName, lIncr));	//increment
+					lTask.addIteration(new IntObject(i));     //from
+					lTask.addIteration(new IntObject(to));    //to
+					lTask.addIteration(new IntObject(lIncr)); //increment
 					i = to + lIncr;
 				}
 			}
@@ -138,16 +138,14 @@ public class TaskPartitionerFactoring extends TaskPartitioner
 						break;
 					
 					//create new task and add to list of tasks
-					Task lTask = new Task( type );
+					Task lTask = new Task(_iterVarName, type);
 					
 					// add iterations to task 
 					if( type == TaskType.SET ) 
 					{
 						//value based tasks
 						for( long k=0; k<K && i<=lTo; k++, i+=lIncr )
-						{
-							lTask.addIteration(new IntObject(_iterVarName, i));				
-						}				
+							lTask.addIteration(new IntObject(i));
 					}
 					else 
 					{
@@ -155,9 +153,9 @@ public class TaskPartitionerFactoring extends TaskPartitioner
 						long to = Math.min( i+(K-1)*lIncr, lTo );
 						
 						//range based tasks
-						lTask.addIteration(new IntObject(_iterVarName, i));	    //from
-						lTask.addIteration(new IntObject(_iterVarName, to));    //to
-						lTask.addIteration(new IntObject(_iterVarName, lIncr));	//increment
+						lTask.addIteration(new IntObject(i));     //from
+						lTask.addIteration(new IntObject(to));    //to
+						lTask.addIteration(new IntObject(lIncr)); //increment
 						
 						i = to + lIncr;
 					}