Posted to commits@systemds.apache.org by ba...@apache.org on 2021/06/03 11:34:40 UTC

[systemds] 02/04: [SYSTEMDS-2828] Python Multi Return Integration

This is an automated email from the ASF dual-hosted git repository.

baunsgaard pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/systemds.git

commit 6b885a08fec0f1e07964c5e733f87fb438eb1ed6
Author: baunsgaard <ba...@tugraz.at>
AuthorDate: Wed Jun 2 22:03:14 2021 +0200

    [SYSTEMDS-2828] Python Multi Return Integration
    
    This commit fixes the integration of the multi-return PR
    by adding a few extra node classes to reflect the
    difference between multi returns from functions and lists.
    Support for both is added in this commit, but the generator script
    does not yet comply with the changes.
---
 .../python/systemds/context/systemds_context.py    |  15 +--
 src/main/python/systemds/operator/__init__.py      |   6 +-
 src/main/python/systemds/operator/nodes/frame.py   |  34 ++++---
 src/main/python/systemds/operator/nodes/list.py    |  70 +++++++-------
 .../python/systemds/operator/nodes/list_access.py  |  63 ++++++++++++
 src/main/python/systemds/operator/nodes/matrix.py  |  38 +++++---
 .../python/systemds/operator/nodes/multi_return.py |  85 +++++++++++++++++
 src/main/python/systemds/operator/nodes/scalar.py  |  20 ++--
 src/main/python/systemds/operator/nodes/source.py  |   9 +-
 .../python/systemds/operator/operation_node.py     | 103 +++-----------------
 src/main/python/systemds/script_building/dag.py    |   2 +
 src/main/python/systemds/script_building/script.py |  34 +++++--
 .../python/tests/frame/test_transform_encode.py    |  15 +++
 .../{systemds/operator => tests/list}/__init__.py  |  10 --
 .../__init__.py => tests/list/return_list.dml}     |  19 ++--
 .../test_list.py}                                  |  41 ++++++--
 .../test_list_unknown.py}                          |  31 ++++--
 src/main/python/tests/list/test_operations.py      | 106 ---------------------
 .../python/tests/source/test_source_neural_net.py  |  17 ++--
 19 files changed, 378 insertions(+), 340 deletions(-)
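
For orientation, a minimal usage sketch of the two flows this commit
distinguishes, closely following the new tests in
tests/frame/test_transform_encode.py and tests/list/test_list.py
(file paths and read arguments below are abbreviated placeholders; the
actual test also passes a schema to read):

    import numpy as np
    from systemds.context import SystemDSContext

    sds = SystemDSContext()

    # Multi return from a function: transform_encode now builds a MultiReturn
    # wrapping a Matrix and a Frame output node, which can be unpacked directly.
    F1 = sds.read("homes.csv", data_type="frame", format="csv", header=True)
    jspec = sds.read("homes.tfspec.json", data_type="scalar", value_type="string")
    X, M = F1.transform_encode(spec=jspec)
    encoded = (X + 1).compute()

    # List of nodes: indexing yields the stored node (or a lazy ListAccess).
    m1 = sds.from_numpy(np.array([1., 2., 3.]))
    m2 = sds.from_numpy(np.array([4., 5., 6.]))
    lst = sds.list(m1, m2)
    summed = (lst[0] + lst[1]).compute()

    sds.close()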

diff --git a/src/main/python/systemds/context/systemds_context.py b/src/main/python/systemds/context/systemds_context.py
index 80db697..8a2ebd6 100644
--- a/src/main/python/systemds/context/systemds_context.py
+++ b/src/main/python/systemds/context/systemds_context.py
@@ -446,7 +446,7 @@ class SystemDSContext(object):
         The importing is done through the DML command source, and adds all defined methods from
         the script to the Source object returned in python. This gives the flexibility to call the methods 
         directly on the object returned.
-    
+
         In systemds a method called func_01 can then be imported using
 
         ```python
@@ -460,15 +460,4 @@ class SystemDSContext(object):
         return Source(self, path, name, print_imported_methods)
 
     def list(self, *args: Sequence[VALID_INPUT_TYPES], **kwargs: Dict[str, VALID_INPUT_TYPES]) -> 'List':
-        if len(kwargs) != 0 and len(args) != 0:
-            raise Exception("Accepts either args or kwargs")
-        elif len(kwargs) != 0:
-            out = []
-            for key, arg in kwargs.items():
-                out.append((key, OutputType.from_type(arg)))
-            return List(self, 'list', named_input_nodes=kwargs, outputs=out)
-        elif len(args) != 0:
-            out = []
-            for idx, arg in enumerate(args):
-                out.append((f"_{idx}", OutputType.from_type(arg)))
-            return List(self, 'list', unnamed_input_nodes=args, outputs=out)
+        return List(self, unnamed_input_nodes=args, named_input_nodes=kwargs)
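
A small sketch of the simplified entry point (assuming an open
SystemDSContext named sds); argument validation and element bookkeeping
now live in the List node itself:

    import numpy as np

    lst = sds.list(sds.from_numpy(np.array([1., 2., 3.])),
                   sds.from_numpy(np.array([4., 5., 6.])))         # unnamed entries
    named = sds.list(a=sds.full((2, 2), 1), b=sds.full((2, 2), 2))  # named entries
    # Mixing positional and keyword arguments raises a ValueError in List.__init__.
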
diff --git a/src/main/python/systemds/operator/__init__.py b/src/main/python/systemds/operator/__init__.py
index cda9ba2..ebdb6ee 100644
--- a/src/main/python/systemds/operator/__init__.py
+++ b/src/main/python/systemds/operator/__init__.py
@@ -20,11 +20,13 @@
 # -------------------------------------------------------------
 
 from systemds.operator.operation_node import OperationNode
+from systemds.operator.nodes.multi_return import MultiReturn
 from systemds.operator.nodes.scalar import Scalar
 from systemds.operator.nodes.matrix import Matrix
 from systemds.operator.nodes.frame import Frame
-from systemds.operator.nodes.source import Source
+from systemds.operator.nodes.list_access import ListAccess
 from systemds.operator.nodes.list import List
+from systemds.operator.nodes.source import Source
 from systemds.operator import algorithm
 
-__all__ = [OperationNode, algorithm, Scalar, List, Matrix, Frame, Source]
+__all__ = [OperationNode, algorithm, Scalar, List, ListAccess, Matrix, Frame, Source, MultiReturn]
diff --git a/src/main/python/systemds/operator/nodes/frame.py b/src/main/python/systemds/operator/nodes/frame.py
index 2b38065..0beb05f 100644
--- a/src/main/python/systemds/operator/nodes/frame.py
+++ b/src/main/python/systemds/operator/nodes/frame.py
@@ -27,9 +27,9 @@ from typing import Dict, Optional, Sequence, Tuple, Union, TYPE_CHECKING, Iterab
 import numpy as np
 import pandas as pd
 from py4j.java_gateway import JavaObject, JVMView
-from systemds.operator import OperationNode, Matrix
+from systemds.operator import OperationNode, Matrix, MultiReturn
 from systemds.utils.consts import VALID_INPUT_TYPES
-from systemds.utils.converters import pandas_to_frame_block
+from systemds.utils.converters import pandas_to_frame_block, frame_block_to_pandas
 from systemds.script_building.dag import OutputType, DAGNode
 
 if TYPE_CHECKING:
@@ -78,20 +78,31 @@ class Frame(OperationNode):
         else:
             return super().compute(verbose, lineage)
 
+    def _parse_output_result_variables(self, result_variables):
+        return frame_block_to_pandas(self.sds_context, result_variables.getFrameBlock(self._script.out_var_name[0]))
+
     def _is_pandas(self) -> bool:
         return self._pd_dataframe is not None
 
     def transform_encode(self, spec: "Scalar"):
         params_dict = {"target": self, "spec": spec}
-        return OperationNode(
+        
+        frame = Frame(self.sds_context,"")
+        matrix = Matrix(self.sds_context,"")
+
+        output_nodes = [matrix,frame]
+        op = MultiReturn(
             self.sds_context,
             "transformencode",
+            output_nodes,
             named_input_nodes=params_dict,
-            output_type=OutputType.LIST,
-            number_of_outputs=2,
-            output_types=[OutputType.MATRIX, OutputType.FRAME],
         )
 
+        frame._unnamed_input_nodes = [op]
+        matrix._unnamed_input_nodes = [op]
+
+        return op
+
     def transform_apply(self, spec: "Scalar", meta: "Frame"):
         params_dict = {"target": self, "spec": spec, "meta": meta}
         return Matrix(self.sds_context, "transformapply", named_input_nodes=params_dict)
@@ -103,7 +114,7 @@ class Frame(OperationNode):
         :return: The OperationNode containing the concatenated frames.
         """
 
-        return Frame(self.sds_context, "rbind",[self, other])
+        return Frame(self.sds_context, "rbind", [self, other])
 
     def cbind(self, other) -> 'Frame':
         """
@@ -111,12 +122,5 @@ class Frame(OperationNode):
         :param: The other frame to bind to the right hand side.
         :return: The OperationNode containing the concatenated frames.
         """
-        return Frame(self.sds_context,"cbind",[self, other])
-
-    def t(self) -> 'OperationNode':
-        """ Transposes the input
-
-        :return: the OperationNode representing this operation
-        """
+        return Frame(self.sds_context, "cbind", [self, other])
 
-        return Frame(self.sds_context, 't', [self])
diff --git a/src/main/python/systemds/operator/nodes/list.py b/src/main/python/systemds/operator/nodes/list.py
index 64e37eb..90e2e90 100644
--- a/src/main/python/systemds/operator/nodes/list.py
+++ b/src/main/python/systemds/operator/nodes/list.py
@@ -26,7 +26,7 @@ from typing import Dict, Sequence, Tuple, Union, Iterable, List
 import numpy as np
 from py4j.java_gateway import JavaObject
 
-from systemds.operator import OperationNode, Matrix
+from systemds.operator import OperationNode, ListAccess
 from systemds.script_building.dag import OutputType
 from systemds.utils.consts import VALID_INPUT_TYPES
 from systemds.utils.converters import numpy_to_matrix_block
@@ -35,26 +35,39 @@ from systemds.utils.helpers import create_params_string
 
 class List(OperationNode):
 
-    def __init__(self, sds_context: 'SystemDSContext', operation: str,
-                 unnamed_input_nodes: Union[str, Iterable[VALID_INPUT_TYPES]] = None,
-                 named_input_nodes: Dict[str, VALID_INPUT_TYPES] = None,
-                 outputs: List[Tuple[str, OutputType]] = [("_1", OutputType.MATRIX)]):
+    def __init__(self, sds_context: 'SystemDSContext', func='list',
+                 unnamed_input_nodes: Union[str,
+                                            Iterable[VALID_INPUT_TYPES]] = None,
+                 named_input_nodes: Dict[str, VALID_INPUT_TYPES] = None):
 
-        is_python_local_data = False
-        self._outputs = outputs
-        self._named_output_nodes = {}
-        for idx, output in enumerate(outputs):
-            if output[1] == OutputType.MATRIX:
-                self.named_output_nodes[output[0]] = Matrix(sds_context, operation='list', named_input_nodes={f"_{idx}": self})
-                # TODO add output types
+        named = named_input_nodes != None and len(named_input_nodes) != 0
+        unnamed = unnamed_input_nodes != None and len(unnamed_input_nodes) != 0
+        if func == "list":
+            if named and unnamed:
+                raise ValueError(
+                    "A List cannot both contain named and unamed variables")
+            elif unnamed:
+                self._outputs = []
+                for v in unnamed_input_nodes:
+                    self._outputs.append(v)
+            else:
+                self._outputs = {}
+                for idx, v in named_input_nodes.items():
+                    self._outputs[idx] = v
+        else:
+            # Initialize the outputs as an empty dict, and populate it when items are requested.
+            self._outputs = {}
 
-        super().__init__(sds_context, operation, unnamed_input_nodes,
-                         named_input_nodes, OutputType.LIST, is_python_local_data)
+        super().__init__(sds_context, func, unnamed_input_nodes,
+                         named_input_nodes, OutputType.LIST, False)
 
     def __getitem__(self, key):
-        if isinstance(key, int):
-            return self.named_output_nodes[self._outputs[key][0]]
-        return self.named_output_nodes[key]
+        if key in self._outputs:
+            return self._outputs[key]
+        else:
+            ent = ListAccess(self.sds_context, self, key)
+            self._outputs[key] = ent
+            return ent
 
     def pass_python_data_to_prepared_script(self, sds, var_name: str, prepared_script: JavaObject) -> None:
         assert self.is_python_local_data, 'Can only pass data to prepared script if it is python local!'
@@ -62,27 +75,12 @@ class List(OperationNode):
             prepared_script.setMatrix(var_name, numpy_to_matrix_block(
                 sds, self._np_array), True)  # True for reuse
 
-    def __parse_output_result_list(self, result_variables):
-        result_var = []
-        named_output_nodes_types_list = [type(named_output_node).__name__ for named_output_node in list(self.named_output_nodes.values())]
-        for idx, v in enumerate(self._script.out_var_name):
-            if named_output_nodes_types_list[idx] == "Matrix":
-                result_var.append(self.__parse_output_result_matrix(result_variables, v))
-            elif named_output_nodes_types_list[idx] == "Frame":
-                result_var.append(self.__parse_output_result_frame(result_variables, v))
-            else:
-                result_var.append(result_variables.getDouble(self._script.out_var_name[idx]))
-        return result_var
-
     def code_line(self, var_name: str, unnamed_input_vars: Sequence[str],
                   named_input_vars: Dict[str, str]) -> str:
-
-        inputs_comma_sep = create_params_string(unnamed_input_vars, named_input_vars)
-        output = "["
-        for idx, output_node in enumerate(self.named_output_nodes):
-            output += f'{var_name}_{idx},'
-        output = output[:-1] + "]"
-        return f'{output}={self.operation}({inputs_comma_sep});'
+        inputs_comma_sep = create_params_string(
+            unnamed_input_vars, named_input_vars)
+        return f'{var_name}={self.operation}({inputs_comma_sep});'
 
     def compute(self, verbose: bool = False, lineage: bool = False) -> Union[np.array]:
         return super().compute(verbose, lineage)
+
diff --git a/src/main/python/systemds/operator/nodes/list_access.py b/src/main/python/systemds/operator/nodes/list_access.py
new file mode 100644
index 0000000..869377f
--- /dev/null
+++ b/src/main/python/systemds/operator/nodes/list_access.py
@@ -0,0 +1,63 @@
+# -------------------------------------------------------------
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# -------------------------------------------------------------
+
+__all__ = ["ListAccess"]
+
+from typing import Dict, Sequence, Tuple, Union, Iterable
+
+import numpy as np
+from py4j.java_gateway import JavaObject
+
+from systemds.operator import OperationNode, Matrix, Frame, Scalar
+from systemds.script_building.dag import OutputType
+
+
+class ListAccess(OperationNode):
+
+    def __init__(self, sds_context: 'SystemDSContext', list_source: 'List', key):
+        self._key = key
+        self._list_source = list_source
+
+        inputs = [list_source]
+        super().__init__(sds_context, None, unnamed_input_nodes=inputs,
+                         output_type=OutputType.UNKNOWN, is_python_local_data=False)
+
+    def code_line(self, var_name: str, unnamed_input_vars: Sequence[str],
+                  named_input_vars: Dict[str, str]) -> str:
+        return f'{var_name}={self._list_source._dml_name}[{self._key}];'
+
+    def as_matrix(self) -> Matrix:
+        ent = self._list_source[self._key]
+        res = Matrix(self.sds_context, "as.matrix", [ent])
+        self._list_source._outputs[self._key] = res
+        return res
+    
+    def as_frame(self) -> Frame:
+        ent = self._list_source[self._key]
+        res = Frame(self.sds_context, "as.frame", [ent])
+        self._list_source._outputs[self._key] = res
+        return res
+
+    def as_scalar(self) -> Scalar:
+        ent = self._list_source[self._key]
+        res = Scalar(self.sds_context, "as.scalar", [ent])
+        self._list_source._outputs[self._key] = res
+        return res
\ No newline at end of file
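
ListAccess is what indexing a List returns when the element type is not
known up front, e.g. for lists produced by sourced DML functions; the
as_matrix/as_frame/as_scalar casts also cache the typed node back into the
list. A sketch mirroring tests/list/test_list_unknown.py (assuming an open
SystemDSContext named sds):

    s = sds.source("./tests/list/return_list.dml", "func")
    lst = s.f()                 # List node with unknown element types
    m2 = lst[2].as_matrix()     # ListAccess cast to a Matrix node (1-based index, as in the tests)
    res = m2.compute()          # a 2x2 matrix filled with 2
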
diff --git a/src/main/python/systemds/operator/nodes/matrix.py b/src/main/python/systemds/operator/nodes/matrix.py
index 7a96499..e1322d4 100644
--- a/src/main/python/systemds/operator/nodes/matrix.py
+++ b/src/main/python/systemds/operator/nodes/matrix.py
@@ -28,16 +28,18 @@ import numpy as np
 from py4j.java_gateway import JavaObject, JVMView
 from systemds.operator import OperationNode, Scalar
 from systemds.utils.consts import VALID_INPUT_TYPES
-from systemds.utils.converters import numpy_to_matrix_block
+from systemds.utils.converters import numpy_to_matrix_block, matrix_block_to_numpy
 from systemds.script_building.dag import OutputType
 
 from systemds.utils.consts import VALID_INPUT_TYPES, BINARY_OPERATIONS, VALID_ARITHMETIC_TYPES
 
+
 class Matrix(OperationNode):
     _np_array: np.array
 
     def __init__(self, sds_context: 'SystemDSContext', operation: str,
-                 unnamed_input_nodes: Union[str,Iterable[VALID_INPUT_TYPES]] = None,
+                 unnamed_input_nodes: Union[str,
+                                            Iterable[VALID_INPUT_TYPES]] = None,
                  named_input_nodes: Dict[str, VALID_INPUT_TYPES] = None,
                  local_data: np.array = None) -> 'Matrix':
 
@@ -72,6 +74,10 @@ class Matrix(OperationNode):
         else:
             return super().compute(verbose, lineage)
 
+    def _parse_output_result_variables(self, result_variables):
+        return matrix_block_to_numpy(self.sds_context.java_gateway.jvm,
+                                     result_variables.getMatrixBlock(self._script.out_var_name[0]))
+
     def _is_numpy(self) -> bool:
         return self._np_array is not None
 
@@ -146,7 +152,7 @@ class Matrix(OperationNode):
     def __matmul__(self, other: 'Matrix') -> 'Matrix':
         return Matrix(self.sds_context, '%*%', [self, other])
 
-    def sum(self, axis: int = None) -> 'Matrix':
+    def sum(self, axis: int = None) -> 'OperationNode':
         """Calculate sum of matrix.
 
         :param axis: can be 0 or 1 to do either row or column sums
@@ -161,7 +167,7 @@ class Matrix(OperationNode):
         raise ValueError(
             f"Axis has to be either 0, 1 or None, for column, row or complete {self.operation}")
 
-    def mean(self, axis: int = None) -> 'Matrix':
+    def mean(self, axis: int = None) -> 'OperationNode':
         """Calculate mean of matrix.
 
         :param axis: can be 0 or 1 to do either row or column means
@@ -176,7 +182,7 @@ class Matrix(OperationNode):
         raise ValueError(
             f"Axis has to be either 0, 1 or None, for column, row or complete {self.operation}")
 
-    def var(self, axis: int = None) -> 'Matrix':
+    def var(self, axis: int = None) -> 'OperationNode':
         """Calculate variance of matrix.
 
         :param axis: can be 0 or 1 to do either row or column vars
@@ -268,7 +274,7 @@ class Matrix(OperationNode):
         unnamed_inputs.append(moment)
         return Matrix(self.sds_context, 'moment', unnamed_inputs, output_type=OutputType.DOUBLE)
 
-    def cholesky(self, safe: bool = False) -> 'OperationNode':
+    def cholesky(self, safe: bool = False) -> 'Matrix':
         """ Computes the Cholesky decomposition of a symmetric, positive definite matrix
 
         :param safe: default value is False, if flag is True additional checks to ensure
@@ -277,7 +283,7 @@ class Matrix(OperationNode):
         """
         return Matrix(self.sds_context, 'cholesky', [self])
 
-    def to_one_hot(self, num_classes: int) -> 'OperationNode':
+    def to_one_hot(self, num_classes: int) -> 'Matrix':
         """ OneHot encode the matrix.
 
         It is assumed that there is only one column to encode, and all values are whole numbers > 0
@@ -307,7 +313,7 @@ class Matrix(OperationNode):
         """
         return Matrix(self.sds_context, "cbind", [self, other])
 
-    def t(self) -> 'OperationNode':
+    def t(self) -> 'Matrix':
         """ Transposes the input
 
         :return: the OperationNode representing this operation
@@ -315,7 +321,7 @@ class Matrix(OperationNode):
         return Matrix(self.sds_context, 't', [self])
 
     def order(self, by: int = 1, decreasing: bool = False,
-              index_return: bool = False) -> 'OperationNode':
+              index_return: bool = False) -> 'Matrix':
         """ Sort by a column of the matrix X in increasing/decreasing order and returns either the index or data
 
         :param by: sort matrix by this column number
@@ -327,10 +333,18 @@ class Matrix(OperationNode):
         named_input_nodes = {'target': self, 'by': by, 'decreasing': str(decreasing).upper(),
                              'index.return': str(index_return).upper()}
 
-        return OperationNode(self.sds_context, 'order', [], named_input_nodes=named_input_nodes)
+        return Matrix(self.sds_context, 'order', [], named_input_nodes=named_input_nodes)
 
-    def to_string(self, **kwargs: Dict[str, VALID_INPUT_TYPES]) -> 'OperationNode':
+    def to_string(self, **kwargs: Dict[str, VALID_INPUT_TYPES]) -> 'Scalar':
         """ Converts the input to a string representation.
         :return: `Scalar` containing the string.
         """
-        return Scalar(self.sds_context, 'toString', [self], kwargs, output_type=OutputType.STRING)
\ No newline at end of file
+        return Scalar(self.sds_context, 'toString', [self], kwargs, output_type=OutputType.STRING)
+
+    def rev(self) -> 'Matrix':
+        """ Reverses the rows
+
+        :return: the OperationNode representing this operation
+        """
+        return Matrix(self.sds_context, 'rev', [self])
+
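
A quick sketch of the relocated rev(), now returning a typed Matrix node
(assuming an open SystemDSContext named sds and numpy as np):

    m = sds.from_numpy(np.array([[1., 2.], [3., 4.]]))
    flipped = m.rev().compute()   # should reverse the row order: [[3., 4.], [1., 2.]]
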
diff --git a/src/main/python/systemds/operator/nodes/multi_return.py b/src/main/python/systemds/operator/nodes/multi_return.py
new file mode 100644
index 0000000..c14a7b0
--- /dev/null
+++ b/src/main/python/systemds/operator/nodes/multi_return.py
@@ -0,0 +1,85 @@
+# -------------------------------------------------------------
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# -------------------------------------------------------------
+
+__all__ = ["MultiReturn"]
+
+from typing import Dict, Sequence, Tuple, Union, Iterable, List
+
+import numpy as np
+from py4j.java_gateway import JavaObject
+
+from systemds.operator import OperationNode
+from systemds.script_building.dag import OutputType
+from systemds.utils.consts import VALID_INPUT_TYPES
+from systemds.utils.converters import matrix_block_to_numpy,frame_block_to_pandas
+from systemds.utils.helpers import create_params_string
+
+
+class MultiReturn(OperationNode):
+
+    def __init__(self, sds_context: 'SystemDSContext', operation,
+                 output_nodes: List[OperationNode],
+                 unnamed_input_nodes: Union[str,
+                                            Iterable[VALID_INPUT_TYPES]] = None,
+                 named_input_nodes: Dict[str, VALID_INPUT_TYPES] = None):
+
+        self._outputs = output_nodes
+
+        super().__init__(sds_context, operation, unnamed_input_nodes,
+                         named_input_nodes, OutputType.MULTI_RETURN, False)
+
+    def __getitem__(self, key):
+        return self._outputs[key]
+
+    def code_line(self, var_name: str, unnamed_input_vars: Sequence[str],
+                  named_input_vars: Dict[str, str]) -> str:
+
+        inputs_comma_sep = create_params_string(
+            unnamed_input_vars, named_input_vars)
+        output = "["
+        for idx, output_node in enumerate(self._outputs):
+            name = f'{var_name}_{idx}'
+            output_node.dml_name = name
+            output += f'{name},'
+
+        output = output[:-1] + "]"
+
+        return f'{output}={self.operation}({inputs_comma_sep});'
+
+    def _parse_output_result_variables(self, result_variables):
+        result_var = []
+        jvmV = self.sds_context.java_gateway.jvm
+        for idx, v in enumerate(self._script.out_var_name):
+            out_type =self._outputs[idx].output_type
+            if out_type == OutputType.MATRIX:
+                result_var.append(
+                    matrix_block_to_numpy(jvmV, result_variables.getMatrixBlock(v)))
+            elif out_type == OutputType.FRAME:
+                result_var.append(
+                    frame_block_to_pandas(jvmV, result_variables.getFrameBlock(v)))
+            elif out_type == OutputType.DOUBLE:
+                result_var.append(result_variables.getDouble(v))
+            else:
+                raise NotImplementedError("Not Implemented Support of type" + out_type)
+        return result_var
+
+    def __iter__(self):
+        return iter(self._outputs)
\ No newline at end of file
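
The same wiring pattern used by Frame.transform_encode should apply to other
multi-output operations; a hedged sketch with a hypothetical builtin name
(not part of this commit):

    from systemds.operator import Matrix, MultiReturn

    def two_output_op(sds, frame):
        X = Matrix(sds, "")   # placeholder nodes; the MultiReturn assigns their dml_name
        Y = Matrix(sds, "")
        op = MultiReturn(sds, "someTwoOutputBuiltin", [X, Y],
                         named_input_nodes={"target": frame})
        # Point the outputs back at the MultiReturn so the DAG walk reaches it.
        X._unnamed_input_nodes = [op]
        Y._unnamed_input_nodes = [op]
        return op
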
diff --git a/src/main/python/systemds/operator/nodes/scalar.py b/src/main/python/systemds/operator/nodes/scalar.py
index d3b0446..cd27d30 100644
--- a/src/main/python/systemds/operator/nodes/scalar.py
+++ b/src/main/python/systemds/operator/nodes/scalar.py
@@ -34,21 +34,21 @@ from systemds.script_building.dag import OutputType
 from systemds.utils.consts import VALID_INPUT_TYPES, BINARY_OPERATIONS, VALID_ARITHMETIC_TYPES
 
 
-
 class Scalar(OperationNode):
-    __assign : bool
+    __assign: bool
 
     def __init__(self, sds_context: 'SystemDSContext', operation: str,
                  unnamed_input_nodes: Iterable[VALID_INPUT_TYPES] = None,
                  named_input_nodes: Dict[str, VALID_INPUT_TYPES] = None,
-                 output_type : OutputType = OutputType.DOUBLE,
-                 assign : bool = False) -> 'Scalar':
+                 output_type: OutputType = OutputType.DOUBLE,
+                 assign: bool = False) -> 'Scalar':
         self.__assign = assign
         super().__init__(sds_context, operation, unnamed_input_nodes=unnamed_input_nodes,
                          named_input_nodes=named_input_nodes, output_type=output_type)
 
     def pass_python_data_to_prepared_script(self, sds, var_name: str, prepared_script: JavaObject) -> None:
-        raise RuntimeError('Scalar Operation Nodes, should not have python data input')
+        raise RuntimeError(
+            'Scalar Operation Nodes, should not have python data input')
 
     def code_line(self, var_name: str, unnamed_input_vars: Sequence[str],
                   named_input_vars: Dict[str, str]) -> str:
@@ -60,6 +60,14 @@ class Scalar(OperationNode):
     def compute(self, verbose: bool = False, lineage: bool = False) -> Union[np.array]:
         return super().compute(verbose, lineage)
 
+    def _parse_output_result_variables(self, result_variables):
+        if self.output_type == OutputType.DOUBLE:
+            return result_variables.getDouble(self._script.out_var_name[0])
+        elif self.output_type == OutputType.STRING:
+            return result_variables.getString(self._script.out_var_name[0])
+        else:
+            raise NotImplemented("Not currently support scalar type: " + self.output_type)
+
     def __add__(self, other: VALID_ARITHMETIC_TYPES) -> 'Scalar':
         return Scalar(self.sds_context, '+', [self, other])
 
@@ -214,4 +222,4 @@ class Scalar(OperationNode):
         """ Converts the input to a string representation.
         :return: `Scalar` containing the string.
         """
-        return Scalar(self.sds_context, 'toString', [self], kwargs, output_type=OutputType.STRING)
\ No newline at end of file
+        return Scalar(self.sds_context, 'toString', [self], named_input_nodes=kwargs, output_type=OutputType.STRING)
diff --git a/src/main/python/systemds/operator/nodes/source.py b/src/main/python/systemds/operator/nodes/source.py
index 28853b5..46e1799 100644
--- a/src/main/python/systemds/operator/nodes/source.py
+++ b/src/main/python/systemds/operator/nodes/source.py
@@ -26,7 +26,7 @@ from typing import (TYPE_CHECKING, Dict, Iterable, Optional, Sequence, Tuple,
                     Union)
 
 import numpy as np
-from systemds.operator import Matrix, OperationNode, Scalar
+from systemds.operator import Matrix, OperationNode, Scalar, List
 from systemds.script_building.dag import OutputType
 
 
@@ -106,7 +106,7 @@ class Func(object):
         elif var_l[0] == 'b':  # boolean
             return (self.split_to_value_and_def(var[7:], True), 'Scalar')
         elif var_l[0] == 'l': # list[unknown]
-            return (self.split_to_value_and_def(var[13:]), 'OperationNode')
+            return (self.split_to_value_and_def(var[13:]), 'List')
         elif var_l[0] == 's': # string
             return (self.split_to_value_and_def(var[6:]), 'Scalar')
         else:
@@ -135,17 +135,12 @@ class Source(OperationNode):
                          f'"{path}"', output_type=OutputType.IMPORT)
         self.__name = name
         functions = self.__parse_functions_from_script(path)
-        # if print_imported_methods:
-        #     current_functions = set(dir(self))
 
         # Add all the functions found in the source file to this object.
         for id, f in enumerate(functions):
             func = f.get_func(sds_context, name, id, print_imported_methods)
             setattr(self, f._name, MethodType(func, self))
 
-        # if print_imported_methods:
-        #     print(set(dir(self)) - current_functions)
-
     def __parse_functions_from_script(self, path: str) -> Iterable[Func]:
         lines = self.__parse_lines_with_filter(path)
         functions = []
diff --git a/src/main/python/systemds/operator/operation_node.py b/src/main/python/systemds/operator/operation_node.py
index 6dcd56c..f3e295d 100644
--- a/src/main/python/systemds/operator/operation_node.py
+++ b/src/main/python/systemds/operator/operation_node.py
@@ -19,17 +19,17 @@
 #
 # -------------------------------------------------------------
 
-from typing import Union, Optional, Iterable, Dict, Sequence, Tuple, TYPE_CHECKING
 from multiprocessing import Process
+from typing import (TYPE_CHECKING, Dict, Iterable, Optional, Sequence, Tuple,
+                    Union)
 
 import numpy as np
-from py4j.java_gateway import JVMView, JavaObject
-
-from systemds.utils.consts import VALID_INPUT_TYPES, BINARY_OPERATIONS, VALID_ARITHMETIC_TYPES
-from systemds.utils.helpers import create_params_string
-from systemds.utils.converters import matrix_block_to_numpy, frame_block_to_pandas
+from py4j.java_gateway import JavaObject, JVMView
+from systemds.script_building.dag import DAGNode, OutputType
 from systemds.script_building.script import DMLScript
-from systemds.script_building.dag import OutputType, DAGNode
+from systemds.utils.consts import (BINARY_OPERATIONS, VALID_ARITHMETIC_TYPES,
+                                   VALID_INPUT_TYPES)
+from systemds.utils.helpers import create_params_string
 
 if TYPE_CHECKING:
     # to avoid cyclic dependencies during runtime
@@ -44,14 +44,13 @@ class OperationNode(DAGNode):
     _script: Optional[DMLScript]
     _output_types: Optional[Iterable[VALID_INPUT_TYPES]]
     _source_node: Optional["DAGNode"]
-    
+
     def __init__(self, sds_context: 'SystemDSContext', operation: str,
                  unnamed_input_nodes: Union[str,
                                             Iterable[VALID_INPUT_TYPES]] = None,
                  named_input_nodes: Dict[str, VALID_INPUT_TYPES] = None,
                  output_type: OutputType = OutputType.MATRIX,
                  is_python_local_data: bool = False):
-                 
         """
         Create general `OperationNode`
 
@@ -93,13 +92,14 @@ class OperationNode(DAGNode):
                 print("SCRIPT:")
                 print(self._script.dml_script)
 
+
             if lineage:
                 result_variables, self._lineage_trace = self._script.execute_with_lineage()
             else:
                 result_variables = self._script.execute()
 
             if result_variables is not None:
-                self._result_var = self.__parse_output_result_variables(
+                self._result_var = self._parse_output_result_variables(
                     result_variables)
 
         if verbose:
@@ -108,32 +108,14 @@ class OperationNode(DAGNode):
             for y in self.sds_context.get_stderr():
                 print(y)
 
+        self._script.clear(self)
         if lineage:
             return self._result_var, self._lineage_trace
         else:
             return self._result_var
 
-    def __parse_output_result_variables(self, result_variables):
-        if self.output_type == OutputType.DOUBLE:
-            return self.__parse_output_result_double(result_variables, self._script.out_var_name[0])
-        elif self.output_type == OutputType.MATRIX:
-            return self.__parse_output_result_matrix(result_variables, self._script.out_var_name[0])
-        elif self.output_type == OutputType.LIST:
-            return self.__parse_output_result_list(result_variables)
-        elif self.output_type == OutputType.FRAME:
-            return self.__parse_output_result_frame(result_variables, self._script.out_var_name[0])
-
-    def __parse_output_result_double(self, result_variables, var_name):
-        return result_variables.getDouble(var_name)
-
-    def __parse_output_result_matrix(self, result_variables, var_name):
-        return matrix_block_to_numpy(self.sds_context.java_gateway.jvm,
-                                     result_variables.getMatrixBlock(var_name))
-
-    def __parse_output_result_frame(self, result_variables, var_name):
-        return frame_block_to_pandas(
-            self.sds_context, result_variables.getFrameBlock(var_name)
-        )
+    def _parse_output_result_variables(self, result_variables):
+        raise NotImplementedError("This method should be overwritten by subclasses")
 
     def get_lineage_trace(self) -> str:
         """Get the lineage trace for this node.
@@ -156,12 +138,11 @@ class OperationNode(DAGNode):
                 unnamed_input_vars) == 2, 'Binary Operations need exactly two input variables'
             return f'{var_name}={unnamed_input_vars[0]}{self.operation}{unnamed_input_vars[1]}'
 
-        inputs_comma_sep = create_params_string(unnamed_input_vars, named_input_vars)
+        inputs_comma_sep = create_params_string(
+            unnamed_input_vars, named_input_vars)
 
         if self.output_type == OutputType.NONE:
             return f'{self.operation}({inputs_comma_sep});'
-        # elif self.output_type == OutputType.ASSIGN:
-        #     return f'{var_name}={self.operation};'
         else:
             return f'{var_name}={self.operation}({inputs_comma_sep});'
 
@@ -169,47 +150,6 @@ class OperationNode(DAGNode):
         raise NotImplementedError(
             'Operation node has no python local data. Missing implementation in derived class?')
 
-    def _check_matrix_op(self):
-        """Perform checks to assure operation is allowed to be performed on data type of this `OperationNode`
-
-        :raise: AssertionError
-        """
-        assert self.output_type == OutputType.MATRIX, f'{self.operation} only supported for matrices'
-
-    def _check_frame_op(self):
-        """Perform checks to assure operation is allowed to be performed on data type of this `OperationNode`
-
-        :raise: AssertionError
-        """
-        assert self.output_type == OutputType.FRAME, f'{self.operation} only supported for frames'
-
-    def _check_matrix_or_frame_op(self):
-        """Perform checks to assure operation is allowed to be performed on data type of this `OperationNode`
-
-        :raise: AssertionError
-        """
-        assert (
-            self.output_type == OutputType.FRAME
-            or self.output_type == OutputType.MATRIX
-        ), f"{self.operation} only supported for frames or matrices"
-
-    def _check_equal_op_type_as(self, other: "OperationNode"):
-        """Perform checks to assure operation is equal to 'other'. Used for rBind and cBind type equality check.
-
-        :raise: AssertionError
-        """
-        assert (
-            self.output_type == other.output_type
-        ), f"{self.operation} only supported for Nodes of equal output-type. Got self: {self.output_type} and other: {other.output_type}"
-
-    def _check_other(self, other: "OperationNode", expectedOutputType: OutputType):
-        """Perform check to assure other operation has expected output type.
-
-        :raise: AssertionError
-        """
-        assert other.output_type == expectedOutputType, "not correctly asserted output types expected: " + \
-            str(expectedOutputType) + " got " + str(other.output_type)
-
     def write(self, destination: str, format: str = "binary", **kwargs: Dict[str, VALID_INPUT_TYPES]) -> 'OperationNode':
         """ Write input to disk. 
         The written format is easily read by SystemDSContext.read(). 
@@ -230,16 +170,3 @@ class OperationNode(DAGNode):
         To get the returned string look at the stdout of SystemDSContext.
         """
         return OperationNode(self.sds_context, 'print', [self], kwargs, output_type=OutputType.NONE)
-
-    def rev(self) -> 'OperationNode':
-        """ Reverses the rows
-
-        :return: the OperationNode representing this operation
-        """
-        return OperationNode(self.sds_context, 'rev', [self])
-
-    def to_string(self, **kwargs: Dict[str, VALID_INPUT_TYPES]) -> 'OperationNode':
-        """ Converts the input to a string representation.
-        :return: `Scalar` containing the string.
-        """
-        return OperationNode(self.sds_context, 'toString', [self], kwargs, output_type=OutputType.STRING)
\ No newline at end of file
diff --git a/src/main/python/systemds/script_building/dag.py b/src/main/python/systemds/script_building/dag.py
index 2ca2e8f..f156b6c 100644
--- a/src/main/python/systemds/script_building/dag.py
+++ b/src/main/python/systemds/script_building/dag.py
@@ -38,11 +38,13 @@ class OutputType(Enum):
     DOUBLE = auto()
     FRAME = auto()
     LIST = auto()
+    MULTI_RETURN = auto()
     MATRIX = auto()
     NONE = auto()
     SCALAR = auto()
     STRING = auto()
     IMPORT = auto()
+    UNKNOWN = auto()
 
     @staticmethod
     def from_str(label: Union[str, VALID_INPUT_TYPES]):
diff --git a/src/main/python/systemds/script_building/script.py b/src/main/python/systemds/script_building/script.py
index 3eed51c..649e2ab 100644
--- a/src/main/python/systemds/script_building/script.py
+++ b/src/main/python/systemds/script_building/script.py
@@ -35,10 +35,6 @@ if TYPE_CHECKING:
 class DMLScript:
     """DMLScript is the class used to describe our intended behavior in DML. This script can be then executed to
     get the results.
-
-    TODO caching
-
-    TODO rerun with different inputs without recompilation
     """
     sds_context: 'SystemDSContext'
     dml_script: str
@@ -159,9 +155,9 @@ class DMLScript:
         """
         baseOutVarString = self._dfs_dag_nodes(dag_root)
         if dag_root.output_type != OutputType.NONE:
-            if dag_root.output_type == OutputType.LIST:
+            if dag_root.output_type == OutputType.MULTI_RETURN:
                 self.out_var_name = []
-                for idx, output_node in enumerate(dag_root.named_output_nodes):
+                for idx, output_node in enumerate(dag_root._outputs):
                     self.add_code(
                         f'write({baseOutVarString}_{idx}, \'./tmp_{idx}\');')
                     self.out_var_name.append(f'{baseOutVarString}_{idx}')
@@ -169,6 +165,10 @@ class DMLScript:
                 self.out_var_name.append(baseOutVarString)
                 self.add_code(f'write({baseOutVarString}, \'./tmp\');')
 
+    def clear(self, dag_root: DAGNode):
+        self._dfs_clear_dag_nodes(dag_root)
+        self._variable_counter = 0
+
     def _dfs_dag_nodes(self, dag_node: VALID_INPUT_TYPES) -> str:
         """Uses Depth-First-Search to create code from DAG
 
@@ -180,6 +180,8 @@ class DMLScript:
                 return 'TRUE' if dag_node else 'FALSE'
             return str(dag_node)
 
+        # If the node already has a name, it is already defined
+        # in the script, so reuse it.
         if dag_node.dml_name != "":
             return dag_node.dml_name
 
@@ -193,7 +195,8 @@ class DMLScript:
         # for each node do the dfs operation and save the variable names in `input_var_names`
         # get variable names of unnamed parameters
 
-        unnamed_input_vars = [self._dfs_dag_nodes(input_node) for input_node in dag_node.unnamed_input_nodes]
+        unnamed_input_vars = [self._dfs_dag_nodes(
+            input_node) for input_node in dag_node.unnamed_input_nodes]
 
         named_input_vars = {}
         for name, input_node in dag_node.named_input_nodes.items():
@@ -202,15 +205,30 @@ class DMLScript:
                 dag_node.dml_name = named_input_vars[name] + name
                 return dag_node.dml_name
 
+        # Check whether the node was assigned a name while handling a multi-return;
+        # if so, return that name.
+        if dag_node.dml_name != "":
+            return dag_node.dml_name
+
         dag_node.dml_name = self._next_unique_var()
 
         if dag_node.is_python_local_data:
             self.add_input_from_python(dag_node.dml_name, dag_node)
 
-        code_line = dag_node.code_line(dag_node.dml_name, unnamed_input_vars, named_input_vars)
+        code_line = dag_node.code_line(
+            dag_node.dml_name, unnamed_input_vars, named_input_vars)
         self.add_code(code_line)
         return dag_node.dml_name
 
+    def _dfs_clear_dag_nodes(self, dag_node: VALID_INPUT_TYPES) -> str:
+        if not isinstance(dag_node, DAGNode):
+            return
+        dag_node.dml_name = ""
+        for n in dag_node.unnamed_input_nodes:
+            self._dfs_clear_dag_nodes(n)
+        for name, n in dag_node.named_input_nodes.items():
+            self._dfs_clear_dag_nodes(n)
+
     def _next_unique_var(self) -> str:
         """Gets the next unique variable name
 
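
A sketch of what the new clear() step appears to enable: computing the same
DAG more than once without stale variable names (assuming an open
SystemDSContext named sds and numpy as np, and that no other state is cached):

    node = sds.from_numpy(np.array([1., 2., 3.])) + 1
    first = node.compute()
    second = node.compute()   # clear() reset every dml_name, so the script is rebuilt cleanly
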
diff --git a/src/main/python/tests/frame/test_transform_encode.py b/src/main/python/tests/frame/test_transform_encode.py
index ced3886..70b1605 100644
--- a/src/main/python/tests/frame/test_transform_encode.py
+++ b/src/main/python/tests/frame/test_transform_encode.py
@@ -69,5 +69,20 @@ class TestTransformEncode(unittest.TestCase):
             self.assertTrue(M[col_name].nunique() == pd_F1[col_name].nunique())
 
 
+    def test_encode_recode_and_use_matrix(self):
+        with open(self.JSPEC_PATH) as jspec_file:
+            JSPEC = json.load(jspec_file)
+        F1 = self.sds.read(
+            self.HOMES_PATH,
+            data_type="frame",
+            schema=self.HOMES_SCHEMA,
+            format="csv",
+            header=True,
+        )
+        jspec = self.sds.read(self.JSPEC_PATH, data_type="scalar", value_type="string")
+        X, M = F1.transform_encode(spec=jspec)
+        xm = X + 1
+        res = xm.compute(verbose=True)
+
 if __name__ == "__main__":
     unittest.main(exit=False)
diff --git a/src/main/python/systemds/operator/__init__.py b/src/main/python/tests/list/__init__.py
similarity index 68%
copy from src/main/python/systemds/operator/__init__.py
copy to src/main/python/tests/list/__init__.py
index cda9ba2..e66abb4 100644
--- a/src/main/python/systemds/operator/__init__.py
+++ b/src/main/python/tests/list/__init__.py
@@ -18,13 +18,3 @@
 # under the License.
 #
 # -------------------------------------------------------------
-
-from systemds.operator.operation_node import OperationNode
-from systemds.operator.nodes.scalar import Scalar
-from systemds.operator.nodes.matrix import Matrix
-from systemds.operator.nodes.frame import Frame
-from systemds.operator.nodes.source import Source
-from systemds.operator.nodes.list import List
-from systemds.operator import algorithm
-
-__all__ = [OperationNode, algorithm, Scalar, List, Matrix, Frame, Source]
diff --git a/src/main/python/systemds/operator/__init__.py b/src/main/python/tests/list/return_list.dml
similarity index 59%
copy from src/main/python/systemds/operator/__init__.py
copy to src/main/python/tests/list/return_list.dml
index cda9ba2..cb316e3 100644
--- a/src/main/python/systemds/operator/__init__.py
+++ b/src/main/python/tests/list/return_list.dml
@@ -1,4 +1,4 @@
-# -------------------------------------------------------------
+#-------------------------------------------------------------
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -17,14 +17,11 @@
 # specific language governing permissions and limitations
 # under the License.
 #
-# -------------------------------------------------------------
+#-------------------------------------------------------------
 
-from systemds.operator.operation_node import OperationNode
-from systemds.operator.nodes.scalar import Scalar
-from systemds.operator.nodes.matrix import Matrix
-from systemds.operator.nodes.frame import Frame
-from systemds.operator.nodes.source import Source
-from systemds.operator.nodes.list import List
-from systemds.operator import algorithm
-
-__all__ = [OperationNode, algorithm, Scalar, List, Matrix, Frame, Source]
+f = function() return(list[unknown] C){
+    m1 = matrix(1,1,1)
+    m2 = matrix(2,2,2)
+    m3 = matrix(3,3,3)
+    C = list(m1, m2, m3)
+}
\ No newline at end of file
diff --git a/src/main/python/tests/source/test_source_neural_net.py b/src/main/python/tests/list/test_list.py
similarity index 53%
copy from src/main/python/tests/source/test_source_neural_net.py
copy to src/main/python/tests/list/test_list.py
index 07f2e02..ada06c4 100644
--- a/src/main/python/tests/source/test_source_neural_net.py
+++ b/src/main/python/tests/list/test_list.py
@@ -23,11 +23,15 @@ import unittest
 
 import numpy as np
 from systemds.context import SystemDSContext
+from systemds.operator.algorithm import pca
 
-class TestSource_NeuralNet(unittest.TestCase):
+from systemds.operator import List
+from systemds.script_building.dag import OutputType
+
+
+class TestListOperations(unittest.TestCase):
 
     sds: SystemDSContext = None
-    src_path: str = "./tests/source/neural_net_source.dml"
 
     @classmethod
     def setUpClass(cls):
@@ -37,14 +41,31 @@ class TestSource_NeuralNet(unittest.TestCase):
     def tearDownClass(cls):
         cls.sds.close()
 
-    def test_01(self):
-        ## Verify that it parses it...
-        s = self.sds.source(self.src_path,"test")
-        
-    def test_test_method(self):
-        ## Verify that we can call a function.
-        m = self.sds.full((1,2), 1)
-        self.sds.source(self.src_path,"test").test_function(m).to_string().compute()
+    def test_creation(self):
+        """
+        Tests the creation of a List object via the SystemDSContext
+        """
+        m1 = np.array([1., 2., 3.])
+        m1p = self.sds.from_numpy(m1)
+        m2 = np.array([4., 5., 6.])
+        m2p = self.sds.from_numpy(m2)
+        list_obj = self.sds.list(m1p, m2p)
+        tmp = list_obj[0] + list_obj[1]
+        res = tmp.compute().flatten()
+        self.assertTrue(np.allclose(m1 + m2, res))
+
+    def test_addition(self):
+        """
+        Tests the creation of a List object via the SystemDSContext and adds a value
+        """
+        m1 = np.array([1., 2., 3.])
+        m1p = self.sds.from_numpy(m1)
+        m2 = np.array([4., 5., 6.])
+        m2p = self.sds.from_numpy(m2)
+        list_obj = self.sds.list(m1p, m2p)
+        tmp = list_obj[0] + 2
+        res = tmp.compute().flatten()
+        self.assertTrue(np.allclose(m1 + 2, res))
 
 if __name__ == "__main__":
     unittest.main(exit=False)
diff --git a/src/main/python/tests/source/test_source_neural_net.py b/src/main/python/tests/list/test_list_unknown.py
similarity index 60%
copy from src/main/python/tests/source/test_source_neural_net.py
copy to src/main/python/tests/list/test_list_unknown.py
index 07f2e02..5ee7304 100644
--- a/src/main/python/tests/source/test_source_neural_net.py
+++ b/src/main/python/tests/list/test_list_unknown.py
@@ -23,11 +23,15 @@ import unittest
 
 import numpy as np
 from systemds.context import SystemDSContext
+from systemds.operator import List
+from systemds.operator.algorithm import pca
+from systemds.script_building.dag import OutputType
 
-class TestSource_NeuralNet(unittest.TestCase):
+
+class TestListOperationsUnknown(unittest.TestCase):
 
     sds: SystemDSContext = None
-    src_path: str = "./tests/source/neural_net_source.dml"
+    src_path: str = "./tests/list/return_list.dml"
 
     @classmethod
     def setUpClass(cls):
@@ -37,14 +41,21 @@ class TestSource_NeuralNet(unittest.TestCase):
     def tearDownClass(cls):
         cls.sds.close()
 
-    def test_01(self):
-        ## Verify that it parses it...
-        s = self.sds.source(self.src_path,"test")
-        
-    def test_test_method(self):
-        ## Verify that we can call a function.
-        m = self.sds.full((1,2), 1)
-        self.sds.source(self.src_path,"test").test_function(m).to_string().compute()
+    def test_access_other_index_1(self):
+        s = self.sds.source(self.src_path, "func")
+        res = s.f()[1].as_matrix().compute()[0]
+        self.assertEqual(1, res)
+
+    def test_access_other_index_2(self):
+        s = self.sds.source(self.src_path, "func")
+        res = s.f()[2].as_matrix().compute()
+        self.assertTrue(np.allclose(np.full((2, 2), 2), res))
+
+    def test_access_other_index_3(self):
+        s = self.sds.source(self.src_path, "func")
+        res = s.f()[3].as_matrix().compute()
+        self.assertTrue(np.allclose(np.full((3, 3), 3), res))
+
 
 if __name__ == "__main__":
     unittest.main(exit=False)
diff --git a/src/main/python/tests/list/test_operations.py b/src/main/python/tests/list/test_operations.py
deleted file mode 100644
index 818042f..0000000
--- a/src/main/python/tests/list/test_operations.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -------------------------------------------------------------
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# -------------------------------------------------------------
-
-import unittest
-
-import numpy as np
-from systemds.context import SystemDSContext
-from systemds.operator.algorithm import pca
-
-from systemds.operator import List
-from systemds.script_building.dag import OutputType
-
-
-class TestListOperations(unittest.TestCase):
-
-    sds: SystemDSContext = None
-
-    @classmethod
-    def setUpClass(cls):
-        cls.sds = SystemDSContext()
-
-    @classmethod
-    def tearDownClass(cls):
-        cls.sds.close()
-
-    def test_creation(self):
-        """
-        Tests the creation of a List object via the SystemDSContext
-        """
-        m1 = self.sds.from_numpy(np.array([1, 2, 3]))
-        m2 = self.sds.from_numpy(np.array([4, 5, 6]))
-        list_obj = self.sds.list(m1, m2)
-        tmp = list_obj[0] + list_obj[1]
-        res = tmp.compute()
-        self.assertTrue(np.allclose(m2, res))
-
-    def test_addition(self):
-        """
-        Tests the creation of a List object via the SystemDSContext and adds a value
-        """
-        m1 = self.sds.from_numpy(np.array([1, 2, 3]))
-        m2 = self.sds.from_numpy(np.array([4, 5, 6]))
-        list_obj = self.sds.list(m1, m2)
-        tmp = list_obj[0] + 2
-        res = tmp.compute()
-        self.assertTrue(np.allclose(m2 + 2, res))
-
-    def test_500x2b(self):
-        """
-        The purpose of this test is to show that an operation can be performed on the output of a multi output list node,
-        without the need of calculating the result first.
-        """
-        m1 = self.generate_matrices_for_pca(30, seed=1304)
-        node0 = self.sds.from_numpy(m1)
-        # print(features)
-        node1 = List(node0.sds_context, 'pca', named_input_nodes={"X": node0, "K": 1, "scale": "FALSE", "center": "FALSE"},
-                     outputs=[("res", OutputType.MATRIX), ("model", OutputType.MATRIX), ("scale", OutputType.MATRIX), ("center", OutputType.MATRIX)])
-        node2 = node1["res"].abs()
-        res = node2.compute(verbose=False)
-
-    def test_multiple_outputs(self):
-        """
-        The purpose of this test is to show that we can use multiple outputs
-        of a single list node in the DAG in one script
-        """
-        node0 = self.sds.from_numpy(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]))
-        node1 = self.sds.from_numpy(np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]))
-        params_dict = {'X': node0, 'Y': node1}
-        node2 = List(self.sds, 'split', named_input_nodes=params_dict,
-                     outputs=[("X_train", OutputType.MATRIX), ("X_test", OutputType.MATRIX), ("Y_train", OutputType.MATRIX), ("Y_test", OutputType.MATRIX)])
-        node3 = node2["X_train"] + node2["Y_train"]
-        # X_train and Y_train are of the same shape because node0 and node1 have both only one dimension.
-        # Therefore they can be added together
-        res = node3.compute(verbose=False)
-
-    def generate_matrices_for_pca(self, dims: int, seed: int = 1234):
-        np.random.seed(seed)
-
-        mu, sigma = 0, 0.1
-        s = np.random.normal(mu, sigma,  dims)
-
-        m1 = np.array(np.c_[np.copy(s) * 1, np.copy(s)*0.3], dtype=np.double)
-
-        return m1
-
-
-if __name__ == "__main__":
-    unittest.main(exit=False)
diff --git a/src/main/python/tests/source/test_source_neural_net.py b/src/main/python/tests/source/test_source_neural_net.py
index 07f2e02..5eff8c5 100644
--- a/src/main/python/tests/source/test_source_neural_net.py
+++ b/src/main/python/tests/source/test_source_neural_net.py
@@ -24,6 +24,7 @@ import unittest
 import numpy as np
 from systemds.context import SystemDSContext
 
+
 class TestSource_NeuralNet(unittest.TestCase):
 
     sds: SystemDSContext = None
@@ -38,13 +39,17 @@ class TestSource_NeuralNet(unittest.TestCase):
         cls.sds.close()
 
     def test_01(self):
-        ## Verify that it parses it...
-        s = self.sds.source(self.src_path,"test")
-        
+        # Verify that it parses it...
+        s = self.sds.source(self.src_path, "test")
+
     def test_test_method(self):
-        ## Verify that we can call a function.
-        m = self.sds.full((1,2), 1)
-        self.sds.source(self.src_path,"test").test_function(m).to_string().compute()
+        # Verify that we can call a function.
+        m = np.full((1, 2), 1)
+        res = self.sds.source(self.src_path, "test")\
+            .test_function(self.sds.full((1, 2), 1))[1]\
+            .as_matrix().compute()
+        self.assertTrue(np.allclose(m, res))
+
 
 if __name__ == "__main__":
     unittest.main(exit=False)