Posted to commits@toree.apache.org by ma...@apache.org on 2016/11/15 04:29:51 UTC

[2/3] incubator-toree git commit: Added a simple integration test for verifying that case classes and the Dataset API work

Added a simple integration test for verifying that case classes and the Dataset API work


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/2d478dc2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/2d478dc2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/2d478dc2

Branch: refs/heads/master
Commit: 2d478dc2722aa9c47585385a6fd33dacb337bcd9
Parents: 46fab76
Author: Marius van Niekerk <ma...@gmail.com>
Authored: Mon Oct 17 17:08:02 2016 -0400
Committer: mariusvniekerk <ma...@gmail.com>
Committed: Mon Nov 14 19:38:19 2016 -0500

----------------------------------------------------------------------
 test_toree.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/2d478dc2/test_toree.py
----------------------------------------------------------------------
diff --git a/test_toree.py b/test_toree.py
index db639e8..fe2d02b 100644
--- a/test_toree.py
+++ b/test_toree.py
@@ -17,6 +17,8 @@
 
 import unittest
 import jupyter_kernel_test
+import textwrap
+
 
 class ToreeScalaKernelTests(jupyter_kernel_test.KernelTests):
     # Required --------------------------------------
@@ -35,6 +37,16 @@ class ToreeScalaKernelTests(jupyter_kernel_test.KernelTests):
     test_statements_execute_result = [
         {'code': '6*7', 'result': '42'},
         {'code': 'sc.parallelize(List(1, 2, 3, 4)).map(_*2).reduce(_+_)', 'result': '20'},
+        {
+            'code': textwrap.dedent("""\
+                case class Foo(bar: Int)
+                val ses = spark
+                import ses.implicits._
+                import org.apache.spark.sql.functions._
+                val ds = spark.createDataset(Seq(Foo(1), Foo(2)))
+                ds.agg(sum($"bar")).collect.head"""),
+            'result': '3'
+        },
         {'code': '%showtypes on\n1', 'result': 'Int = 1'},
         {'code': '%showtypes off\n1', 'result': '1'}
     ]
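
For reference, the Scala snippet embedded in the new test case aliases the kernel-provided spark session to a local val before importing its implicits (so the import binds to a stable identifier), builds a Dataset from case class instances, and sums the bar column; the expected execute result is '3' (1 + 2). A roughly equivalent standalone sketch, assuming a locally created SparkSession outside of Toree (the object and application names below are illustrative only), would be:

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions._

    // Illustrative standalone harness; these names are not part of the Toree test.
    object DatasetCaseClassCheck {
      // Case class defined at object level so Spark can derive an encoder for it.
      case class Foo(bar: Int)

      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[*]")
          .appName("dataset-case-class-check")
          .getOrCreate()
        import spark.implicits._

        // Build a Dataset[Foo] directly from case class instances.
        val ds = spark.createDataset(Seq(Foo(1), Foo(2)))

        // Sum the bar column; the single result row holds the value 3.
        val total = ds.agg(sum($"bar")).collect().head.getLong(0)
        println(total)  // prints 3

        spark.stop()
      }
    }

This mirrors the code path the kernel test exercises, minus the notebook plumbing that jupyter_kernel_test drives through the Toree kernel.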