You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@toree.apache.org by ch...@apache.org on 2016/03/31 16:13:11 UTC

[1/6] incubator-toree git commit: Added support for interpreters to share sqlcontext

Repository: incubator-toree
Updated Branches:
  refs/heads/master 7129ce222 -> f8f3022ae


Added support for interpreters to share sqlcontext


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/bbf28661
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/bbf28661
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/bbf28661

Branch: refs/heads/master
Commit: bbf2866123385691f969842d2cc86a546171f5dd
Parents: 7129ce2
Author: Gino Bustelo <lb...@apache.org>
Authored: Tue Mar 15 17:18:50 2016 -0500
Committer: Gino Bustelo <lb...@apache.org>
Committed: Wed Mar 30 11:42:41 2016 -0500

----------------------------------------------------------------------
 etc/examples/notebooks/cars.json                |  5 +
 etc/examples/notebooks/people.json              |  3 +
 etc/examples/notebooks/sqlcontext_sharing.ipynb | 96 ++++++++++++++++++++
 .../main/resources/PySpark/pyspark_runner.py    | 13 +--
 4 files changed, 111 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/bbf28661/etc/examples/notebooks/cars.json
----------------------------------------------------------------------
diff --git a/etc/examples/notebooks/cars.json b/etc/examples/notebooks/cars.json
new file mode 100644
index 0000000..5617879
--- /dev/null
+++ b/etc/examples/notebooks/cars.json
@@ -0,0 +1,5 @@
+{"manufacturer": "Porsche","model": "911","price": 135000,"wiki":"http://en.wikipedia.org/wiki/Porsche_997"}
+{"manufacturer": "Nissan","model": "GT-R","price": 80000,"wiki":"http://en.wikipedia.org/wiki/Nissan_Gt-r"}
+{"manufacturer": "BMW","model": "M3","price": 60500,"wiki":"http://en.wikipedia.org/wiki/Bmw_m3"}
+{"manufacturer": "Audi","model": "S5","price": 53000,"wiki":"http://en.wikipedia.org/wiki/Audi_S5#Audi_S5"}
+{"manufacturer": "Audi","model": "TT","price": 40000,"wiki":"http://en.wikipedia.org/wiki/Audi_TT"}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/bbf28661/etc/examples/notebooks/people.json
----------------------------------------------------------------------
diff --git a/etc/examples/notebooks/people.json b/etc/examples/notebooks/people.json
new file mode 100644
index 0000000..0168759
--- /dev/null
+++ b/etc/examples/notebooks/people.json
@@ -0,0 +1,3 @@
+{"name":"Michael"}
+{"name":"Andy", "age":30}
+{"name":"Justin", "age":19}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/bbf28661/etc/examples/notebooks/sqlcontext_sharing.ipynb
----------------------------------------------------------------------
diff --git a/etc/examples/notebooks/sqlcontext_sharing.ipynb b/etc/examples/notebooks/sqlcontext_sharing.ipynb
new file mode 100644
index 0000000..7213c19
--- /dev/null
+++ b/etc/examples/notebooks/sqlcontext_sharing.ipynb
@@ -0,0 +1,96 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Create a DataFrame in Scala"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "val people = sqlContext.read.json(\"people.json\")\n",
+    "people.registerTempTable(\"people\")\n",
+    "people.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Read DataFrame in Python"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "%%PySpark\n",
+    "people= sqlContext.table(\"people\")\n",
+    "people.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Create a DataFrame in Python"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "%%PySpark\n",
+    "cars = sqlContext.read.json(\"cars.json\")\n",
+    "cars.registerTempTable(\"cars\")\n",
+    "cars.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Read DataFrame in Scala"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "val cars = sqlContext.table(\"cars\")\n",
+    "cars.show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Toree",
+   "language": "",
+   "name": "toree"
+  },
+  "language_info": {
+   "name": "scala"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/bbf28661/pyspark-interpreter/src/main/resources/PySpark/pyspark_runner.py
----------------------------------------------------------------------
diff --git a/pyspark-interpreter/src/main/resources/PySpark/pyspark_runner.py b/pyspark-interpreter/src/main/resources/PySpark/pyspark_runner.py
index 22569ea..52cf927 100644
--- a/pyspark-interpreter/src/main/resources/PySpark/pyspark_runner.py
+++ b/pyspark-interpreter/src/main/resources/PySpark/pyspark_runner.py
@@ -69,12 +69,8 @@ java_import(gateway.jvm, "scala.Tuple2")
 
 
 sc = None
+sqlContext = None
 
-#jconf = bridge.sparkConf()
-#conf = SparkConf(_jvm = gateway.jvm, _jconf = jconf)
-#sc = SparkContext(jsc = jsc, gateway = gateway, conf = conf)
-#sqlc = SQLContext(sc, bridge.sqlContext())
-#sqlContext = sqlc
 kernel = bridge.kernel()
 
 class Logger(object):
@@ -122,11 +118,16 @@ while True :
 
     if sc is None:
       jsc = kernel.javaSparkContext()
-      if jsc != None:
+      if jsc is not None:
         jconf = kernel.sparkConf()
         conf = SparkConf(_jvm = gateway.jvm, _jconf = jconf)
         sc = SparkContext(jsc = jsc, gateway = gateway, conf = conf)
 
+    if sqlContext is None:
+      jsqlContext = kernel.sqlContext()
+      if jsqlContext is not None and sc is not None:
+        sqlContext = SQLContext(sc, sqlContext=jsqlContext)
+
     if final_code:
       compiled_code = compile(final_code, "<string>", "exec")
       #sc.setJobGroup(jobGroup, "Spark Kernel")


[3/6] incubator-toree git commit: Fixed a mix-up during merge

Posted by ch...@apache.org.
Fixed a mix-up during merge


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/dc493ebb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/dc493ebb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/dc493ebb

Branch: refs/heads/master
Commit: dc493ebbac409376bca4ba6da6e3d8e137b3d502
Parents: 049b7fd
Author: Gino Bustelo <lb...@apache.org>
Authored: Wed Mar 30 10:10:23 2016 -0500
Committer: Gino Bustelo <lb...@apache.org>
Committed: Wed Mar 30 11:52:34 2016 -0500

----------------------------------------------------------------------
 .../org/apache/toree/boot/layer/ComponentInitialization.scala      | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/dc493ebb/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
index 17a5e59..69f71ae 100644
--- a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
@@ -80,6 +80,8 @@ trait StandardComponentInitialization extends ComponentInitialization {
 
     val kernel = initializeKernel(config, actorLoader, interpreterManager, commManager, pluginManager)
 
+    initializePlugins(config, pluginManager)
+
     initializeSparkContext(config, kernel, appName)
 
     interpreterManager.initializeInterpreters(kernel)


[4/6] incubator-toree git commit: Fixes to tests

Posted by ch...@apache.org.
Fixes to tests


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/049b7fd2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/049b7fd2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/049b7fd2

Branch: refs/heads/master
Commit: 049b7fd2fb2bdd839d50ffaed3e8c9f13a274195
Parents: 5fe5245
Author: Gino Bustelo <lb...@apache.org>
Authored: Tue Mar 29 17:06:31 2016 -0500
Committer: Gino Bustelo <lb...@apache.org>
Committed: Wed Mar 30 11:52:34 2016 -0500

----------------------------------------------------------------------
 .../boot/layer/ComponentInitialization.scala    | 42 ++++++++++++++------
 .../org/apache/toree/kernel/api/Kernel.scala    | 15 ++-----
 .../InterpreterActorSpecForIntegration.scala    |  9 +----
 .../PostProcessorSpecForIntegration.scala       | 10 +----
 .../scala/test/utils/DummyInterpreter.scala     | 14 -------
 .../interpreter/scala/ScalaInterpreter.scala    | 11 ++---
 .../AddExternalJarMagicSpecForIntegration.scala |  9 +----
 7 files changed, 42 insertions(+), 68 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/049b7fd2/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
index 61ca88b..17a5e59 100644
--- a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
@@ -22,6 +22,7 @@ import java.util.concurrent.ConcurrentHashMap
 
 import akka.actor.ActorRef
 import com.typesafe.config.Config
+import org.apache.spark.SparkConf
 import org.apache.toree.comm.{CommManager, CommRegistrar, CommStorage, KernelCommManager}
 import org.apache.toree.dependencies.{CoursierDependencyDownloader, DependencyDownloader}
 import org.apache.toree.interpreter._
@@ -72,26 +73,21 @@ trait StandardComponentInitialization extends ComponentInitialization {
     val (commStorage, commRegistrar, commManager) =
       initializeCommObjects(actorLoader)
 
-    val manager =  InterpreterManager(config)
-    val scalaInterpreter = manager.interpreters.get("Scala").orNull
+    val interpreterManager =  InterpreterManager(config)
 
     val dependencyDownloader = initializeDependencyDownloader(config)
-    val pluginManager = createPluginManager(
-      config, scalaInterpreter, dependencyDownloader)
+    val pluginManager = createPluginManager(config, interpreterManager, dependencyDownloader)
 
-    val kernel = initializeKernel(
-        config, actorLoader, manager, commManager, pluginManager
-    )
+    val kernel = initializeKernel(config, actorLoader, interpreterManager, commManager, pluginManager)
 
     initializeSparkContext(config, kernel, appName)
 
-    manager.initializeInterpreters(kernel)
+    interpreterManager.initializeInterpreters(kernel)
 
     val responseMap = initializeResponseMap()
 
-
     (commStorage, commRegistrar, commManager,
-      manager.defaultInterpreter.orNull, kernel,
+      interpreterManager.defaultInterpreter.orNull, kernel,
       dependencyDownloader, kernel.magics, pluginManager, responseMap)
 
   }
@@ -139,27 +135,47 @@ trait StandardComponentInitialization extends ComponentInitialization {
     commManager: CommManager,
     pluginManager: PluginManager
   ) = {
+
+    //kernel has a dependency on ScalaInterpreter to get the ClassServerURI for the SparkConf
+    //we need to pre-start the ScalaInterpreter
+    val scalaInterpreter = interpreterManager.interpreters("Scala")
+    scalaInterpreter.start()
+
     val kernel = new Kernel(
       config,
       actorLoader,
       interpreterManager,
       commManager,
       pluginManager
-    )
+    ){
+      override protected[toree] def createSparkConf(conf: SparkConf) = {
+        val theConf = super.createSparkConf(conf)
+
+        // TODO: Move SparkIMain to private and insert in a different way
+        logger.warn("Locked to Scala interpreter with SparkIMain until decoupled!")
+
+        // TODO: Construct class server outside of SparkIMain
+        logger.warn("Unable to control initialization of REPL class server!")
+        logger.info("REPL Class Server Uri: " + scalaInterpreter.classServerURI)
+        conf.set("spark.repl.class.uri", scalaInterpreter.classServerURI)
+
+        theConf
+      }
+    }
     pluginManager.dependencyManager.add(kernel)
 
     kernel
   }
 
   private def createPluginManager(
-    config: Config, interpreter: Interpreter,
+    config: Config, interpreterManager: InterpreterManager,
     dependencyDownloader: DependencyDownloader
   ) = {
     logger.debug("Constructing plugin manager")
     val pluginManager = new PluginManager()
 
     logger.debug("Building dependency map")
-    pluginManager.dependencyManager.add(interpreter)
+    pluginManager.dependencyManager.add(interpreterManager.interpreters.get("Scala").orNull)
     pluginManager.dependencyManager.add(dependencyDownloader)
     pluginManager.dependencyManager.add(config)
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/049b7fd2/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala b/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala
index 777cd0f..9e6fffb 100644
--- a/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala
+++ b/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala
@@ -371,24 +371,15 @@ class Kernel (
   }
 
   // TODO: Think of a better way to test without exposing this
-  protected[kernel] def createSparkConf(conf: SparkConf) = {
+  protected[toree] def createSparkConf(conf: SparkConf) = {
 
     logger.info("Setting deployMode to client")
     conf.set("spark.submit.deployMode", "client")
-
-    // TODO: Move SparkIMain to private and insert in a different way
-    logger.warn("Locked to Scala interpreter with SparkIMain until decoupled!")
-
-    // TODO: Construct class server outside of SparkIMain
-    logger.warn("Unable to control initialization of REPL class server!")
-    logger.info("REPL Class Server Uri: " + interpreter.classServerURI)
-    conf.set("spark.repl.class.uri", interpreter.classServerURI)
-
     conf
   }
 
   // TODO: Think of a better way to test without exposing this
-  protected[kernel] def initializeSparkContext(sparkConf: SparkConf): SparkContext = {
+  protected[toree] def initializeSparkContext(sparkConf: SparkConf): SparkContext = {
 
     logger.debug("Constructing new Spark Context")
     // TODO: Inject stream redirect headers in Spark dynamically
@@ -408,7 +399,7 @@ class Kernel (
     sparkContext
   }
 
-  protected[kernel] def initializeSqlContext(
+  protected[toree] def initializeSqlContext(
     sparkContext: SparkContext
   ): SQLContext = {
     val sqlContext: SQLContext = try {

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/049b7fd2/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala b/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
index 08effdb..b08ad54 100644
--- a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
+++ b/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
@@ -58,14 +58,6 @@ class InterpreterActorSpecForIntegration extends TestKit(
   private val interpreter = new ScalaInterpreter {
     override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
 
-    override protected def interpreterArgs(kernel: KernelLike): List[String] = {
-      Nil
-    }
-
-    override protected def maxInterpreterThreads(kernel: KernelLike): Int = {
-      TaskManager.DefaultMaximumWorkers
-    }
-
     override protected def bindKernelVariable(kernel: KernelLike): Unit = { }
   }
 
@@ -77,6 +69,7 @@ class InterpreterActorSpecForIntegration extends TestKit(
 
   before {
     output.reset()
+    interpreter.start()
     interpreter.init(mock[KernelLike])
 
     interpreter.doQuietly({

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/049b7fd2/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala b/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
index cfd7ea0..dc40937 100644
--- a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
+++ b/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
@@ -39,16 +39,10 @@ class PostProcessorSpecForIntegration extends FunSpec with Matchers
     scalaInterpreter = new ScalaInterpreter {
       override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
 
-      override protected def interpreterArgs(kernel: KernelLike): List[String] = {
-        Nil
-      }
-
-      override protected def maxInterpreterThreads(kernel: KernelLike): Int = {
-        TaskManager.DefaultMaximumWorkers
-      }
-
       override protected def bindKernelVariable(kernel: KernelLike): Unit = { }
     }
+
+    scalaInterpreter.start()
     scalaInterpreter.init(mock[KernelLike])
 
     postProcessor = new PostProcessor(scalaInterpreter)

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/049b7fd2/kernel/src/test/scala/test/utils/DummyInterpreter.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/test/utils/DummyInterpreter.scala b/kernel/src/test/scala/test/utils/DummyInterpreter.scala
index 9963ca1..8c29738 100644
--- a/kernel/src/test/scala/test/utils/DummyInterpreter.scala
+++ b/kernel/src/test/scala/test/utils/DummyInterpreter.scala
@@ -128,18 +128,4 @@ class DummyInterpreter(kernel: KernelLike) extends Interpreter {
    * @return The newly initialized interpreter
    */
   override def init(kernel: KernelLike): Interpreter = ???
-
-  /**
-   * Binds the SparkContext instance to the interpreter's namespace.
-   *
-   * @param sparkContext The SparkContext to bind
-   */
-  override def bindSparkContext(sparkContext: SparkContext): Unit = ???
-
-  /**
-   * Binds the SQLContext instance to the interpreter's namespace.
-   *
-   * @param sqlContext The SQLContext to bind
-   */
-  override def bindSqlContext(sqlContext: SQLContext): Unit = ???
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/049b7fd2/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
index 5a888fe..ce0b8ec 100644
--- a/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -22,7 +22,7 @@ import java.net.{URL, URLClassLoader}
 import java.nio.charset.Charset
 import java.util.concurrent.ExecutionException
 
-import com.typesafe.config.Config
+import com.typesafe.config.{ConfigFactory, Config}
 import org.apache.spark.SparkContext
 import org.apache.spark.repl.{SparkCommandLine, SparkIMain, SparkJLineCompletion}
 import org.apache.spark.sql.SQLContext
@@ -43,7 +43,7 @@ import scala.tools.nsc.util.{ClassPath, MergedClassPath}
 import scala.tools.nsc.{Global, Settings, io}
 import scala.util.{Try => UtilTry}
 
-class ScalaInterpreter(config:Config) extends Interpreter {
+class ScalaInterpreter(config:Config = ConfigFactory.empty) extends Interpreter {
 
   protected val logger = LoggerFactory.getLogger(this.getClass.getName)
 
@@ -74,8 +74,6 @@ class ScalaInterpreter(config:Config) extends Interpreter {
       TaskManager.DefaultMaximumWorkers
   }
 
-  start()
-
   protected def newSparkIMain(
     settings: Settings, out: JPrintWriter
   ): SparkIMain = {
@@ -232,7 +230,10 @@ class ScalaInterpreter(config:Config) extends Interpreter {
 
   protected def interpreterArgs(): List[String] = {
     import scala.collection.JavaConverters._
-    config.getStringList("interpreter_args").asScala.toList
+    if(config.hasPath("interpreter_args"))
+      config.getStringList("interpreter_args").asScala.toList
+    else
+      Nil
   }
 
   protected def bindKernelVariable(kernel: KernelLike): Unit = {

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/049b7fd2/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala b/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
index b4bf596..2863bec 100644
--- a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
+++ b/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
@@ -38,16 +38,9 @@ class AddExternalJarMagicSpecForIntegration
     interpreter = new ScalaInterpreter {
       override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
 
-      override protected def interpreterArgs(kernel: KernelLike): List[String] = {
-        Nil
-      }
-
-      override protected def maxInterpreterThreads(kernel: KernelLike): Int = {
-        TaskManager.DefaultMaximumWorkers
-      }
-
       override protected def bindKernelVariable(kernel: KernelLike): Unit = { }
     }
+    interpreter.start()
     interpreter.init(mock[KernelLike])
 
     StreamState.setStreams(outputStream = outputResult)


[6/6] incubator-toree git commit: A bit more on the example notebook

Posted by ch...@apache.org.
A bit more on the example notebook


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/f8f3022a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/f8f3022a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/f8f3022a

Branch: refs/heads/master
Commit: f8f3022aef68a2f6cea4bf665d89574358d0157b
Parents: 8dd92a1
Author: Gino Bustelo <lb...@apache.org>
Authored: Wed Mar 30 14:35:47 2016 -0500
Committer: Gino Bustelo <lb...@apache.org>
Committed: Wed Mar 30 14:35:47 2016 -0500

----------------------------------------------------------------------
 etc/examples/notebooks/sqlcontext_sharing.ipynb | 65 ++++++++++++++++++--
 1 file changed, 61 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/f8f3022a/etc/examples/notebooks/sqlcontext_sharing.ipynb
----------------------------------------------------------------------
diff --git a/etc/examples/notebooks/sqlcontext_sharing.ipynb b/etc/examples/notebooks/sqlcontext_sharing.ipynb
index 7213c19..be40dab 100644
--- a/etc/examples/notebooks/sqlcontext_sharing.ipynb
+++ b/etc/examples/notebooks/sqlcontext_sharing.ipynb
@@ -4,7 +4,45 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Create a DataFrame in Scala"
+    "# SQLContext Sharing <a name=\"top\"></a>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This example shows how Toree enables sharing of the SQLContext across the variety of languages that it supports (Scala, Python, R, SQL). To demostrate, this notebook will load data using one language and read it from another. Refer to the [Spark documentation](http://spark.apache.org/docs/latest/sql-programming-guide.html) for details about the DataFrame and SQL APIs."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "<div class=\"alert alert-info\" role=\"alert\" style=\"margin-top: 10px\">\n",
+    "<p><strong>Note</strong><p>\n",
+    "\n",
+    "<p>Due to an issue installing R and running it using DockerMachine, we are not able to show an example with R.</p>\n",
+    "</div>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "**Table of Contents**\n",
+    "\n",
+    "1. [Create a DataFrame in Scala](#create-in-scala)\n",
+    "2. [Read DataFrame in Python](#read-in-python)\n",
+    "3. [Create a DataFrame in Python](#create-in-python)\n",
+    "4. [Read DataFrame in Scala](#read-in-scala)\n",
+    "5. [Read DataFrame in SQL](#read-in-sql)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Create a DataFrame in Scala <a name=\"create-in-scala\"></a><span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
    ]
   },
   {
@@ -24,7 +62,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Read DataFrame in Python"
+    "## Read DataFrame in Python <a name=\"read-in-python\"></a> <span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
    ]
   },
   {
@@ -44,7 +82,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Create a DataFrame in Python"
+    "## Create a DataFrame in Python <a name=\"create-in-python\"></a> <span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
    ]
   },
   {
@@ -65,7 +103,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Read DataFrame in Scala"
+    "## Read DataFrame in Scala <a name=\"read-in-scala\"></a><span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
    ]
   },
   {
@@ -79,6 +117,25 @@
     "val cars = sqlContext.table(\"cars\")\n",
     "cars.show()"
    ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Read DataFrame in SQL <a name=\"read-in-sql\"></a><span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "%%sql\n",
+    "select * from cars where manufacturer == 'Audi'"
+   ]
   }
  ],
  "metadata": {


[5/6] incubator-toree git commit: Changes based on comments in PR

Posted by ch...@apache.org.
Changes based on comments in PR


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/8dd92a1d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/8dd92a1d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/8dd92a1d

Branch: refs/heads/master
Commit: 8dd92a1df8cf441e30d863be095d3cb0ddbab01d
Parents: dc493eb
Author: Gino Bustelo <lb...@apache.org>
Authored: Wed Mar 30 11:23:20 2016 -0500
Committer: Gino Bustelo <lb...@apache.org>
Committed: Wed Mar 30 14:02:17 2016 -0500

----------------------------------------------------------------------
 .../org/apache/toree/boot/CommandLineOptions.scala      | 12 +++++-------
 .../toree/boot/layer/ComponentInitialization.scala      |  4 ++--
 .../kernel/interpreter/scala/ScalaInterpreter.scala     |  9 +++------
 3 files changed, 10 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/8dd92a1d/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala b/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala
index da3ef49..43cfc8e 100644
--- a/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala
+++ b/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala
@@ -154,15 +154,13 @@ class CommandLineOptions(args: Seq[String]) {
     pair => if (pair._2.isDefined) Some((pair._1, pair._2.get)) else None
   }
 
-  /**
-   *
-   * @return
-   */
   private def interpreterArgs: Option[java.util.List[String]] = {
-    args.dropWhile(_ != "--").drop(1).toList match {
-      case Nil => None
-      case list: List[String] => Some(list.asJava)
+    val interpreterArgsList = args.dropWhile(_ != "--").drop(1).toList match {
+      case Nil => List.empty[String]
+      case list: List[String] => list
     }
+
+    Some(interpreterArgsList.asJava)
   }
 
   private def interpreterPlugins: Option[java.util.List[String]] = {

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/8dd92a1d/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
index 69f71ae..a7194eb 100644
--- a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
@@ -89,7 +89,7 @@ trait StandardComponentInitialization extends ComponentInitialization {
     val responseMap = initializeResponseMap()
 
     (commStorage, commRegistrar, commManager,
-      interpreterManager.defaultInterpreter.orNull, kernel,
+      interpreterManager.defaultInterpreter.get, kernel,
       dependencyDownloader, kernel.magics, pluginManager, responseMap)
 
   }
@@ -177,7 +177,7 @@ trait StandardComponentInitialization extends ComponentInitialization {
     val pluginManager = new PluginManager()
 
     logger.debug("Building dependency map")
-    pluginManager.dependencyManager.add(interpreterManager.interpreters.get("Scala").orNull)
+    pluginManager.dependencyManager.add(interpreterManager.interpreters.get("Scala").get)
     pluginManager.dependencyManager.add(dependencyDownloader)
     pluginManager.dependencyManager.add(config)
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/8dd92a1d/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
index ce0b8ec..d037884 100644
--- a/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -43,7 +43,7 @@ import scala.tools.nsc.util.{ClassPath, MergedClassPath}
 import scala.tools.nsc.{Global, Settings, io}
 import scala.util.{Try => UtilTry}
 
-class ScalaInterpreter(config:Config = ConfigFactory.empty) extends Interpreter {
+class ScalaInterpreter(private val config:Config = ConfigFactory.load) extends Interpreter {
 
   protected val logger = LoggerFactory.getLogger(this.getClass.getName)
 
@@ -230,10 +230,7 @@ class ScalaInterpreter(config:Config = ConfigFactory.empty) extends Interpreter
 
   protected def interpreterArgs(): List[String] = {
     import scala.collection.JavaConverters._
-    if(config.hasPath("interpreter_args"))
-      config.getStringList("interpreter_args").asScala.toList
-    else
-      Nil
+    config.getStringList("interpreter_args").asScala.toList
   }
 
   protected def bindKernelVariable(kernel: KernelLike): Unit = {
@@ -519,7 +516,7 @@ class ScalaInterpreter(config:Config = ConfigFactory.empty) extends Interpreter
 
     doQuietly {
       logger.debug(s"Binding SparkContext into interpreter as $bindName")
-      interpret(s"""def ${bindName}: org.apache.spark.SparkContext = kernel.sparkContext""")
+      interpret(s"""def ${bindName}: ${classOf[SparkContext].getName} = kernel.sparkContext""")
 
       // NOTE: This is needed because interpreter blows up after adding
       //       dependencies to SparkContext and Interpreter before the


[2/6] incubator-toree git commit: Refactoring some unnecessary methods for binding contexts

Posted by ch...@apache.org.
Refactoring some unnecessary methods for binding contexts


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/5fe5245a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/5fe5245a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/5fe5245a

Branch: refs/heads/master
Commit: 5fe5245a3ed6a6c6bbcdf4c062fed8bdedb9059e
Parents: bbf2866
Author: Gino Bustelo <lb...@apache.org>
Authored: Mon Mar 28 13:38:13 2016 -0500
Committer: Gino Bustelo <lb...@apache.org>
Committed: Wed Mar 30 11:42:42 2016 -0500

----------------------------------------------------------------------
 .../apache/toree/interpreter/Interpreter.scala  | 14 ----
 .../toree/interpreter/broker/BrokerBridge.scala |  4 --
 .../producer/JavaSparkContextProducerLike.scala | 43 -----------
 .../producer/SQLContextProducerLike.scala       | 43 -----------
 .../interpreter/broker/BrokerBridgeSpec.scala   |  5 --
 .../boot/layer/ComponentInitialization.scala    | 15 ++--
 .../toree/boot/layer/InterpreterManager.scala   | 31 ++++++--
 .../org/apache/toree/kernel/api/Kernel.scala    | 20 ------
 .../InterpreterActorSpecForIntegration.scala    |  2 +-
 .../PostProcessorSpecForIntegration.scala       |  2 +-
 .../interpreter/pyspark/PySparkBridge.scala     |  6 +-
 .../pyspark/PySparkInterpreter.scala            |  9 +--
 .../interpreter/scala/ScalaInterpreter.scala    | 76 +++++++++-----------
 .../AddExternalJarMagicSpecForIntegration.scala |  2 +-
 .../interpreter/sparkr/SparkRBridge.scala       |  6 +-
 .../interpreter/sparkr/SparkRInterpreter.scala  |  8 ---
 .../interpreter/sparkr/SparkRService.scala      |  6 +-
 .../kernel/interpreter/sql/SqlInterpreter.scala | 14 +---
 18 files changed, 76 insertions(+), 230 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel-api/src/main/scala/org/apache/toree/interpreter/Interpreter.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/org/apache/toree/interpreter/Interpreter.scala b/kernel-api/src/main/scala/org/apache/toree/interpreter/Interpreter.scala
index 9a0ffcb..67bd889 100644
--- a/kernel-api/src/main/scala/org/apache/toree/interpreter/Interpreter.scala
+++ b/kernel-api/src/main/scala/org/apache/toree/interpreter/Interpreter.scala
@@ -82,20 +82,6 @@ trait Interpreter {
   def doQuietly[T](body: => T): T
 
   /**
-   * Binds the SparkContext instance to the interpreter's namespace.
-   *
-   * @param sparkContext The SparkContext to bind
-   */
-  def bindSparkContext(sparkContext: SparkContext): Unit
-
-  /**
-   * Binds the SQLContext instance to the interpreter's namespace.
-   *
-   * @param sqlContext The SQLContext to bind
-   */
-  def bindSqlContext(sqlContext: SQLContext): Unit
-
-  /**
    * Binds a variable in the interpreter to a value.
    * @param variableName The name to expose the value in the interpreter
    * @param typeName The type of the variable, must be the fully qualified class name

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/BrokerBridge.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/BrokerBridge.scala b/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/BrokerBridge.scala
index 1c54495..202ee0b 100644
--- a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/BrokerBridge.scala
+++ b/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/BrokerBridge.scala
@@ -17,11 +17,7 @@
 
 package org.apache.toree.interpreter.broker
 
-import org.apache.toree.interpreter.broker.producer.{SQLContextProducerLike, JavaSparkContextProducerLike}
 import org.apache.toree.kernel.api.KernelLike
-import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.{SparkConf, SparkContext}
 
 /**
  * Represents the API available to the broker to act as the bridge for data

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/producer/JavaSparkContextProducerLike.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/producer/JavaSparkContextProducerLike.scala b/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/producer/JavaSparkContextProducerLike.scala
deleted file mode 100644
index 610b319..0000000
--- a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/producer/JavaSparkContextProducerLike.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-
-package org.apache.toree.interpreter.broker.producer
-
-import org.apache.spark.SparkContext
-import org.apache.spark.api.java.JavaSparkContext
-
-/**
- * Represents a producer for a JavaSparkContext.
- */
-trait JavaSparkContextProducerLike {
-  /**
-   * Creates a new JavaSparkContext instance.
-   *
-   * @param sparkContext The SparkContext instance to use to create the Java one
-   *
-   * @return The new JavaSparkContext
-   */
-  def newJavaSparkContext(sparkContext: SparkContext): JavaSparkContext
-}
-
-/**
- * Represents the standard producer for a JavaSparkContext.
- */
-trait StandardJavaSparkContextProducer extends JavaSparkContextProducerLike {
-  def newJavaSparkContext(sparkContext: SparkContext): JavaSparkContext =
-    new JavaSparkContext(sparkContext)
-}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/producer/SQLContextProducerLike.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/producer/SQLContextProducerLike.scala b/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/producer/SQLContextProducerLike.scala
deleted file mode 100644
index 4a78c2b..0000000
--- a/kernel-api/src/main/scala/org/apache/toree/interpreter/broker/producer/SQLContextProducerLike.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-
-package org.apache.toree.interpreter.broker.producer
-
-import org.apache.spark.SparkContext
-import org.apache.spark.sql.SQLContext
-
-/**
- * Represents a producer for a SQLContext.
- */
-trait SQLContextProducerLike {
-  /**
-   * Creates a new SQLContext instance.
-   *
-   * @param sparkContext The SparkContext instance to use to create the SQL one
-   *
-   * @return The new SQLContext
-   */
-  def newSQLContext(sparkContext: SparkContext): SQLContext
-}
-
-/**
- * Represents the standard producer for a SQLContext.
- */
-trait StandardSQLContextProducer extends SQLContextProducerLike {
-  def newSQLContext(sparkContext: SparkContext): SQLContext =
-    new SQLContext(sparkContext)
-}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel-api/src/test/scala/org/apache/toree/interpreter/broker/BrokerBridgeSpec.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/test/scala/org/apache/toree/interpreter/broker/BrokerBridgeSpec.scala b/kernel-api/src/test/scala/org/apache/toree/interpreter/broker/BrokerBridgeSpec.scala
index 8ab4959..26cfca8 100644
--- a/kernel-api/src/test/scala/org/apache/toree/interpreter/broker/BrokerBridgeSpec.scala
+++ b/kernel-api/src/test/scala/org/apache/toree/interpreter/broker/BrokerBridgeSpec.scala
@@ -16,14 +16,9 @@
  */
 package org.apache.toree.interpreter.broker
 
-import org.apache.toree.interpreter.broker.producer.{SQLContextProducerLike, JavaSparkContextProducerLike}
 import org.apache.toree.kernel.api.KernelLike
-import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.{SparkConf, SparkContext}
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{FunSpec, Matchers, OneInstancePerTest}
-import org.mockito.Mockito._
 
 class BrokerBridgeSpec extends FunSpec with Matchers with OneInstancePerTest
   with MockitoSugar

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
index d361708..61ca88b 100644
--- a/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/org/apache/toree/boot/layer/ComponentInitialization.scala
@@ -80,14 +80,15 @@ trait StandardComponentInitialization extends ComponentInitialization {
       config, scalaInterpreter, dependencyDownloader)
 
     val kernel = initializeKernel(
-      config, actorLoader, manager, commManager, pluginManager
+        config, actorLoader, manager, commManager, pluginManager
     )
 
-    initializePlugins(config, pluginManager)
+    initializeSparkContext(config, kernel, appName)
+
+    manager.initializeInterpreters(kernel)
 
     val responseMap = initializeResponseMap()
 
-    initializeSparkContext(config, kernel, appName)
 
     (commStorage, commRegistrar, commManager,
       manager.defaultInterpreter.orNull, kernel,
@@ -145,14 +146,6 @@ trait StandardComponentInitialization extends ComponentInitialization {
       commManager,
       pluginManager
     )
-    /*
-    interpreter.doQuietly {
-      interpreter.bind(
-        "kernel", "org.apache.toree.kernel.api.Kernel",
-        kernel, List( """@transient implicit""")
-      )
-    }
-    */
     pluginManager.dependencyManager.add(kernel)
 
     kernel

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel/src/main/scala/org/apache/toree/boot/layer/InterpreterManager.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/org/apache/toree/boot/layer/InterpreterManager.scala b/kernel/src/main/scala/org/apache/toree/boot/layer/InterpreterManager.scala
index 1b5ad72..a0409fe 100644
--- a/kernel/src/main/scala/org/apache/toree/boot/layer/InterpreterManager.scala
+++ b/kernel/src/main/scala/org/apache/toree/boot/layer/InterpreterManager.scala
@@ -57,13 +57,11 @@ object InterpreterManager {
       config.getStringList("default_interpreter_plugin").asScala
 
     val m = ip.foldLeft(Map[String, Interpreter]())( (acc, v) => {
+
       v.split(":") match {
         case Array(name, className) =>
           try {
-            val i = Class
-                .forName(className)
-                .newInstance()
-                .asInstanceOf[Interpreter]
+            val i = instantiate(className, config)
             acc + (name -> i)
           }
           catch {
@@ -80,4 +78,29 @@ object InterpreterManager {
 
     InterpreterManager(interpreters = m, default = default)
   }
+
+  /**
+   * instantiate will look for a constructor that take a Config. If available, will
+   * call that, else it will assume that there is a default empty constructor.
+   * @param className
+   * @param config
+   * @return
+   */
+  private def instantiate(className:String, config:Config):Interpreter = {
+    try {
+      Class
+        .forName(className)
+        .getConstructor(Class.forName("com.typesafe.config.Config"))
+        .newInstance(config).asInstanceOf[Interpreter]
+    }
+    catch {
+      case e: NoSuchMethodException =>
+        logger.debug("Using default constructor for class " + className)
+        Class
+          .forName(className)
+          .newInstance().asInstanceOf[Interpreter]
+    }
+
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala b/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala
index bf7f56a..777cd0f 100644
--- a/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala
+++ b/kernel/src/main/scala/org/apache/toree/kernel/api/Kernel.scala
@@ -110,9 +110,6 @@ class Kernel (
    */
   val data: java.util.Map[String, Any] = new ConcurrentHashMap[String, Any]()
 
-
-  interpreterManager.initializeInterpreters(this)
-
   val interpreter = interpreterManager.defaultInterpreter.get
 
   /**
@@ -356,9 +353,6 @@ class Kernel (
     val sparkMaster = _sparkConf.getOption("spark.master").getOrElse("not_set")
     logger.info( s"Connecting to spark.master $sparkMaster")
 
-    updateInterpreterWithSparkContext(interpreter, sparkContext)
-    updateInterpreterWithSqlContext(interpreter, sqlContext)
-
     // TODO: Convert to events
     pluginManager.dependencyManager.add(_sparkConf)
     pluginManager.dependencyManager.add(_sparkContext)
@@ -414,14 +408,6 @@ class Kernel (
     sparkContext
   }
 
-  // TODO: Think of a better way to test without exposing this
-  protected[kernel] def updateInterpreterWithSparkContext(
-    interpreter: Interpreter, sparkContext: SparkContext
-  ) = {
-
-    interpreter.bindSparkContext(sparkContext)
-  }
-
   protected[kernel] def initializeSqlContext(
     sparkContext: SparkContext
   ): SQLContext = {
@@ -451,12 +437,6 @@ class Kernel (
     sqlContext
   }
 
-  protected[kernel] def updateInterpreterWithSqlContext(
-    interpreter: Interpreter, sqlContext: SQLContext
-  ): Unit = {
-    interpreter.bindSqlContext(sqlContext)
-  }
-
   override def interpreter(name: String): Option[Interpreter] = {
     interpreterManager.interpreters.get(name)
   }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala b/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
index 62da297..08effdb 100644
--- a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
+++ b/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
@@ -66,7 +66,7 @@ class InterpreterActorSpecForIntegration extends TestKit(
       TaskManager.DefaultMaximumWorkers
     }
 
-    override protected def bindKernelVarialble(kernel: KernelLike): Unit = { }
+    override protected def bindKernelVariable(kernel: KernelLike): Unit = { }
   }
 
   private val conf = new SparkConf()

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala b/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
index 6a401b9..cfd7ea0 100644
--- a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
+++ b/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
@@ -47,7 +47,7 @@ class PostProcessorSpecForIntegration extends FunSpec with Matchers
         TaskManager.DefaultMaximumWorkers
       }
 
-      override protected def bindKernelVarialble(kernel: KernelLike): Unit = { }
+      override protected def bindKernelVariable(kernel: KernelLike): Unit = { }
     }
     scalaInterpreter.init(mock[KernelLike])
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkBridge.scala
----------------------------------------------------------------------
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkBridge.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkBridge.scala
index 4926aef..ae6e15a 100644
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkBridge.scala
+++ b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkBridge.scala
@@ -16,10 +16,8 @@
  */
 package org.apache.toree.kernel.interpreter.pyspark
 
-import org.apache.toree.interpreter.broker.producer.{StandardSQLContextProducer, StandardJavaSparkContextProducer, SQLContextProducerLike, JavaSparkContextProducerLike}
-import org.apache.toree.interpreter.broker.{BrokerState, BrokerBridge}
+import org.apache.toree.interpreter.broker.{BrokerBridge, BrokerState}
 import org.apache.toree.kernel.api.KernelLike
-import org.apache.spark.SparkContext
 
 /**
  * Represents constants for the PySpark bridge.
@@ -43,7 +41,7 @@ object PySparkBridge {
     new PySparkBridge(
       _brokerState = brokerState,
       _kernel = kernel
-    ) with StandardJavaSparkContextProducer with StandardSQLContextProducer
+    )
   }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkInterpreter.scala
----------------------------------------------------------------------
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkInterpreter.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkInterpreter.scala
index 4a40fb7..b07200d 100644
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkInterpreter.scala
+++ b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkInterpreter.scala
@@ -18,11 +18,10 @@ package org.apache.toree.kernel.interpreter.pyspark
 
 import java.net.URL
 
+import com.typesafe.config.Config
 import org.apache.toree.interpreter.Results.Result
 import org.apache.toree.interpreter._
 import org.apache.toree.kernel.api.KernelLike
-import org.apache.spark.SparkContext
-import org.apache.spark.sql.SQLContext
 import org.slf4j.LoggerFactory
 import py4j.GatewayServer
 
@@ -80,12 +79,6 @@ class PySparkInterpreter(
     this
   }
 
-  // Unsupported (but can be invoked)
-  override def bindSparkContext(sparkContext: SparkContext): Unit = {}
-
-  // Unsupported (but can be invoked)
-  override def bindSqlContext(sqlContext: SQLContext): Unit = {}
-
   /**
    * Executes the provided code with the option to silence output.
    * @param code The code to execute

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
index 249441e..5a888fe 100644
--- a/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -22,49 +22,60 @@ import java.net.{URL, URLClassLoader}
 import java.nio.charset.Charset
 import java.util.concurrent.ExecutionException
 
-import akka.actor.Actor
-import akka.actor.Actor.Receive
+import com.typesafe.config.Config
+import org.apache.spark.SparkContext
+import org.apache.spark.repl.{SparkCommandLine, SparkIMain, SparkJLineCompletion}
+import org.apache.spark.sql.SQLContext
 import org.apache.toree.global.StreamState
-import org.apache.toree.interpreter
 import org.apache.toree.interpreter._
 import org.apache.toree.interpreter.imports.printers.{WrapperConsole, WrapperSystem}
 import org.apache.toree.kernel.api.{KernelLike, KernelOptions}
 import org.apache.toree.utils.{MultiOutputStream, TaskManager}
-import org.apache.spark.SparkContext
-import org.apache.spark.repl.{SparkCommandLine, SparkIMain, SparkJLineCompletion}
-import org.apache.spark.sql.SQLContext
 import org.slf4j.LoggerFactory
 
 import scala.annotation.tailrec
 import scala.concurrent.{Await, Future}
 import scala.language.reflectiveCalls
 import scala.tools.nsc.backend.JavaPlatform
-import scala.tools.nsc.interpreter.{OutputStream, IR, JPrintWriter, InputStream}
+import scala.tools.nsc.interpreter.{IR, InputStream, JPrintWriter, OutputStream}
 import scala.tools.nsc.io.AbstractFile
 import scala.tools.nsc.util.{ClassPath, MergedClassPath}
 import scala.tools.nsc.{Global, Settings, io}
 import scala.util.{Try => UtilTry}
 
-class ScalaInterpreter() extends Interpreter {
+class ScalaInterpreter(config:Config) extends Interpreter {
 
   protected val logger = LoggerFactory.getLogger(this.getClass.getName)
 
   private val ExecutionExceptionName = "lastException"
-  protected var settings: Settings = null
-
   protected val _thisClassloader = this.getClass.getClassLoader
+
   protected val _runtimeClassloader =
     new URLClassLoader(Array(), _thisClassloader) {
       def addJar(url: URL) = this.addURL(url)
     }
+  protected val lastResultOut = new ByteArrayOutputStream()
 
 
-  protected val lastResultOut = new ByteArrayOutputStream()
   protected val multiOutputStream = MultiOutputStream(List(Console.out, lastResultOut))
   private var taskManager: TaskManager = _
   var sparkIMain: SparkIMain = _
   protected var jLineCompleter: SparkJLineCompletion = _
 
+  protected var settings: Settings = newSettings(interpreterArgs())
+
+  settings.classpath.value = buildClasspath(_thisClassloader)
+  settings.embeddedDefaults(_runtimeClassloader)
+
+  private val maxInterpreterThreads: Int = {
+    if(config.hasPath("max_interpreter_threads"))
+      config.getInt("max_interpreter_threads")
+    else
+      TaskManager.DefaultMaximumWorkers
+  }
+
+  start()
+
   protected def newSparkIMain(
     settings: Settings, out: JPrintWriter
   ): SparkIMain = {
@@ -74,8 +85,6 @@ class ScalaInterpreter() extends Interpreter {
     s
   }
 
-  private var maxInterpreterThreads:Int = TaskManager.DefaultMaximumWorkers
-
   protected def newTaskManager(): TaskManager =
     new TaskManager(maximumWorkers = maxInterpreterThreads)
 
@@ -193,16 +202,9 @@ class ScalaInterpreter() extends Interpreter {
   }
 
   override def init(kernel: KernelLike): Interpreter = {
-    val args = interpreterArgs(kernel)
-    this.settings = newSettings(args)
-
-    this.settings.classpath.value = buildClasspath(_thisClassloader)
-    this.settings.embeddedDefaults(_runtimeClassloader)
-
-    maxInterpreterThreads = maxInterpreterThreads(kernel)
-
-    start()
-    bindKernelVarialble(kernel)
+    bindKernelVariable(kernel)
+    bindSparkContext(kernel.sparkContext)
+    bindSqlContext(kernel.sqlContext)
 
     this
   }
@@ -228,16 +230,12 @@ class ScalaInterpreter() extends Interpreter {
     urls.foldLeft("")((l, r) => ClassPath.join(l, r.toString))
   }
 
-  protected def interpreterArgs(kernel: KernelLike): List[String] = {
+  protected def interpreterArgs(): List[String] = {
     import scala.collection.JavaConverters._
-    kernel.config.getStringList("interpreter_args").asScala.toList
+    config.getStringList("interpreter_args").asScala.toList
   }
 
-  protected def maxInterpreterThreads(kernel: KernelLike): Int = {
-    kernel.config.getInt("max_interpreter_threads")
-  }
-
-  protected def bindKernelVarialble(kernel: KernelLike): Unit = {
+  protected def bindKernelVariable(kernel: KernelLike): Unit = {
     doQuietly {
       bind(
         "kernel", "org.apache.toree.kernel.api.Kernel",
@@ -515,17 +513,12 @@ class ScalaInterpreter() extends Interpreter {
     sparkIMain.beQuietDuring[T](body)
   }
 
-  override def bindSparkContext(sparkContext: SparkContext) = {
+  def bindSparkContext(sparkContext: SparkContext) = {
     val bindName = "sc"
 
     doQuietly {
       logger.debug(s"Binding SparkContext into interpreter as $bindName")
-      bind(
-        bindName,
-        "org.apache.spark.SparkContext",
-        sparkContext,
-        List( """@transient""")
-      )
+      interpret(s"""def ${bindName}: org.apache.spark.SparkContext = kernel.sparkContext""")
 
       // NOTE: This is needed because interpreter blows up after adding
       //       dependencies to SparkContext and Interpreter before the
@@ -546,20 +539,15 @@ class ScalaInterpreter() extends Interpreter {
     }
   }
 
-  override def bindSqlContext(sqlContext: SQLContext): Unit = {
+  def bindSqlContext(sqlContext: SQLContext): Unit = {
     val bindName = "sqlContext"
 
     doQuietly {
       // TODO: This only adds the context to the main interpreter AND
       //       is limited to the Scala interpreter interface
       logger.debug(s"Binding SQLContext into interpreter as $bindName")
-      bind(
-        bindName,
-        classOf[SQLContext].getName,
-        sqlContext,
-        List( """@transient""")
-      )
 
+      interpret(s"""def ${bindName}: ${classOf[SQLContext].getName} = kernel.sqlContext""")
       sqlContext
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala b/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
index 2d5faa4..b4bf596 100644
--- a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
+++ b/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
@@ -46,7 +46,7 @@ class AddExternalJarMagicSpecForIntegration
         TaskManager.DefaultMaximumWorkers
       }
 
-      override protected def bindKernelVarialble(kernel: KernelLike): Unit = { }
+      override protected def bindKernelVariable(kernel: KernelLike): Unit = { }
     }
     interpreter.init(mock[KernelLike])
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRBridge.scala
----------------------------------------------------------------------
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRBridge.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRBridge.scala
index 5d82e51..a9cfc63 100644
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRBridge.scala
+++ b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRBridge.scala
@@ -16,10 +16,8 @@
  */
 package org.apache.toree.kernel.interpreter.sparkr
 
-import org.apache.toree.interpreter.broker.producer.{StandardSQLContextProducer, StandardJavaSparkContextProducer, JavaSparkContextProducerLike, SQLContextProducerLike}
-import org.apache.toree.interpreter.broker.{BrokerState, BrokerBridge}
+import org.apache.toree.interpreter.broker.{BrokerBridge, BrokerState}
 import org.apache.toree.kernel.api.KernelLike
-import org.apache.spark.SparkContext
 
 /**
  * Represents constants for the SparkR bridge.
@@ -60,7 +58,7 @@ object SparkRBridge {
     new SparkRBridge(
       _brokerState = brokerState,
       _kernel = kernel
-    ) with StandardJavaSparkContextProducer with StandardSQLContextProducer
+    )
   }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala
----------------------------------------------------------------------
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala
index 851a2d2..c99ada0 100644
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala
+++ b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala
@@ -21,8 +21,6 @@ import java.net.URL
 import org.apache.toree.interpreter.Results.Result
 import org.apache.toree.interpreter._
 import org.apache.toree.kernel.api.KernelLike
-import org.apache.spark.SparkContext
-import org.apache.spark.sql.SQLContext
 import org.slf4j.LoggerFactory
 
 import scala.concurrent.Await
@@ -135,12 +133,6 @@ class SparkRInterpreter(
   // Unsupported
   override def classServerURI: String = ""
 
-  // Unsupported (but can be invoked)
-  override def bindSparkContext(sparkContext: SparkContext): Unit = {}
-
-  // Unsupported (but can be invoked)
-  override def bindSqlContext(sqlContext: SQLContext): Unit = {}
-
   // Unsupported
   override def interrupt(): Interpreter = ???
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRService.scala
----------------------------------------------------------------------
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRService.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRService.scala
index be10e63..b44eae4 100644
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRService.scala
+++ b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRService.scala
@@ -16,15 +16,13 @@
  */
 package org.apache.toree.kernel.interpreter.sparkr
 
-import java.util.concurrent.{TimeUnit, Semaphore}
+import java.util.concurrent.{Semaphore, TimeUnit}
 
 import org.apache.toree.interpreter.broker.BrokerService
-import org.apache.toree.kernel.api.KernelLike
 import org.apache.toree.kernel.interpreter.sparkr.SparkRTypes.{Code, CodeResults}
-import org.apache.spark.SparkContext
 import org.slf4j.LoggerFactory
 
-import scala.concurrent.{future, Future}
+import scala.concurrent.{Future, future}
 
 /**
  * Represents the service that provides the high-level interface between the

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5fe5245a/sql-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sql/SqlInterpreter.scala
----------------------------------------------------------------------
diff --git a/sql-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sql/SqlInterpreter.scala b/sql-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sql/SqlInterpreter.scala
index b9d534f..207ab57 100644
--- a/sql-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sql/SqlInterpreter.scala
+++ b/sql-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sql/SqlInterpreter.scala
@@ -18,15 +18,13 @@ package org.apache.toree.kernel.interpreter.sql
 
 import java.net.URL
 
-import org.apache.toree.interpreter.{ExecuteFailure, ExecuteOutput, Interpreter}
 import org.apache.toree.interpreter.Results.Result
+import org.apache.toree.interpreter.{ExecuteFailure, ExecuteOutput, Interpreter}
 import org.apache.toree.kernel.api.KernelLike
-import org.apache.spark.SparkContext
-import org.apache.spark.sql.SQLContext
 
-import scala.concurrent.duration._
 import scala.concurrent.Await
-import scala.tools.nsc.interpreter.{OutputStream, InputStream}
+import scala.concurrent.duration._
+import scala.tools.nsc.interpreter.{InputStream, OutputStream}
 
 /**
  * Represents an interpreter interface to Spark SQL.
@@ -103,12 +101,6 @@ class SqlInterpreter() extends Interpreter {
   // Unsupported
   override def classServerURI: String = ""
 
-  // Unsupported (but can be invoked)
-  override def bindSparkContext(sparkContext: SparkContext): Unit = {}
-
-  // Unsupported (but can be invoked)
-  override def bindSqlContext(sqlContext: SQLContext): Unit = {}
-
   // Unsupported
   override def interrupt(): Interpreter = ???