Posted to commits@toree.apache.org by lr...@apache.org on 2016/01/11 22:01:43 UTC

[01/50] [abbrv] incubator-toree git commit: Merge pull request #191 from bpburns/sparkcontext

Repository: incubator-toree
Updated Branches:
  refs/heads/AddJRubySupport [created] 225eb62f9
  refs/heads/AddMacrosForDependencies [created] c3de594c7
  refs/heads/AddNightlyBuildSupport [created] d68f07acd
  refs/heads/AddZeppelinSupport [created] 96b4c0c32
  refs/heads/ExtraStuff [created] db9429993
  refs/heads/MonitorMemory [created] f72176ae7
  refs/heads/branch-0.1.3 [created] 9bd066292
  refs/heads/branch-0.1.4 [created] c4e459f63
  refs/heads/gh-pages [created] 0fefd2a5e
  refs/heads/issue-123-make-imports-configurable [created] 36c1ace0e
  refs/heads/master [created] 0a5a7f6ba
  refs/heads/sqlInjection [created] ca1feaae8
Updated Tags:  refs/tags/v0.1.2 [created] 16919853d
  refs/tags/v0.1.2-jeromq [created] b673937de
  refs/tags/v0.1.5-assembly [created] ecfa5dd2a


Merge pull request #191 from bpburns/sparkcontext

Option to create the SparkContext programmatically. The new API is kernel.createSparkContext(String, String) or kernel.createSparkContext(SparkConf).
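
A minimal usage sketch of the two new entry points, assuming the two String arguments are the Spark master and application name (the message above does not spell them out), run against the kernel instance bound into the interpreter session:

    import org.apache.spark.{SparkConf, SparkContext}

    // Variant 1: pass master and application name directly
    // (assumption: that is what the (String, String) overload takes).
    val sc1: SparkContext = kernel.createSparkContext("local[*]", "demo-app")

    // Variant 2: pass a caller-built SparkConf for finer control.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("demo-app")
    val sc2: SparkContext = kernel.createSparkContext(conf)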

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/9db161f8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/9db161f8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/9db161f8

Branch: refs/heads/master
Commit: 9db161f8667a3f148cd5d811be044db137db13c9
Parents: c054ae0 18708cc
Author: Brian Burns <br...@gmail.com>
Authored: Wed Nov 4 09:20:13 2015 -0500
Committer: Brian Burns <br...@gmail.com>
Committed: Wed Nov 4 09:20:13 2015 -0500

----------------------------------------------------------------------
 .../spark/interpreter/broker/BrokerBridge.scala |  20 +--
 .../com/ibm/spark/kernel/api/KernelLike.scala   |  17 ++
 .../scala/com/ibm/spark/magic/MagicLoader.scala |   2 +-
 .../interpreter/broker/BrokerBridgeSpec.scala   |  37 +----
 .../com/ibm/spark/boot/CommandLineOptions.scala |   7 +-
 .../com/ibm/spark/boot/KernelBootstrap.scala    |  11 +-
 .../boot/layer/ComponentInitialization.scala    |  37 +++--
 .../scala/com/ibm/spark/kernel/api/Kernel.scala | 161 ++++++++++++++++++-
 .../StandardComponentInitializationSpec.scala   |   2 +
 .../com/ibm/spark/kernel/api/KernelSpec.scala   |  53 +++++-
 .../InterpreterActorSpecForIntegration.scala    |   8 +-
 .../scala/test/utils/SparkKernelDeployer.scala  |   6 +-
 .../main/resources/PySpark/pyspark_runner.py    |  22 ++-
 .../interpreter/pyspark/PySparkBridge.scala     |  15 +-
 .../pyspark/PySparkInterpreter.scala            |   7 +-
 .../interpreter/pyspark/PySparkProcess.scala    |   2 +
 .../interpreter/pyspark/PySparkService.scala    |   4 +-
 resources/compile/reference.conf                |   2 +
 .../interpreter/scala/ScalaInterpreter.scala    |   3 +
 .../src/main/resources/kernelR/sparkr_runner.R  |  18 ++-
 .../interpreter/sparkr/SparkRBridge.scala       |  15 +-
 .../interpreter/sparkr/SparkRInterpreter.scala  |   7 +-
 .../kernel/interpreter/sql/SqlInterpreter.scala |   5 +-
 .../kernel/interpreter/sql/SqlService.scala     |   7 +-
 24 files changed, 340 insertions(+), 128 deletions(-)
----------------------------------------------------------------------



[23/50] [abbrv] incubator-toree git commit: Travis modifications to build using Makefile

Posted by lr...@apache.org.
Travis modifications to build using Makefile


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/be758d81
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/be758d81
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/be758d81

Branch: refs/heads/master
Commit: be758d8156451befb8ab6c4cbb6b110f3b4cac7c
Parents: 5ccdfeb
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Nov 18 14:11:00 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Nov 24 08:49:50 2015 -0600

----------------------------------------------------------------------
 .travis.yml | 16 +++++++++-------
 Makefile    |  5 +++++
 2 files changed, 14 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/be758d81/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index d2e701a..afc2969 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,19 +1,21 @@
 language: scala
 scala:
     - "2.10.4"
+
 jdk:
     - oraclejdk7
     - openjdk7
-script: 
-    - "sbt clean test -Dakka.test.timefactor=3"
-    - find $HOME/.sbt -name "*.lock" | xargs rm
-    - find $HOME/.ivy2 -name "ivydata-*.properties" | xargs rm
+
+script:
+    - make test-travis
+
 sudo: false
+
 cache:
     directories:
         - $HOME/.ivy2/cache
         - $HOME/.sbt/boot/
-branches:
-  only:
-    - master
 
+branches:
+    only:
+        - master
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/be758d81/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index e4f17f7..745610d 100644
--- a/Makefile
+++ b/Makefile
@@ -55,3 +55,8 @@ dist: kernel/target/scala-2.10/$(ASSEMBLY_JAR)
 	@echo "VERSION: $(FULL_VERSION)" > $(VERSION_FILE)
 	@echo "COMMIT: $(COMMIT)" >> $(VERSION_FILE)
 	@cd dist; tar -cvzf spark-kernel-$(FULL_VERSION).tar.gz spark-kernel
+
+test-travis:
+	$(ENV_OPTS) sbt clean test -Dakka.test.timefactor=3
+	find $(HOME)/.sbt -name "*.lock" | xargs rm
+	find $(HOME)/.ivy2 -name "ivydata-*.properties" | xargs rm
\ No newline at end of file


[20/50] [abbrv] incubator-toree git commit: Removing spark.master cmd-line arg from kernel

Posted by lr...@apache.org.
Removing spark.master cmd-line arg from kernel


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/4ebe0b29
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/4ebe0b29
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/4ebe0b29

Branch: refs/heads/master
Commit: 4ebe0b292f0fdc2d5556fd3968f7595a408674d8
Parents: ef77e3f
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Nov 11 17:28:45 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Nov 24 08:49:50 2015 -0600

----------------------------------------------------------------------
 .../com/ibm/spark/boot/CommandLineOptions.scala |  5 ----
 .../ibm/spark/boot/CommandLineOptionsSpec.scala | 30 +-------------------
 resources/compile/reference.conf                |  1 -
 3 files changed, 1 insertion(+), 35 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/4ebe0b29/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
index b3871f5..a5acbc2 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
@@ -42,10 +42,6 @@ class CommandLineOptions(args: Seq[String]) {
     parser.accepts("profile", "path to IPython JSON connection file")
       .withRequiredArg().ofType(classOf[File])
 
-  private val _master =
-    parser.accepts("master", "location of master Spark node")
-      .withRequiredArg().ofType(classOf[String])
-
   private val _ip =
     parser.accepts("ip", "ip used to bind sockets")
       .withRequiredArg().ofType(classOf[String])
@@ -139,7 +135,6 @@ class CommandLineOptions(args: Seq[String]) {
     }
 
     val commandLineConfig: Config = ConfigFactory.parseMap(Map(
-      "spark.master" -> get(_master),
       "stdin_port" -> get(_stdin_port),
       "shell_port" -> get(_shell_port),
       "iopub_port" -> get(_iopub_port),

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/4ebe0b29/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala b/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
index ab6748a..703d677 100644
--- a/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
@@ -132,31 +132,6 @@ class CommandLineOptionsSpec extends FunSpec with Matchers {
       }
     }
 
-    describe("when received --master=<value>") {
-      it("should error if value is not set") {
-        intercept[OptionException] {
-          new CommandLineOptions(Seq("--master"))
-        }
-      }
-
-      describe("#toConfig") {
-        it("should set master to specified value") {
-          val expected = "test"
-          val options = new CommandLineOptions(s"--master=${expected}" :: Nil)
-          val config: Config = options.toConfig
-
-          config.getString("spark.master") should be(expected)
-        }
-
-        it("should set master to local[*]") {
-          val options = new CommandLineOptions(Nil)
-          val config: Config = options.toConfig
-
-          config.getString("spark.master") should be("local[*]")
-        }
-      }
-    }
-
     describe("when received --profile=<path>") {
       it("should error if path is not set") {
         intercept[OptionException] {
@@ -308,7 +283,7 @@ class CommandLineOptionsSpec extends FunSpec with Matchers {
           config.getString("ip") should be(expected)
         }
 
-        it("should set master to local[*]") {
+        it("should set ip to 127.0.0.1") {
           val options = new CommandLineOptions(Nil)
           val config: Config = options.toConfig
 
@@ -319,18 +294,15 @@ class CommandLineOptionsSpec extends FunSpec with Matchers {
 
     describe("when received options with surrounding whitespace") {
       it("should trim whitespace") {
-        val master = "test"
         val url1 = "url1"
         val url2 = "url2"
 
         val options = new CommandLineOptions(Seq(
-          s"--master=${master} ",
           " --magic-url ", s" ${url1}\t",
           "--magic-url", s" \t ${url2} \t"
         ))
         val config: Config = options.toConfig
 
-        config.getString("spark.master") should be(master)
         config.getList("magic_urls").unwrapped.asScala should
           be (Seq(url1, url2))
       }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/4ebe0b29/resources/compile/reference.conf
----------------------------------------------------------------------
diff --git a/resources/compile/reference.conf b/resources/compile/reference.conf
index d64d2b8..a01a88c 100644
--- a/resources/compile/reference.conf
+++ b/resources/compile/reference.conf
@@ -35,7 +35,6 @@ ip = ${?IP}
 transport = "tcp"
 signature_scheme = "hmac-sha256"
 key = ""
-spark.master = "local[*]"
 
 ivy_local = "/tmp/.ivy2"
 ivy_local = ${?IVY_LOCAL}


[27/50] [abbrv] incubator-toree git commit: Merge pull request #196 from ibm-et/SPARK_HOME_REQUIRED

Posted by lr...@apache.org.
Merge pull request #196 from ibm-et/SPARK_HOME_REQUIRED

Require SPARK_HOME to run and do not package Spark dependencies

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/ecfa5dd2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/ecfa5dd2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/ecfa5dd2

Branch: refs/heads/master
Commit: ecfa5dd2abe8f992e54b12f4c5e411855b72e898
Parents: 6f46c20 bc834e8
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Wed Nov 25 11:26:18 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Wed Nov 25 11:26:18 2015 -0600

----------------------------------------------------------------------
 .gitignore                                      |  1 +
 .travis.yml                                     | 16 ++--
 Makefile                                        | 76 ++++++++--------
 README.md                                       |  9 +-
 Vagrantfile                                     |  7 +-
 etc/bin/spark-kernel                            | 36 ++++++++
 kernel-api/build.sbt                            | 38 +-------
 kernel/build.sbt                                |  5 --
 kernel/project/plugins.sbt                      |  4 -
 .../com/ibm/spark/boot/CommandLineOptions.scala |  5 --
 .../ibm/spark/boot/CommandLineOptionsSpec.scala | 30 +------
 project/Build.scala                             | 13 ++-
 project/Common.scala                            | 94 +++++++++-----------
 project/plugins.sbt                             |  8 +-
 protocol/build.sbt                              |  7 +-
 protocol/project/plugins.sbt                    |  3 -
 resources/compile/reference.conf                |  1 -
 17 files changed, 148 insertions(+), 205 deletions(-)
----------------------------------------------------------------------



[08/50] [abbrv] incubator-toree git commit: make sure we can instantiate ScalaInterpreter the same as the rest

Posted by lr...@apache.org.
make sure we can instantiate ScalaInterpreter the same as the rest
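
The point of the change is that every interpreter can now be constructed uniformly from the "Name:fully.qualified.ClassName" entries in reference.conf. A minimal sketch of that loading step (illustrative only, not the actual InterpreterManager code):

    import com.ibm.spark.interpreter.Interpreter
    import com.ibm.spark.kernel.api.KernelLike

    import scala.util.Try

    // Parse a "Name:ClassName" plugin entry, construct the interpreter via
    // its no-arg constructor, and hand it the kernel through init(kernel).
    def loadInterpreter(entry: String, kernel: KernelLike): Option[(String, Interpreter)] =
      entry.split(":") match {
        case Array(name, className) =>
          Try {
            val interpreter = Class.forName(className)
              .getDeclaredConstructor()
              .newInstance()
              .asInstanceOf[Interpreter]
            name -> interpreter.init(kernel)
          }.toOption
        case _ => None
      }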


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/09ad06e0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/09ad06e0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/09ad06e0

Branch: refs/heads/master
Commit: 09ad06e0f4d40706f190e2a1fe692716a650e493
Parents: 25b343b
Author: Brian Burns <bb...@us.ibm.com>
Authored: Thu Nov 12 16:47:45 2015 -0500
Committer: Brian Burns <bb...@us.ibm.com>
Committed: Thu Nov 12 16:47:45 2015 -0500

----------------------------------------------------------------------
 .../com/ibm/spark/kernel/api/KernelLike.scala   |   3 +
 .../boot/layer/ComponentInitialization.scala    | 116 ++-----------------
 .../spark/boot/layer/InterpreterManager.scala   |   2 +-
 .../scala/com/ibm/spark/kernel/api/Kernel.scala |  17 ++-
 .../com/ibm/spark/kernel/api/KernelSpec.scala   |   2 +-
 .../InterpreterActorSpecForIntegration.scala    |  25 +++-
 .../PostProcessorSpecForIntegration.scala       |  27 ++++-
 .../scala/test/utils/SparkKernelDeployer.scala  |  45 +------
 resources/compile/reference.conf                |   1 +
 resources/test/reference.conf                   |   1 +
 .../interpreter/scala/ScalaInterpreter.scala    |  67 +++++++----
 .../scala/ScalaInterpreterSpec.scala            |   5 +-
 .../AddExternalJarMagicSpecForIntegration.scala |  26 ++++-
 13 files changed, 136 insertions(+), 201 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala b/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala
index 8fb5d80..c9442aa 100644
--- a/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala
+++ b/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala
@@ -18,6 +18,7 @@ package com.ibm.spark.kernel.api
 
 import java.io.{PrintStream, InputStream, OutputStream}
 
+import com.typesafe.config.Config
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.sql.SQLContext
@@ -93,6 +94,8 @@ trait KernelLike {
 
   def interpreter(name: String): Option[com.ibm.spark.interpreter.Interpreter]
 
+  def config: Config
+
   def sparkContext: SparkContext
 
   def sparkConf: SparkConf

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
index e403e9b..973075b 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
@@ -25,10 +25,6 @@ import com.ibm.spark.dependencies.{DependencyDownloader, IvyDependencyDownloader
 import com.ibm.spark.global
 import com.ibm.spark.interpreter._
 import com.ibm.spark.kernel.api.{KernelLike, Kernel}
-import com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter
-import com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter
-import com.ibm.spark.kernel.interpreter.scala.{TaskManagerProducerLike, StandardSparkIMainProducer, StandardSettingsProducer, ScalaInterpreter}
-import com.ibm.spark.kernel.interpreter.sql.SqlInterpreter
 import com.ibm.spark.kernel.protocol.v5.KMBuilder
 import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
 import com.ibm.spark.kernel.protocol.v5.stream.KernelOutputStream
@@ -81,105 +77,28 @@ trait StandardComponentInitialization extends ComponentInitialization {
   ) = {
     val (commStorage, commRegistrar, commManager) =
       initializeCommObjects(actorLoader)
-    val interpreter = initializeInterpreter(config)
+
+    val manager =  InterpreterManager(config)
+    val scalaInterpreter = manager.interpreters.get("Scala").orNull
 
     val dependencyDownloader = initializeDependencyDownloader(config)
     val magicLoader = initializeMagicLoader(
-      config, interpreter, dependencyDownloader)
-    val manager =  InterpreterManager(config)
-      .addInterpreter("Scala",interpreter)
+      config, scalaInterpreter, dependencyDownloader)
+
     val kernel = initializeKernel(
       config, actorLoader, manager, commManager, magicLoader
     )
-    val responseMap = initializeResponseMap()
-
 
-    /*
-    // NOTE: Tested via initializing the following and returning this
-    //       interpreter instead of the Scala one
-    val pySparkInterpreter = new PySparkInterpreter(kernel)
-    //pySparkInterpreter.start()
-    kernel.data.put("PySpark", pySparkInterpreter)
-
-    // NOTE: Tested via initializing the following and returning this
-    //       interpreter instead of the Scala one
-    val sparkRInterpreter = new SparkRInterpreter(kernel)
-    //sparkRInterpreter.start()
-    kernel.data.put("SparkR", sparkRInterpreter)
-
-    val sqlInterpreter = new SqlInterpreter(kernel)
-    //sqlInterpreter.start()
-    kernel.data.put("SQL", sqlInterpreter)
-
-
-    val plugins = initializeInterpreterPlugins(kernel, config)
-
-    kernel.data.putAll(plugins.asJava)
-
-    // Add Scala to available data map
-    kernel.data.put("Scala", interpreter)
-    val defaultInterpreter: Interpreter =
-      config.getString("default_interpreter").toLowerCase match {
-        case "scala" =>
-          logger.info("Using Scala interpreter as default!")
-          interpreter.doQuietly {
-            interpreter.bind(
-              "kernel", "com.ibm.spark.kernel.api.Kernel",
-              kernel, List( """@transient implicit""")
-            )
-          }
-          interpreter
-        case "pyspark" =>
-          logger.info("Using PySpark interpreter as default!")
-          pySparkInterpreter
-        case "sparkr" =>
-          logger.info("Using SparkR interpreter as default!")
-          sparkRInterpreter
-        case "sql" =>
-          logger.info("Using SQL interpreter as default!")
-          sqlInterpreter
-        case p if(kernel.data.containsKey(p)) =>
-          kernel.data.get(p).asInstanceOf[Interpreter]
-        case unknown =>
-          logger.warn(s"Unknown interpreter '$unknown'! Defaulting to Scala!")
-          interpreter
-      }
+    val responseMap = initializeResponseMap()
 
-    */
-    //kernel.interpreter = defaultInterpreter
     initializeSparkContext(config, kernel, appName)
 
     (commStorage, commRegistrar, commManager,
-      manager.defaultInterpreter.getOrElse(null), kernel,
+      manager.defaultInterpreter.orNull, kernel,
       dependencyDownloader, magicLoader, responseMap)
 
   }
 
-  def initializeInterpreterPlugins(
-    kernel: KernelLike,
-    config: Config
-  ): Map[String, Interpreter] = {
-    val p = config
-      .getStringList("interpreter_plugins")
-      .listIterator().asScala
-
-    p.foldLeft(Map[String, Interpreter]())( (acc, v) => {
-      v.split(":") match {
-        case Array(name, className) =>
-          try {
-            acc + (name -> Class
-              .forName(className)
-              .getConstructor(classOf[KernelLike])
-              .newInstance(kernel)
-              .asInstanceOf[Interpreter])
-          }
-          catch {
-            case _:Throwable => acc
-          }
-        case _ => acc
-      }
-    })
-  }
 
   def initializeSparkContext(config:Config, kernel:Kernel, appName:String) = {
     if(!config.getBoolean("nosparkcontext")) {
@@ -209,27 +128,6 @@ trait StandardComponentInitialization extends ComponentInitialization {
     dependencyDownloader
   }
 
-  protected def initializeInterpreter(config: Config) = {
-    val interpreterArgs = config.getStringList("interpreter_args").asScala.toList
-    val maxInterpreterThreads = config.getInt("max_interpreter_threads")
-
-    logger.info(
-      s"Constructing interpreter with $maxInterpreterThreads threads and " +
-      "with arguments: " + interpreterArgs.mkString(" "))
-    val interpreter = new ScalaInterpreter(interpreterArgs, Console.out)
-      with StandardSparkIMainProducer
-      with TaskManagerProducerLike
-      with StandardSettingsProducer {
-      override def newTaskManager(): TaskManager =
-        new TaskManager(maximumWorkers = maxInterpreterThreads)
-    }
-
-    logger.debug("Starting interpreter")
-    interpreter.start()
-
-    interpreter
-  }
-
   protected[layer] def initializeSqlContext(sparkContext: SparkContext) = {
     val sqlContext: SQLContext = try {
       logger.info("Attempting to create Hive Context")

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala b/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
index 903ab69..520d68c 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
@@ -26,7 +26,7 @@ case class InterpreterManager(
     copy(interpreters = interpreters + (name -> interpreter))
   }
 
-  def defaultInterpreter(): Option[Interpreter] = {
+  def defaultInterpreter: Option[Interpreter] = {
     interpreters.get(default)
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
index 9a91a2e..fe6bc2d 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
@@ -46,19 +46,20 @@ import com.ibm.spark.global.ExecuteRequestState
 /**
  * Represents the main kernel API to be used for interaction.
  *
- * @param config The configuration used when starting the kernel
+ * @param _config The configuration used when starting the kernel
  * @param interpreterManager The interpreter manager to expose in this instance
  * @param comm The Comm manager to expose in this instance
  * @param actorLoader The actor loader to use for message relaying
  */
 @Experimental
 class Kernel (
-  private val config: Config,
+  private val _config: Config,
   private val actorLoader: ActorLoader,
   val interpreterManager: InterpreterManager,
   val comm: CommManager,
   val magicLoader: MagicLoader
 ) extends KernelLike with LogLike {
+
   /**
    * Represents the current input stream used by the kernel for the specific
    * thread.
@@ -111,7 +112,7 @@ class Kernel (
 
   interpreterManager.initializeInterpreters(this)
 
-  val interpreter = interpreterManager.defaultInterpreter().get
+  val interpreter = interpreterManager.defaultInterpreter.get
 
   /**
    * Handles the output of interpreting code.
@@ -136,6 +137,10 @@ class Kernel (
     }
   }
 
+  override def config:Config = {
+    _config
+  }
+
   /**
    * Executes a block of code represented as a string and returns the result.
    *
@@ -202,7 +207,7 @@ class Kernel (
     parentMessage: v5.KernelMessage = lastKernelMessage(),
     kmBuilder: v5.KMBuilder = v5.KMBuilder()
   ): FactoryMethods = {
-    new FactoryMethods(config, actorLoader, parentMessage, kmBuilder)
+    new FactoryMethods(_config, actorLoader, parentMessage, kmBuilder)
   }
 
   /**
@@ -347,7 +352,7 @@ class Kernel (
     conf.set("spark.submit.deployMode", "client")
 
     KeyValuePairUtils.stringToKeyValuePairSeq(
-      config.getString("spark_configuration")
+      _config.getString("spark_configuration")
     ).foreach { keyValuePair =>
       logger.info(s"Setting ${keyValuePair.key} to ${keyValuePair.value}")
       Try(conf.set(keyValuePair.key, keyValuePair.value))
@@ -372,7 +377,7 @@ class Kernel (
     var sparkContext: SparkContext = null
     val outStream = new KernelOutputStream(
       actorLoader, KMBuilder(), global.ScheduledTaskManager.instance,
-      sendEmptyOutput = config.getBoolean("send_empty_output")
+      sendEmptyOutput = _config.getBoolean("send_empty_output")
     )
 
     // Update global stream state and use it to set the Console local variables

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
index f5d5517..a5756a9 100644
--- a/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
@@ -44,7 +44,7 @@ class KernelSpec extends FunSpec with Matchers with MockitoSugar
     mockInterpreterManager = mock[InterpreterManager]
     mockSparkContext = mock[SparkContext]
     mockSparkConf = mock[SparkConf]
-    when(mockInterpreterManager.defaultInterpreter())
+    when(mockInterpreterManager.defaultInterpreter)
       .thenReturn(Some(mockInterpreter))
     when(mockInterpreterManager.interpreters)
       .thenReturn(Map[String, com.ibm.spark.interpreter.Interpreter]())

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala b/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
index 93c86bc..09aa6fd 100644
--- a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
+++ b/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
@@ -21,11 +21,13 @@ import java.io.{ByteArrayOutputStream, OutputStream}
 import akka.actor.{ActorSystem, Props}
 import akka.testkit.{ImplicitSender, TestKit}
 import com.ibm.spark.interpreter._
+import com.ibm.spark.kernel.api.KernelLike
 import com.ibm.spark.kernel.interpreter.scala.{StandardTaskManagerProducer, StandardSparkIMainProducer, StandardSettingsProducer, ScalaInterpreter}
 import com.ibm.spark.kernel.protocol.v5._
 import com.ibm.spark.kernel.protocol.v5.content._
 import com.ibm.spark.kernel.protocol.v5.interpreter.InterpreterActor
 import com.ibm.spark.kernel.protocol.v5.interpreter.tasks.InterpreterTaskFactory
+import com.ibm.spark.utils.MultiOutputStream
 import com.typesafe.config.ConfigFactory
 import org.apache.spark.{SparkConf, SparkContext}
 import org.scalatest.mock.MockitoSugar
@@ -52,11 +54,25 @@ class InterpreterActorSpecForIntegration extends TestKit(
   with MockitoSugar with UncaughtExceptionSuppression {
 
   private val output = new ByteArrayOutputStream()
-  private val interpreter = new ScalaInterpreter(List(), output)
-    with StandardSparkIMainProducer
-    with StandardTaskManagerProducer
-    with StandardSettingsProducer
+  private val interpreter = new ScalaInterpreter {
+    override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
+    override def init(kernel: KernelLike): Interpreter = {
+      settings = newSettings(List[String]())
+
+      val urls = _thisClassloader match {
+        case cl: java.net.URLClassLoader => cl.getURLs.toList
+        case a => // TODO: Should we really be using sys.error here?
+          sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
+      }
+      val classpath = urls.map(_.toString)
+
+      settings.classpath.value =
+        classpath.distinct.mkString(java.io.File.pathSeparator)
+      settings.embeddedDefaults(_runtimeClassloader)
 
+      this
+    }
+  }
   private val conf = new SparkConf()
     .setMaster("local[*]")
     .setAppName("Test Kernel")
@@ -65,6 +81,7 @@ class InterpreterActorSpecForIntegration extends TestKit(
 
   before {
     output.reset()
+    interpreter.init(mock[KernelLike])
     interpreter.start()
 
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala b/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
index 28d08b1..2bdce57 100644
--- a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
+++ b/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
@@ -18,8 +18,11 @@ package integration
 
 import java.io.OutputStream
 
+import com.ibm.spark.interpreter.Interpreter
+import com.ibm.spark.kernel.api.KernelLike
 import com.ibm.spark.kernel.interpreter.scala.{StandardSettingsProducer, StandardTaskManagerProducer, StandardSparkIMainProducer, ScalaInterpreter}
 import com.ibm.spark.kernel.protocol.v5.magic.PostProcessor
+import com.ibm.spark.utils.MultiOutputStream
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{BeforeAndAfter, Matchers, FunSpec}
 
@@ -32,10 +35,26 @@ class PostProcessorSpecForIntegration extends FunSpec with Matchers
   before {
     // TODO: Move instantiation and start of interpreter to a beforeAll
     //       for performance improvements
-    scalaInterpreter = new ScalaInterpreter(Nil, mock[OutputStream])
-      with StandardSparkIMainProducer
-      with StandardTaskManagerProducer
-      with StandardSettingsProducer
+    scalaInterpreter = new ScalaInterpreter {
+      override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
+      override def init(kernel: KernelLike): Interpreter = {
+        settings = newSettings(List[String]())
+
+        val urls = _thisClassloader match {
+          case cl: java.net.URLClassLoader => cl.getURLs.toList
+          case a => // TODO: Should we really be using sys.error here?
+            sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
+        }
+        val classpath = urls.map(_.toString)
+
+        settings.classpath.value =
+          classpath.distinct.mkString(java.io.File.pathSeparator)
+        settings.embeddedDefaults(_runtimeClassloader)
+
+        this
+      }
+    }
+    scalaInterpreter.init(mock[KernelLike])
 
     scalaInterpreter.start()
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala b/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala
index 4f0412a..877b162 100644
--- a/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala
+++ b/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala
@@ -62,50 +62,7 @@ object SparkKernelDeployer extends LogLike with MockitoSugar {
     }
   }
 
-  private trait ExposedComponentInitialization extends StandardComponentInitialization
-    with LogLike {
-    override protected def initializeInterpreter(config: Config): ScalaInterpreter
-      with StandardSparkIMainProducer with StandardTaskManagerProducer
-      with StandardSettingsProducer = {
-      val interpreterArgs = config.getStringList("interpreter_args").asScala.toList
-
-      logger.info("Constructing interpreter with arguments: " +
-        interpreterArgs.mkString(" "))
-      val interpreter = new ScalaInterpreter(interpreterArgs, mock[OutputStream])
-        with StandardSparkIMainProducer
-        with StandardTaskManagerProducer
-        with StandardSettingsProducer
-
-      logger.debug("Starting interpreter")
-      interpreter.start()
-
-      interpreter
-    }
-
-
 
-    /*
-    def reallyInitializeSparkContext(
-      config: Config,
-      actorLoader: ActorLoader,
-      kmBuilder: KMBuilder,
-      sparkConf: SparkConf
-    ): SparkContext = {
-      logger.debug("Constructing new Spark Context")
-      // TODO: Inject stream redirect headers in Spark dynamically
-      var sparkContext: SparkContext = null
-      val outStream = new KernelOutputStream(
-        actorLoader, KMBuilder(), global.ScheduledTaskManager.instance)
-      global.StreamState.setStreams(System.in, outStream, outStream)
-      global.StreamState.withStreams {
-        sparkContext = SparkContextProvider.sparkContext
-      }
-
-      sparkContext
-     }
-     */
-
-  }
 
   /**
    * Runs bare initialization, wrapping socket actors with test logic to
@@ -200,7 +157,7 @@ object SparkKernelDeployer extends LogLike with MockitoSugar {
     val kernelBootstrap =
       (new KernelBootstrap(new CommandLineOptions(Nil).toConfig)
         with ExposedBareInitialization
-        with ExposedComponentInitialization
+        with StandardComponentInitialization
         with StandardHandlerInitialization
         with StandardHookInitialization).initialize()
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/resources/compile/reference.conf
----------------------------------------------------------------------
diff --git a/resources/compile/reference.conf b/resources/compile/reference.conf
index 2aa6d63..d64d2b8 100644
--- a/resources/compile/reference.conf
+++ b/resources/compile/reference.conf
@@ -60,6 +60,7 @@ default_interpreter = "Scala"
 default_interpreter = ${?DEFAULT_INTERPRETER}
 
 default_interpreter_plugin = [
+  "Scala:com.ibm.spark.kernel.interpreter.scala.ScalaInterpreter",
   "PySpark:com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter",
   "SparkR:com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter",
   "SQL:com.ibm.spark.kernel.interpreter.sql.SqlInterpreter"

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/resources/test/reference.conf
----------------------------------------------------------------------
diff --git a/resources/test/reference.conf b/resources/test/reference.conf
index 7f3fcbb..6100469 100644
--- a/resources/test/reference.conf
+++ b/resources/test/reference.conf
@@ -58,6 +58,7 @@ default_interpreter = "Scala"
 default_interpreter = ${?DEFAULT_INTERPRETER}
 
 default_interpreter_plugin = [
+  "Scala:com.ibm.spark.kernel.interpreter.scala.ScalaInterpreter",
   "PySpark:com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter",
   "SparkR:com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter",
   "SQL:com.ibm.spark.kernel.interpreter.sql.SqlInterpreter"

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
index bccdf62..5d64b45 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -30,7 +30,7 @@ import com.ibm.spark.interpreter.imports.printers.{WrapperConsole, WrapperSystem
 import com.ibm.spark.kernel.api.{KernelLike, KernelOptions}
 import com.ibm.spark.utils.{MultiOutputStream, TaskManager}
 import org.apache.spark.SparkContext
-import org.apache.spark.repl.{SparkIMain, SparkJLineCompletion}
+import org.apache.spark.repl.{SparkCommandLine, SparkIMain, SparkJLineCompletion}
 import org.slf4j.LoggerFactory
 
 import scala.concurrent.{Await, Future}
@@ -42,44 +42,43 @@ import scala.tools.nsc.util.MergedClassPath
 import scala.tools.nsc.{Global, Settings, io}
 import scala.util.{Try => UtilTry}
 
-class ScalaInterpreter(
-  args: List[String],
-  out: OutputStream
-) extends Interpreter {
-  this: SparkIMainProducerLike
-    with TaskManagerProducerLike
-    with SettingsProducerLike =>
+class ScalaInterpreter() extends Interpreter {
 
   protected val logger = LoggerFactory.getLogger(this.getClass.getName)
 
   private val ExecutionExceptionName = "lastException"
-  val settings: Settings = newSettings(args)
+  protected var settings: Settings = null
 
-  private val _thisClassloader = this.getClass.getClassLoader
+  protected val _thisClassloader = this.getClass.getClassLoader
   protected val _runtimeClassloader =
     new URLClassLoader(Array(), _thisClassloader) {
       def addJar(url: URL) = this.addURL(url)
     }
 
-  /* Add scala.runtime libraries to interpreter classpath */ {
-    val urls = _thisClassloader match {
-      case cl: java.net.URLClassLoader => cl.getURLs.toList
-      case a => // TODO: Should we really be using sys.error here?
-        sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
-    }
-    val classpath = urls.map(_.toString)
-
-    settings.classpath.value =
-      classpath.distinct.mkString(java.io.File.pathSeparator)
-    settings.embeddedDefaults(_runtimeClassloader)
-  }
 
-  private val lastResultOut = new ByteArrayOutputStream()
-  private val multiOutputStream = MultiOutputStream(List(out, lastResultOut))
+  protected val lastResultOut = new ByteArrayOutputStream()
+  protected val multiOutputStream = MultiOutputStream(List(Console.out, lastResultOut))
   private var taskManager: TaskManager = _
   var sparkIMain: SparkIMain = _
   protected var jLineCompleter: SparkJLineCompletion = _
 
+  protected def newSparkIMain(
+    settings: Settings, out: JPrintWriter
+  ): SparkIMain = {
+    val s = new SparkIMain(settings, out)
+    s.initializeSynchronous()
+
+    s
+  }
+
+  private var maxInterpreterThreads:Int = TaskManager.DefaultMaximumWorkers
+
+  protected def newTaskManager(): TaskManager =
+    new TaskManager(maximumWorkers = maxInterpreterThreads)
+
+  protected def newSettings(args: List[String]): Settings =
+    new SparkCommandLine(args).settings
+
   /**
    * Adds jars to the runtime and compile time classpaths. Does not work with
    * directories or expanding star in a path.
@@ -191,6 +190,25 @@ class ScalaInterpreter(
   }
 
   override def init(kernel: KernelLike): Interpreter = {
+    import scala.collection.JavaConverters._
+    val args = kernel.config.getStringList("interpreter_args").asScala.toList
+    this.settings = newSettings(args)
+
+    val urls = _thisClassloader match {
+      case cl: java.net.URLClassLoader => cl.getURLs.toList
+      case a => // TODO: Should we really be using sys.error here?
+        sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
+    }
+    val classpath = urls.map(_.toString)
+
+    this.settings.classpath.value =
+      classpath.distinct.mkString(java.io.File.pathSeparator)
+    this.settings.embeddedDefaults(_runtimeClassloader)
+
+    maxInterpreterThreads = kernel.config.getInt("max_interpreter_threads")
+
+    start()
+
     doQuietly {
       bind(
         "kernel", "com.ibm.spark.kernel.api.Kernel",
@@ -201,6 +219,7 @@ class ScalaInterpreter(
     this
   }
 
+
   override def interrupt(): Interpreter = {
     require(sparkIMain != null && taskManager != null)
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala b/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
index 34a75f7..0de546c 100644
--- a/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
+++ b/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
@@ -86,10 +86,7 @@ class ScalaInterpreterSpec extends FunSpec
   }
 
   class StubbedStartInterpreter
-    extends ScalaInterpreter(mock[List[String]], mock[OutputStream])
-    with SparkIMainProducerLike
-    with TaskManagerProducerLike
-    with SettingsProducerLike
+    extends ScalaInterpreter
   {
     override def newSparkIMain(settings: Settings, out: JPrintWriter): SparkIMain = mockSparkIMain
     override def newTaskManager(): TaskManager = mockTaskManager

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/09ad06e0/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala b/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
index 490bff0..4032435 100644
--- a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
+++ b/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
@@ -20,7 +20,9 @@ import java.io.{ByteArrayOutputStream, OutputStream}
 
 import com.ibm.spark.global.StreamState
 import com.ibm.spark.interpreter._
+import com.ibm.spark.kernel.api.KernelLike
 import com.ibm.spark.kernel.interpreter.scala.{ScalaInterpreter, StandardSettingsProducer, StandardSparkIMainProducer, StandardTaskManagerProducer}
+import com.ibm.spark.utils.MultiOutputStream
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
 
@@ -32,10 +34,26 @@ class AddExternalJarMagicSpecForIntegration
   private var interpreter: Interpreter = _
 
   before {
-    interpreter = new ScalaInterpreter(Nil, mock[OutputStream])
-      with StandardSparkIMainProducer
-      with StandardTaskManagerProducer
-      with StandardSettingsProducer
+    interpreter = new ScalaInterpreter {
+      override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
+      override def init(kernel: KernelLike): Interpreter = {
+        settings = newSettings(List[String]())
+
+        val urls = _thisClassloader match {
+          case cl: java.net.URLClassLoader => cl.getURLs.toList
+          case a => // TODO: Should we really be using sys.error here?
+            sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
+        }
+        val classpath = urls.map(_.toString)
+
+        settings.classpath.value =
+          classpath.distinct.mkString(java.io.File.pathSeparator)
+        settings.embeddedDefaults(_runtimeClassloader)
+
+        this
+      }
+    }
+    interpreter.init(mock[KernelLike])
     interpreter.start()
 
     StreamState.setStreams(outputStream = outputResult)


[16/50] [abbrv] incubator-toree git commit: Issue #173 handling multiple lines in return result

Posted by lr...@apache.org.
Issue #173 handling multiple lines in return result

In the Scala interpreter file, the truncateResult regular expression
doesn't handle multi-line results. Add the (?s) (DOTALL) option, which
lets the dot match newline characters, to the regular expression.

Unit tests: 1) one-line output, 2) two-line output as listed in the
issue, and 3) output with more than two lines. All work fine.

As this is a simple fix, I didn't create a pull request. I will create
a diff for code review in later cases.
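
A small self-contained illustration of the difference; the sample value mirrors the unit test added in a follow-up commit:

    val withoutDotAll = """(res\d+):\s+(.+)\s+=\s+(.*)""".r
    val withDotAll    = """(?s)(res\d+):\s+(.+)\s+=\s+(.*)""".r

    val output = "res4: String = \nVector(1\n, 2\n)"

    // Scala's pattern extractor requires the regex to match the whole
    // string. Without (?s), the dot stops at newlines, so no match is found:
    output match {
      case withoutDotAll(_, _, res) => println("old regex: " + res)
      case _                        => println("old regex: no match") // taken
    }

    // With (?s), the dot also matches newlines and all three groups capture:
    output match {
      case withDotAll(_, _, res) => println("new regex: " + res) // Vector(1\n, 2\n)
      case _                     => println("new regex: no match")
    }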


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/6f46c20f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/6f46c20f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/6f46c20f

Branch: refs/heads/master
Commit: 6f46c20fbfd4a7eb7e9fe4dd559e6d404bdca4e4
Parents: 29856a7
Author: wm624@hotmail.com <wm...@hotmail.com>
Authored: Thu Nov 19 11:57:11 2015 -0800
Committer: wm624@hotmail.com <wm...@hotmail.com>
Committed: Thu Nov 19 11:57:11 2015 -0800

----------------------------------------------------------------------
 .../com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/6f46c20f/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
index 09930c6..6fc0aa6 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -246,7 +246,7 @@ class ScalaInterpreter() extends Interpreter {
   }
 
   def truncateResult(result:String, showType:Boolean =false, noTruncate: Boolean = false): String = {
-    val resultRX="""(res\d+):\s+(.+)\s+=\s+(.*)""".r
+    val resultRX="""(?s)(res\d+):\s+(.+)\s+=\s+(.*)""".r
 
     result match {
       case resultRX(varName,varType,resString) => {


[35/50] [abbrv] incubator-toree git commit: Merge pull request #221 from ibm-et/FixClasspathToInterpreter

Posted by lr...@apache.org.
Merge pull request #221 from ibm-et/FixClasspathToInterpreter

Build interpreter classpath based on Classloader hierarchy
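
A minimal sketch of the approach (an assumption based on the description and related commits, not the PR's exact code): walk the classloader chain, collect the URLs of every URLClassLoader, and join them into a classpath string for the interpreter settings.

    import java.net.URLClassLoader

    // Collect classpath entries from a classloader and all of its parents.
    def classpathFromHierarchy(cl: ClassLoader): List[String] = cl match {
      case null => Nil
      case urlCl: URLClassLoader =>
        urlCl.getURLs.toList.map(_.toString) ++ classpathFromHierarchy(urlCl.getParent)
      case other => classpathFromHierarchy(other.getParent)
    }

    val interpreterClasspath = classpathFromHierarchy(getClass.getClassLoader)
      .distinct
      .mkString(java.io.File.pathSeparator)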

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/a4f4bea6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/a4f4bea6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/a4f4bea6

Branch: refs/heads/master
Commit: a4f4bea61de86aa663748fbee75d5b6635f06cd8
Parents: 655a35b cac39e9
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Thu Dec 3 09:45:46 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Thu Dec 3 09:45:46 2015 -0600

----------------------------------------------------------------------
 etc/bin/spark-kernel                            |  1 -
 .../interpreter/scala/ScalaInterpreter.scala    | 34 ++++++++++++++------
 .../scala/ScalaInterpreterSpec.scala            | 33 ++++++++++++++++++-
 3 files changed, 56 insertions(+), 12 deletions(-)
----------------------------------------------------------------------



[05/50] [abbrv] incubator-toree git commit: fixed bug in SqlInterpreter constructor

Posted by lr...@apache.org.
fixed bug in SqlInterpreter constructor


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/2e6bf021
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/2e6bf021
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/2e6bf021

Branch: refs/heads/master
Commit: 2e6bf02151c8126bd1fe3c549712cb7c0e19920f
Parents: 502372d
Author: Brian Burns <bb...@us.ibm.com>
Authored: Tue Nov 10 16:30:08 2015 -0500
Committer: Brian Burns <bb...@us.ibm.com>
Committed: Tue Nov 10 16:30:08 2015 -0500

----------------------------------------------------------------------
 kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala        | 2 +-
 .../com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/2e6bf021/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
index f070004..9a91a2e 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
@@ -322,7 +322,7 @@ class Kernel (
 
   override def createSparkContext(conf: SparkConf): SparkContext = {
     _sparkConf = createSparkConf(conf)
-    _sparkContext = initializeSparkContext(sparkConf);
+    _sparkContext = initializeSparkContext(sparkConf)
     _javaSparkContext = new JavaSparkContext(_sparkContext)
     _sqlContext = new SQLContext(_sparkContext)
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/2e6bf021/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
----------------------------------------------------------------------
diff --git a/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala b/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
index 76ae262..837d917 100644
--- a/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
+++ b/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
@@ -30,7 +30,7 @@ import scala.tools.nsc.interpreter.{OutputStream, InputStream}
 /**
  * Represents an interpreter interface to Spark SQL.
  */
-class SqlInterpreter(private val kernel: KernelLike) extends Interpreter {
+class SqlInterpreter() extends Interpreter {
   private var _kernel: KernelLike = _
   private lazy val sqlService = new SqlService(_kernel)
   private lazy val sqlTransformer = new SqlTransformer


[17/50] [abbrv] incubator-toree git commit: check in fix for #173

Posted by lr...@apache.org.
check in fix for #173


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/06071e85
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/06071e85
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/06071e85

Branch: refs/heads/master
Commit: 06071e85702d27f5d2eb9d243dffb3ec125d7638
Parents: 9db161f
Author: wm624@hotmail.com <wm...@hotmail.com>
Authored: Fri Nov 20 10:20:53 2015 -0800
Committer: wm624@hotmail.com <wm...@hotmail.com>
Committed: Fri Nov 20 10:20:53 2015 -0800

----------------------------------------------------------------------
 .../com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala  | 2 +-
 .../ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala  | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/06071e85/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
index a327f68..4989715 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -205,7 +205,7 @@ class ScalaInterpreter(
   }
 
   def truncateResult(result:String, showType:Boolean =false, noTruncate: Boolean = false): String = {
-    val resultRX="""(res\d+):\s+(.+)\s+=\s+(.*)""".r
+    val resultRX="""(?s)(res\d+):\s+(.+)\s+=\s+(.*)""".r
 
     result match {
       case resultRX(varName,varType,resString) => {

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/06071e85/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala b/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
index 34a75f7..c9ef820 100644
--- a/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
+++ b/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
@@ -352,6 +352,8 @@ class ScalaInterpreterSpec extends FunSpec
         //  Results that match
         interpreter.truncateResult("res7: Int = 38") should be("38")
         interpreter.truncateResult("res7: Int = 38",true) should be("Int = 38")
+        interpreter.truncateResult("res4: String = \nVector(1\n, 2\n)") should be ("Vector(1\n, 2\n)")
+        interpreter.truncateResult("res4: String = \nVector(1\n, 2\n)",true) should be ("String = Vector(1\n, 2\n)")
         interpreter.truncateResult("res123") should be("")
         interpreter.truncateResult("res1") should be("")
         //  Results that don't match


[36/50] [abbrv] incubator-toree git commit: Add ordered support to fix message ordering between the kernel message relay and the IOPub socket. Flush the output stream after code execution.

Posted by lr...@apache.org.
Add ordered support to fix message ordering between the kernel message relay and the IOPub socket. Flush the output stream after code execution.
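
OrderedSupport stashes messages of designated types while the actor is processing, then replays them in arrival order. A minimal sketch of that mechanism using Akka's Stash — illustrative only, not Toree's actual implementation:

    import akka.actor.{Actor, Stash}
    import scala.concurrent.Future

    trait OrderedSupportSketch extends Actor with Stash {
      import context.dispatcher

      private case object FinishedProcessing

      /** The message types that must be processed strictly in order. */
      def orderedTypes(): Seq[Class[_]]

      /** Runs block off-thread, stashing ordered messages until it completes. */
      protected def withProcessing(block: => Unit): Unit = {
        context.become(waiting, discardOld = false)
        Future(block).onComplete(_ => self ! FinishedProcessing)
      }

      /** While processing: stash ordered messages, replay them when done. */
      private def waiting: Receive = {
        case FinishedProcessing =>
          context.unbecome()
          unstashAll()
        case m if orderedTypes().exists(_.isInstance(m)) =>
          stash()
      }
    }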


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/5b041721
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/5b041721
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/5b041721

Branch: refs/heads/master
Commit: 5b041721b385b704080ce6a3d959a2d6a3b061a8
Parents: a4f4bea
Author: wellecks <we...@gmail.com>
Authored: Thu Dec 3 11:08:10 2015 -0600
Committer: wellecks <we...@gmail.com>
Committed: Thu Dec 3 11:08:10 2015 -0600

----------------------------------------------------------------------
 .../communication/actors/PubSocketActor.scala   |  19 +++-
 .../security/SignatureCheckerActor.scala        |  13 ++-
 .../security/SignatureManagerActor.scala        |  34 ++++++-
 .../security/SignatureProducerActor.scala       |  13 ++-
 .../communication/utils/OrderedSupport.scala    |  89 ++++++++++++++++
 .../utils/OrderedSupportSpec.scala              | 101 +++++++++++++++++++
 .../kernel/protocol/v5/OrderedSupport.scala     |  89 ----------------
 .../protocol/v5/handler/BaseHandler.scala       |   1 +
 .../v5/handler/ExecuteRequestHandler.scala      |   7 +-
 .../handler/GenericSocketMessageHandler.scala   |  13 ++-
 .../v5/handler/InputRequestReplyHandler.scala   |   3 +-
 .../protocol/v5/kernel/socket/IOPub.scala       |   1 +
 .../protocol/v5/relay/KernelMessageRelay.scala  |   8 +-
 .../kernel/protocol/v5/OrderedSupportSpec.scala | 101 -------------------
 .../v5/handler/ExecuteRequestHandlerSpec.scala  |   5 +
 15 files changed, 290 insertions(+), 207 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/communication/src/main/scala/com/ibm/spark/communication/actors/PubSocketActor.scala
----------------------------------------------------------------------
diff --git a/communication/src/main/scala/com/ibm/spark/communication/actors/PubSocketActor.scala b/communication/src/main/scala/com/ibm/spark/communication/actors/PubSocketActor.scala
index 5f3b87b..f74764e 100644
--- a/communication/src/main/scala/com/ibm/spark/communication/actors/PubSocketActor.scala
+++ b/communication/src/main/scala/com/ibm/spark/communication/actors/PubSocketActor.scala
@@ -16,17 +16,23 @@
 package com.ibm.spark.communication.actors
 
 import akka.actor.Actor
+import com.ibm.spark.communication.utils.OrderedSupport
 import com.ibm.spark.communication.{SocketManager, ZMQMessage}
+import com.ibm.spark.kernel.protocol.v5.KernelMessage
 import com.ibm.spark.utils.LogLike
 import org.zeromq.ZMQ
 
 /**
  * Represents an actor containing a publish socket.
  *
+ * Note: OrderedSupport is used to ensure correct processing order.
+ *       A similar pattern may be useful for other socket actors if
+ *       issues arise in the future.
+ *
  * @param connection The address to bind to
  */
 class PubSocketActor(connection: String)
-  extends Actor with LogLike
+  extends Actor with LogLike with OrderedSupport
 {
   logger.debug(s"Initializing publish socket actor for $connection")
   private val manager: SocketManager = new SocketManager
@@ -37,9 +43,18 @@ class PubSocketActor(connection: String)
   }
 
   override def receive: Actor.Receive = {
-    case zmqMessage: ZMQMessage =>
+    case zmqMessage: ZMQMessage => withProcessing {
       val frames = zmqMessage.frames.map(byteString =>
         new String(byteString.toArray, ZMQ.CHARSET))
+
       socket.send(frames: _*)
+    }
   }
+
+  /**
+   * Defines the types that will be stashed by {@link #waiting() waiting}
+   * while the Actor is in processing state.
+   * @return
+   */
+  override def orderedTypes(): Seq[Class[_]] = Seq(classOf[ZMQMessage])
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/communication/src/main/scala/com/ibm/spark/communication/security/SignatureCheckerActor.scala
----------------------------------------------------------------------
diff --git a/communication/src/main/scala/com/ibm/spark/communication/security/SignatureCheckerActor.scala b/communication/src/main/scala/com/ibm/spark/communication/security/SignatureCheckerActor.scala
index d66cdb0..c3fabd7 100644
--- a/communication/src/main/scala/com/ibm/spark/communication/security/SignatureCheckerActor.scala
+++ b/communication/src/main/scala/com/ibm/spark/communication/security/SignatureCheckerActor.scala
@@ -17,6 +17,7 @@
 package com.ibm.spark.communication.security
 
 import akka.actor.Actor
+import com.ibm.spark.communication.utils.OrderedSupport
 import com.ibm.spark.utils.LogLike
 
 /**
@@ -25,14 +26,22 @@ import com.ibm.spark.utils.LogLike
  */
 class SignatureCheckerActor(
   private val hmac: Hmac
-) extends Actor with LogLike {
+) extends Actor with LogLike with OrderedSupport {
   override def receive: Receive = {
-    case (signature: String, blob: Seq[_]) =>
+    case (signature: String, blob: Seq[_]) => withProcessing {
       val stringBlob: Seq[String] = blob.map(_.toString)
       val hmacString = hmac(stringBlob: _*)
       val isValidSignature = hmacString == signature
       logger.trace(s"Signature ${signature} validity checked against " +
         s"hmac ${hmacString} with outcome ${isValidSignature}")
       sender ! isValidSignature
+    }
   }
+
+  /**
+   * Defines the types that will be stashed by {@link #waiting() waiting}
+   * while the Actor is in processing state.
+   * @return
+   */
+  override def orderedTypes(): Seq[Class[_]] = Seq(classOf[(String, Seq[_])])
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/communication/src/main/scala/com/ibm/spark/communication/security/SignatureManagerActor.scala
----------------------------------------------------------------------
diff --git a/communication/src/main/scala/com/ibm/spark/communication/security/SignatureManagerActor.scala b/communication/src/main/scala/com/ibm/spark/communication/security/SignatureManagerActor.scala
index 0225506..f381644 100644
--- a/communication/src/main/scala/com/ibm/spark/communication/security/SignatureManagerActor.scala
+++ b/communication/src/main/scala/com/ibm/spark/communication/security/SignatureManagerActor.scala
@@ -18,6 +18,7 @@ package com.ibm.spark.communication.security
 
 import akka.actor.{Props, ActorRef, Actor}
 import akka.util.Timeout
+import com.ibm.spark.communication.utils.OrderedSupport
 import com.ibm.spark.kernel.protocol.v5.KernelMessage
 import com.ibm.spark.utils.LogLike
 
@@ -27,7 +28,7 @@ import akka.pattern.pipe
 
 class SignatureManagerActor(
   key: String, scheme: String
-) extends Actor with LogLike {
+) extends Actor with LogLike with OrderedSupport {
   private val hmac = Hmac(key, HmacAlgorithm(scheme))
 
   def this(key: String) = this(key, HmacAlgorithm.SHA256.toString)
@@ -61,13 +62,38 @@ class SignatureManagerActor(
   override def receive: Receive = {
     // Check blob strings for matching digest
     case (signature: String, blob: Seq[_]) =>
-      (signatureChecker ? ((signature, blob))) pipeTo sender
+      startProcessing()
+      val destActor = sender()
+      val sigFuture = signatureChecker ? ((signature, blob))
+
+      sigFuture foreach { case isValid =>
+          destActor ! isValid
+          finishedProcessing()
+      }
 
     case message: KernelMessage =>
+      startProcessing()
+      val destActor = sender()
+
       // TODO: Proper error handling for possible exception from mapTo
-      (signatureProducer ? message).mapTo[String].map(
+      val sigFuture = (signatureProducer ? message).mapTo[String].map(
         result => message.copy(signature = result)
-      ) pipeTo sender
+      )
+
+      sigFuture foreach { case kernelMessage =>
+        destActor ! kernelMessage
+        finishedProcessing()
+      }
   }
+
+  /**
+   * Defines the types that will be stashed by {@link #waiting() waiting}
+   * while the Actor is in processing state.
+   * @return
+   */
+  override def orderedTypes(): Seq[Class[_]] = Seq(
+    classOf[(String, Seq[_])],
+    classOf[KernelMessage]
+  )
 }
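
One detail worth noting in the diff above: SignatureManagerActor cannot use withProcessing, because withProcessing calls finishedProcessing() as soon as the block returns, while the reply here only arrives when the ask future completes. Hence the manual startProcessing()/finishedProcessing() bracket, and the capture of sender() into a val, since sender() is not stable inside future callbacks. A sketch of that shape, under hypothetical simplified types (Signer, Manager, Done):

    import akka.actor.{Actor, ActorRef, Stash}
    import akka.pattern.ask
    import akka.util.Timeout
    import scala.concurrent.duration._

    case object Done

    // Hypothetical worker that "signs" a string.
    class Signer extends Actor {
      override def receive: Receive = { case s: String => sender ! s"signed:$s" }
    }

    // Manager bracketing an asynchronous reply: processing only finishes
    // once the future completes, not when receive returns.
    class Manager(signer: ActorRef) extends Actor with Stash {
      private implicit val timeout = Timeout(5.seconds)
      import context.dispatcher

      override def receive: Receive = {
        case s: String =>
          context.become(waiting, discardOld = false) // startProcessing()
          val destActor = sender() // capture: sender() is unstable in callbacks
          (signer ? s).mapTo[String] foreach { signed =>
            destActor ! signed
            self ! Done                               // finishedProcessing()
          }
      }

      def waiting: Receive = {
        case Done => context.unbecome(); unstashAll()
        case _: String => stash()
      }
    }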
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/communication/src/main/scala/com/ibm/spark/communication/security/SignatureProducerActor.scala
----------------------------------------------------------------------
diff --git a/communication/src/main/scala/com/ibm/spark/communication/security/SignatureProducerActor.scala b/communication/src/main/scala/com/ibm/spark/communication/security/SignatureProducerActor.scala
index d280883..36b5688 100644
--- a/communication/src/main/scala/com/ibm/spark/communication/security/SignatureProducerActor.scala
+++ b/communication/src/main/scala/com/ibm/spark/communication/security/SignatureProducerActor.scala
@@ -17,6 +17,7 @@
 package com.ibm.spark.communication.security
 
 import akka.actor.Actor
+import com.ibm.spark.communication.utils.OrderedSupport
 import com.ibm.spark.kernel.protocol.v5.KernelMessage
 import com.ibm.spark.utils.LogLike
 import play.api.libs.json.Json
@@ -27,9 +28,9 @@ import play.api.libs.json.Json
  */
 class SignatureProducerActor(
   private val hmac: Hmac
-) extends Actor with LogLike {
+) extends Actor with LogLike with OrderedSupport {
   override def receive: Receive = {
-    case message: KernelMessage =>
+    case message: KernelMessage => withProcessing {
       val signature = hmac(
         Json.stringify(Json.toJson(message.header)),
         Json.stringify(Json.toJson(message.parentHeader)),
@@ -37,5 +38,13 @@ class SignatureProducerActor(
         message.contentString
       )
       sender ! signature
+    }
   }
+
+  /**
+   * Defines the types that will be stashed by {@link #waiting() waiting}
+   * while the Actor is in processing state.
+   * @return
+   */
+  override def orderedTypes(): Seq[Class[_]] = Seq(classOf[KernelMessage])
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/communication/src/main/scala/com/ibm/spark/communication/utils/OrderedSupport.scala
----------------------------------------------------------------------
diff --git a/communication/src/main/scala/com/ibm/spark/communication/utils/OrderedSupport.scala b/communication/src/main/scala/com/ibm/spark/communication/utils/OrderedSupport.scala
new file mode 100644
index 0000000..8f41861
--- /dev/null
+++ b/communication/src/main/scala/com/ibm/spark/communication/utils/OrderedSupport.scala
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2014 IBM Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.ibm.spark.communication.utils
+
+import akka.actor.{Actor, Stash}
+import com.ibm.spark.utils.LogLike
+
+/**
+ * A trait to enforce ordered processing for messages of particular types.
+ */
+trait OrderedSupport extends Actor with Stash with LogLike {
+  /**
+   * Executes instead of the default receive when the Actor has begun
+   * processing. Stashes incoming messages of particular types, defined by
+   * {@link #orderedTypes() orderedTypes} function, for later processing. Uses
+   * the default receive method for all other types. Upon receiving a
+   * FinishedProcessing message, resumes processing all messages with the
+   * default receive.
+   * @return
+   */
+  def waiting : Receive = {
+    case FinishedProcessing =>
+      context.unbecome()
+      unstashAll()
+    case aVal: Any if (orderedTypes().contains(aVal.getClass)) =>
+      logger.trace(s"Stashing message ${aVal} of type ${aVal.getClass}.")
+      stash()
+    case aVal: Any =>
+      logger.trace(s"Forwarding message ${aVal} of type ${aVal.getClass} " +
+        "to default receive.")
+      receive(aVal)
+  }
+
+  /**
+   * Suspends the default receive method for types defined by the
+   * {@link #orderedTypes() orderedTypes} function.
+   */
+  def startProcessing(): Unit = {
+    logger.debug("Actor is in processing state and will stash messages of " +
+      s"types: ${orderedTypes.mkString(" ")}")
+    context.become(waiting, discardOld = false)
+  }
+
+  /**
+   * Resumes the default receive method for all message types.
+   */
+  def finishedProcessing(): Unit = {
+    logger.debug("Actor is no longer in processing state.")
+    self ! FinishedProcessing
+  }
+
+  /**
+   * Executes a block of code, wrapping it in start/finished processing
+   * needed for ordered execution.
+   *
+   * @param block The block to execute
+   * @tparam T The return type of the block
+   * @return The result of executing the block
+   */
+  def withProcessing[T](block: => T): T = {
+    startProcessing()
+    val results = block
+    finishedProcessing()
+    results
+  }
+
+  /**
+   * Defines the types that will be stashed by {@link #waiting() waiting}
+   * while the Actor is in processing state.
+   * @return
+   */
+  def orderedTypes(): Seq[Class[_]]
+
+  case object FinishedProcessing
+}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/communication/src/test/scala/com/ibm/spark/communication/utils/OrderedSupportSpec.scala
----------------------------------------------------------------------
diff --git a/communication/src/test/scala/com/ibm/spark/communication/utils/OrderedSupportSpec.scala b/communication/src/test/scala/com/ibm/spark/communication/utils/OrderedSupportSpec.scala
new file mode 100644
index 0000000..b10d4cb
--- /dev/null
+++ b/communication/src/test/scala/com/ibm/spark/communication/utils/OrderedSupportSpec.scala
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2014 IBM Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.ibm.spark.communication.utils
+
+import akka.actor._
+import akka.testkit.{ImplicitSender, TestKit}
+import org.scalatest.mock.MockitoSugar
+import org.scalatest.{FunSpecLike, Matchers}
+
+case class OrderedType()
+case class NotOrderedType()
+case class FinishProcessingMessage()
+case class ReceiveMessageCount(count: Int)
+
+class TestOrderedSupport extends OrderedSupport {
+  var receivedCounter = 0
+  override def orderedTypes(): Seq[Class[_]] = Seq(classOf[OrderedType])
+
+  override def receive: Receive = {
+    case OrderedType() =>
+      startProcessing()
+      receivedCounter = receivedCounter + 1
+      sender ! ReceiveMessageCount(receivedCounter)
+    case NotOrderedType() =>
+      receivedCounter = receivedCounter + 1
+      sender ! ReceiveMessageCount(receivedCounter)
+    case FinishProcessingMessage() =>
+      finishedProcessing()
+  }
+}
+
+class OrderedSupportSpec extends TestKit(ActorSystem("OrderedSupportSystem"))
+  with ImplicitSender with Matchers with FunSpecLike
+  with MockitoSugar  {
+
+  describe("OrderedSupport"){
+    describe("#waiting"){
+      it("should wait for types defined in orderedTypes"){
+      val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])
+
+        // Send a message having a type in orderedTypes
+        // Starts processing and is handled with receive()
+        testOrderedSupport ! new OrderedType
+        // This message should be handled with waiting()
+        testOrderedSupport ! new OrderedType
+
+        // Verify receive was not called for the second OrderedType
+        expectMsg(ReceiveMessageCount(1))
+
+      }
+
+      it("should process types not defined in orderedTypes"){
+        val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])
+
+        // Send a message that starts the processing
+        testOrderedSupport ! new OrderedType
+
+        // Send a message having a type not in orderedTypes
+        testOrderedSupport ! new NotOrderedType
+
+        // Verify receive did get called for NotOrderedType
+        expectMsg(ReceiveMessageCount(1))
+        expectMsg(ReceiveMessageCount(2))
+      }
+    }
+    describe("#finishedProcessing"){
+      it("should switch actor to receive method"){
+        val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])
+        
+        //  Switch actor to waiting mode
+        testOrderedSupport ! new OrderedType
+
+        //  Call finishedProcessing
+        testOrderedSupport ! new FinishProcessingMessage
+
+        //  Sending something that would match in receive, and is in orderedTypes
+        testOrderedSupport ! new OrderedType
+
+        expectMsg(ReceiveMessageCount(1))
+        expectMsg(ReceiveMessageCount(2))
+
+      }
+
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/OrderedSupport.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/OrderedSupport.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/OrderedSupport.scala
deleted file mode 100644
index 7a1360a..0000000
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/OrderedSupport.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2014 IBM Corp.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.ibm.spark.kernel.protocol.v5
-
-import akka.actor.{Stash, Actor}
-import com.ibm.spark.utils.LogLike
-
-/**
- * A trait to enforce ordered processing for messages of particular types.
- */
-trait OrderedSupport extends Actor with Stash with LogLike {
-  /**
-   * Executes instead of the default receive when the Actor has begun
-   * processing. Stashes incoming messages of particular types, defined by
-   * {@link #orderedTypes() orderedTypes} function, for later processing. Uses
-   * the default receive method for all other types. Upon receiving a
-   * FinishedProcessing message, resumes processing all messages with the
-   * default receive.
-   * @return
-   */
-  def waiting : Receive = {
-    case FinishedProcessing =>
-      context.unbecome()
-      unstashAll()
-    case aVal: Any if (orderedTypes().contains(aVal.getClass)) =>
-      logger.trace(s"Stashing message ${aVal} of type ${aVal.getClass}.")
-      stash()
-    case aVal: Any =>
-      logger.trace(s"Forwarding message ${aVal} of type ${aVal.getClass} " +
-        "to default receive.")
-      receive(aVal)
-  }
-
-  /**
-   * Suspends the default receive method for types defined by the
-   * {@link #orderedTypes() orderedTypes} function.
-   */
-  def startProcessing(): Unit = {
-    logger.debug("Actor is in processing state and will stash messages of " +
-      s"types: ${orderedTypes.mkString(" ")}")
-    context.become(waiting, discardOld = false)
-  }
-
-  /**
-   * Resumes the default receive method for all message types.
-   */
-  def finishedProcessing(): Unit = {
-    logger.debug("Actor is no longer in processing state.")
-    self ! FinishedProcessing
-  }
-
-  /**
-   * Executes a block of code, wrapping it in start/finished processing
-   * needed for ordered execution.
-   *
-   * @param block The block to execute
-   * @tparam T The return type of the block
-   * @return The result of executing the block
-   */
-  def withProcessing[T](block: => T): T = {
-    startProcessing()
-    val results = block
-    finishedProcessing()
-    results
-  }
-
-  /**
-   * Defines the types that will be stashed by {@link #waiting() waiting}
-   * while the Actor is in processing state.
-   * @return
-   */
-  def orderedTypes(): Seq[Class[_]]
-
-  case object FinishedProcessing
-}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/BaseHandler.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/BaseHandler.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/BaseHandler.scala
index 45ff16f..abbf6d3 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/BaseHandler.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/BaseHandler.scala
@@ -16,6 +16,7 @@
 
 package com.ibm.spark.kernel.protocol.v5.handler
 
+import com.ibm.spark.communication.utils.OrderedSupport
 import com.ibm.spark.kernel.protocol.v5._
 import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
 import com.ibm.spark.utils.MessageLogSupport

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandler.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandler.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandler.scala
index c3fbcdb..c066d98 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandler.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandler.scala
@@ -74,7 +74,12 @@ class ExecuteRequestHandler(
         (executeRequest, km, outputStream)
       ).mapTo[(ExecuteReply, ExecuteResult)]
 
-      executeFuture andThen {
+      // Flush the output stream after code execution completes to ensure
+      // stream messages are sent prior to idle status messages.
+      executeFuture andThen { case result =>
+        outputStream.flush()
+        result
+      } andThen {
         case Success(tuple) =>
           val (executeReply, executeResult) = updateCount(tuple, executionCount)
 

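The flush added above exploits the ordering guarantee of chained andThen stages: each stage runs only after the previous one completes, so the stream flush is observed before the success and failure handlers that publish status messages. A standalone sketch of the same chaining, using a hypothetical stream and result rather than the kernel's types:

    import java.io.{ByteArrayOutputStream, PrintStream}
    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.util.Success

    object FlushDemo extends App {
      val buffer = new ByteArrayOutputStream()
      val stream = new PrintStream(buffer) // no auto-flush

      val execution = Future { stream.print("some output"); 42 }

      execution andThen {
        case _ => stream.flush() // buffered output pushed out first...
      } andThen {
        case Success(n) =>       // ...before completion handling runs
          println(s"result=$n, output='${buffer.toString}'")
      }

      Thread.sleep(1000) // keep the JVM alive for the async callbacks
    }
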
http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/GenericSocketMessageHandler.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/GenericSocketMessageHandler.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/GenericSocketMessageHandler.scala
index b04c0e6..834f632 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/GenericSocketMessageHandler.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/GenericSocketMessageHandler.scala
@@ -17,6 +17,7 @@
 package com.ibm.spark.kernel.protocol.v5.handler
 
 import akka.actor.Actor
+import com.ibm.spark.communication.utils.OrderedSupport
 import com.ibm.spark.kernel.protocol.v5.KernelMessage
 import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
 import com.ibm.spark.utils.{MessageLogSupport, LogLike}
@@ -39,11 +40,19 @@ import com.ibm.spark.utils.{MessageLogSupport, LogLike}
  * @param socketType The type of socket, mapping to an Actor for this class to pass messages along to
  */
 class GenericSocketMessageHandler(actorLoader: ActorLoader, socketType: Enumeration#Value)
-  extends Actor with LogLike {
+  extends Actor with LogLike with OrderedSupport {
   override def receive: Receive = {
-    case message: KernelMessage =>
+    case message: KernelMessage => withProcessing {
       logger.debug(s"Sending KernelMessage ${message.header.msg_id} of type " +
         s"${message.header.msg_type} to ${socketType} socket")
       actorLoader.load(socketType) ! message
+    }
   }
+
+  /**
+   * Defines the types that will be stashed by {@link #waiting() waiting}
+   * while the Actor is in processing state.
+   * @return
+   */
+  override def orderedTypes(): Seq[Class[_]] = Seq(classOf[KernelMessage])
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/InputRequestReplyHandler.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/InputRequestReplyHandler.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/InputRequestReplyHandler.scala
index 0984e74..dba0eef 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/InputRequestReplyHandler.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/InputRequestReplyHandler.scala
@@ -18,7 +18,8 @@ package com.ibm.spark.kernel.protocol.v5.handler
 
 import akka.actor.ActorRef
 import com.ibm.spark.comm.{CommRegistrar, CommStorage}
-import com.ibm.spark.kernel.protocol.v5.{SystemActorType, OrderedSupport, KernelMessage}
+import com.ibm.spark.communication.utils.OrderedSupport
+import com.ibm.spark.kernel.protocol.v5.{SystemActorType, KernelMessage}
 import com.ibm.spark.kernel.protocol.v5.content.{InputReply, CommOpen}
 import com.ibm.spark.kernel.protocol.v5.kernel.{Utilities, ActorLoader}
 import com.ibm.spark.kernel.protocol.v5

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/kernel/socket/IOPub.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/kernel/socket/IOPub.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/kernel/socket/IOPub.scala
index 2287596..d2ff8e9 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/kernel/socket/IOPub.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/kernel/socket/IOPub.scala
@@ -19,6 +19,7 @@ package com.ibm.spark.kernel.protocol.v5.kernel.socket
 import akka.actor.Actor
 import akka.util.ByteString
 import com.ibm.spark.communication.ZMQMessage
+import com.ibm.spark.communication.utils.OrderedSupport
 import com.ibm.spark.kernel.protocol.v5._
 import com.ibm.spark.kernel.protocol.v5.kernel.Utilities
 import Utilities._

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/relay/KernelMessageRelay.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/relay/KernelMessageRelay.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/relay/KernelMessageRelay.scala
index 8caafc3..cc45479 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/relay/KernelMessageRelay.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/relay/KernelMessageRelay.scala
@@ -19,6 +19,7 @@ package com.ibm.spark.kernel.protocol.v5.relay
 import akka.pattern.ask
 import akka.util.Timeout
 import com.ibm.spark.communication.security.SecurityActorType
+import com.ibm.spark.communication.utils.OrderedSupport
 import com.ibm.spark.kernel.protocol.v5.MessageType.MessageType
 import com.ibm.spark.kernel.protocol.v5.content.ShutdownRequest
 import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
@@ -171,7 +172,8 @@ case class KernelMessageRelay(
       }
   }
 
-  override def orderedTypes(): Seq[Class[_]] = {
-    Seq(classOf[(Seq[_], KernelMessage)])
-  }
+  override def orderedTypes(): Seq[Class[_]] = Seq(
+    classOf[(Seq[_], KernelMessage)],
+    classOf[KernelMessage]
+  )
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/OrderedSupportSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/OrderedSupportSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/OrderedSupportSpec.scala
deleted file mode 100644
index 7cbb153..0000000
--- a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/OrderedSupportSpec.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright 2014 IBM Corp.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.ibm.spark.kernel.protocol.v5
-
-import akka.actor._
-import akka.testkit.{ImplicitSender, TestKit}
-import org.scalatest.mock.MockitoSugar
-import org.scalatest.{FunSpecLike, Matchers}
-
-case class OrderedType()
-case class NotOrderedType()
-case class FinishProcessingMessage()
-case class ReceiveMessageCount(count: Int)
-
-class TestOrderedSupport extends OrderedSupport {
-  var receivedCounter = 0
-  override def orderedTypes(): Seq[Class[_]] = Seq(classOf[OrderedType])
-
-  override def receive: Receive = {
-    case OrderedType() =>
-      startProcessing()
-      receivedCounter = receivedCounter + 1
-      sender ! ReceiveMessageCount(receivedCounter)
-    case NotOrderedType() =>
-      receivedCounter = receivedCounter + 1
-      sender ! ReceiveMessageCount(receivedCounter)
-    case FinishProcessingMessage() =>
-      finishedProcessing()
-  }
-}
-
-class OrderedSupportSpec extends TestKit(ActorSystem("OrderedSupportSystem"))
-  with ImplicitSender with Matchers with FunSpecLike
-  with MockitoSugar  {
-
-  describe("OrderedSupport"){
-    describe("#waiting"){
-      it("should wait for types defined in orderedTypes"){
-      val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])
-
-        // Send a message having a type in orderedTypes
-        // Starts processing and is handled with receive()
-        testOrderedSupport ! new OrderedType
-        // This message should be handled with waiting()
-        testOrderedSupport ! new OrderedType
-
-        // Verify receive was not called for the second OrderedType
-        expectMsg(ReceiveMessageCount(1))
-
-      }
-
-      it("should process types not defined in orderedTypes"){
-        val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])
-
-        // Send a message that starts the processing
-        testOrderedSupport ! new OrderedType
-
-        // Send a message having a type not in orderedTypes
-        testOrderedSupport ! new NotOrderedType
-
-        // Verify receive did get called for NotOrderedType
-        expectMsg(ReceiveMessageCount(1))
-        expectMsg(ReceiveMessageCount(2))
-      }
-    }
-    describe("#finishedProcessing"){
-      it("should switch actor to receive method"){
-        val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])
-        
-        //  Switch actor to waiting mode
-        testOrderedSupport ! new OrderedType
-
-        //  Call finishedProcessing
-        testOrderedSupport ! new FinishProcessingMessage
-
-        //  Sending something that would match in receive, and is in orderedTypes
-        testOrderedSupport ! new OrderedType
-
-        expectMsg(ReceiveMessageCount(1))
-        expectMsg(ReceiveMessageCount(2))
-
-      }
-
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5b041721/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandlerSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandlerSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandlerSpec.scala
index d48926d..c22bc41 100644
--- a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandlerSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/ExecuteRequestHandlerSpec.scala
@@ -44,6 +44,7 @@ class ExecuteRequestHandlerSpec extends TestKit(
   private var mockActorLoader: ActorLoader = _
   private var mockFactoryMethods: FactoryMethods = _
   private var mockKernel: Kernel = _
+  private var mockOutputStream: OutputStream = _
   private var handlerActor: ActorRef = _
   private var kernelMessageRelayProbe: TestProbe = _
   private var executeRequestRelayProbe: TestProbe = _
@@ -53,9 +54,13 @@ class ExecuteRequestHandlerSpec extends TestKit(
     mockActorLoader = mock[ActorLoader]
     mockFactoryMethods = mock[FactoryMethods]
     mockKernel = mock[Kernel]
+    mockOutputStream = mock[OutputStream]
     doReturn(mockFactoryMethods).when(mockKernel)
       .factory(any[KernelMessage], any[KMBuilder])
 
+    doReturn(mockOutputStream).when(mockFactoryMethods)
+      .newKernelOutputStream(anyString(), anyBoolean())
+
     // Add our handler and mock interpreter to the actor system
     handlerActor = system.actorOf(Props(
       classOf[ExecuteRequestHandler],


[11/50] [abbrv] incubator-toree git commit: Add protected methods to the Scala interpreter so tests can override them

Posted by lr...@apache.org.
Add protected methods to the Scala interpreter so tests can override them


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/ce103836
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/ce103836
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/ce103836

Branch: refs/heads/master
Commit: ce103836314ecb71a7a596630e3191b6a670706a
Parents: 09ad06e
Author: Brian Burns <bb...@us.ibm.com>
Authored: Tue Nov 17 15:15:40 2015 -0500
Committer: Brian Burns <bb...@us.ibm.com>
Committed: Tue Nov 17 15:15:40 2015 -0500

----------------------------------------------------------------------
 .../InterpreterActorSpecForIntegration.scala    | 25 +++++++-------------
 .../PostProcessorSpecForIntegration.scala       | 24 +++++++------------
 .../interpreter/scala/ScalaInterpreter.scala    | 22 ++++++++++++-----
 .../AddExternalJarMagicSpecForIntegration.scala | 23 +++++++-----------
 4 files changed, 41 insertions(+), 53 deletions(-)
----------------------------------------------------------------------
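
The shape of this refactoring is the classic template-method seam: init() keeps the algorithm in the base class, and the pieces that used to read kernel configuration move into protected methods a test can override. A small sketch of the idea under hypothetical simplified names (Component, maxThreads):

    // Base class: init() keeps the algorithm, protected methods are the seams.
    class Component {
      def init(): Component = {
        val threads = maxThreads() // overridable configuration read
        println(s"initializing with $threads threads")
        this
      }

      protected def maxThreads(): Int =
        sys.props.get("component.threads").map(_.toInt).getOrElse(4)
    }

    object SeamDemo extends App {
      // A test stubs the seam instead of re-implementing init() wholesale,
      // which is what the spec changes below do for interpreterArgs,
      // maxInterpreterThreads, and the kernel-variable binding.
      val testComponent = new Component {
        override protected def maxThreads(): Int = 1
      }
      testComponent.init() // no configuration plumbing required
    }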


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ce103836/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala b/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
index 09aa6fd..81f6656 100644
--- a/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
+++ b/kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala
@@ -27,7 +27,7 @@ import com.ibm.spark.kernel.protocol.v5._
 import com.ibm.spark.kernel.protocol.v5.content._
 import com.ibm.spark.kernel.protocol.v5.interpreter.InterpreterActor
 import com.ibm.spark.kernel.protocol.v5.interpreter.tasks.InterpreterTaskFactory
-import com.ibm.spark.utils.MultiOutputStream
+import com.ibm.spark.utils.{TaskManager, MultiOutputStream}
 import com.typesafe.config.ConfigFactory
 import org.apache.spark.{SparkConf, SparkContext}
 import org.scalatest.mock.MockitoSugar
@@ -56,23 +56,18 @@ class InterpreterActorSpecForIntegration extends TestKit(
   private val output = new ByteArrayOutputStream()
   private val interpreter = new ScalaInterpreter {
     override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
-    override def init(kernel: KernelLike): Interpreter = {
-      settings = newSettings(List[String]())
 
-      val urls = _thisClassloader match {
-        case cl: java.net.URLClassLoader => cl.getURLs.toList
-        case a => // TODO: Should we really be using sys.error here?
-          sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
-      }
-      val classpath = urls.map(_.toString)
-
-      settings.classpath.value =
-        classpath.distinct.mkString(java.io.File.pathSeparator)
-      settings.embeddedDefaults(_runtimeClassloader)
+    override protected def interpreterArgs(kernel: KernelLike): List[String] = {
+      Nil
+    }
 
-      this
+    override protected def maxInterpreterThreads(kernel: KernelLike): Int = {
+      TaskManager.DefaultMaximumWorkers
     }
+
+    override protected def bindKernelVarialble(kernel: KernelLike): Unit = { }
   }
+
   private val conf = new SparkConf()
     .setMaster("local[*]")
     .setAppName("Test Kernel")
@@ -82,8 +77,6 @@ class InterpreterActorSpecForIntegration extends TestKit(
   before {
     output.reset()
     interpreter.init(mock[KernelLike])
-    interpreter.start()
-
 
     interpreter.doQuietly({
       conf.set("spark.repl.class.uri", interpreter.classServerURI)

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ce103836/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala b/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
index 2bdce57..5cdddb3 100644
--- a/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
+++ b/kernel/src/test/scala/integration/PostProcessorSpecForIntegration.scala
@@ -22,7 +22,7 @@ import com.ibm.spark.interpreter.Interpreter
 import com.ibm.spark.kernel.api.KernelLike
 import com.ibm.spark.kernel.interpreter.scala.{StandardSettingsProducer, StandardTaskManagerProducer, StandardSparkIMainProducer, ScalaInterpreter}
 import com.ibm.spark.kernel.protocol.v5.magic.PostProcessor
-import com.ibm.spark.utils.MultiOutputStream
+import com.ibm.spark.utils.{TaskManager, MultiOutputStream}
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{BeforeAndAfter, Matchers, FunSpec}
 
@@ -37,27 +37,19 @@ class PostProcessorSpecForIntegration extends FunSpec with Matchers
     //       for performance improvements
     scalaInterpreter = new ScalaInterpreter {
       override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
-      override def init(kernel: KernelLike): Interpreter = {
-        settings = newSettings(List[String]())
 
-        val urls = _thisClassloader match {
-          case cl: java.net.URLClassLoader => cl.getURLs.toList
-          case a => // TODO: Should we really be using sys.error here?
-            sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
-        }
-        val classpath = urls.map(_.toString)
-
-        settings.classpath.value =
-          classpath.distinct.mkString(java.io.File.pathSeparator)
-        settings.embeddedDefaults(_runtimeClassloader)
+      override protected def interpreterArgs(kernel: KernelLike): List[String] = {
+        Nil
+      }
 
-        this
+      override protected def maxInterpreterThreads(kernel: KernelLike): Int = {
+        TaskManager.DefaultMaximumWorkers
       }
+
+      override protected def bindKernelVarialble(kernel: KernelLike): Unit = { }
     }
     scalaInterpreter.init(mock[KernelLike])
 
-    scalaInterpreter.start()
-
     postProcessor = new PostProcessor(scalaInterpreter)
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ce103836/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
index 5d64b45..09930c6 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -190,8 +190,7 @@ class ScalaInterpreter() extends Interpreter {
   }
 
   override def init(kernel: KernelLike): Interpreter = {
-    import scala.collection.JavaConverters._
-    val args = kernel.config.getStringList("interpreter_args").asScala.toList
+    val args = interpreterArgs(kernel)
     this.settings = newSettings(args)
 
     val urls = _thisClassloader match {
@@ -205,21 +204,32 @@ class ScalaInterpreter() extends Interpreter {
       classpath.distinct.mkString(java.io.File.pathSeparator)
     this.settings.embeddedDefaults(_runtimeClassloader)
 
-    maxInterpreterThreads = kernel.config.getInt("max_interpreter_threads")
+    maxInterpreterThreads = maxInterpreterThreads(kernel)
 
     start()
+    bindKernelVarialble(kernel)
+
+    this
+  }
+
+  protected def interpreterArgs(kernel: KernelLike): List[String] = {
+    import scala.collection.JavaConverters._
+    kernel.config.getStringList("interpreter_args").asScala.toList
+  }
+
+  protected def maxInterpreterThreads(kernel: KernelLike): Int = {
+    kernel.config.getInt("max_interpreter_threads")
+  }
 
+  protected def bindKernelVarialble(kernel: KernelLike): Unit = {
     doQuietly {
       bind(
         "kernel", "com.ibm.spark.kernel.api.Kernel",
         kernel, List( """@transient implicit""")
       )
     }
-
-    this
   }
 
-
   override def interrupt(): Interpreter = {
     require(sparkIMain != null && taskManager != null)
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ce103836/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala b/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
index 4032435..60cfcac 100644
--- a/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
+++ b/scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala
@@ -22,7 +22,7 @@ import com.ibm.spark.global.StreamState
 import com.ibm.spark.interpreter._
 import com.ibm.spark.kernel.api.KernelLike
 import com.ibm.spark.kernel.interpreter.scala.{ScalaInterpreter, StandardSettingsProducer, StandardSparkIMainProducer, StandardTaskManagerProducer}
-import com.ibm.spark.utils.MultiOutputStream
+import com.ibm.spark.utils.{TaskManager, MultiOutputStream}
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
 
@@ -36,25 +36,18 @@ class AddExternalJarMagicSpecForIntegration
   before {
     interpreter = new ScalaInterpreter {
       override protected val multiOutputStream = MultiOutputStream(List(mock[OutputStream], lastResultOut))
-      override def init(kernel: KernelLike): Interpreter = {
-        settings = newSettings(List[String]())
 
-        val urls = _thisClassloader match {
-          case cl: java.net.URLClassLoader => cl.getURLs.toList
-          case a => // TODO: Should we really be using sys.error here?
-            sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
-        }
-        val classpath = urls.map(_.toString)
-
-        settings.classpath.value =
-          classpath.distinct.mkString(java.io.File.pathSeparator)
-        settings.embeddedDefaults(_runtimeClassloader)
+      override protected def interpreterArgs(kernel: KernelLike): List[String] = {
+        Nil
+      }
 
-        this
+      override protected def maxInterpreterThreads(kernel: KernelLike): Int = {
+        TaskManager.DefaultMaximumWorkers
       }
+
+      override protected def bindKernelVarialble(kernel: KernelLike): Unit = { }
     }
     interpreter.init(mock[KernelLike])
-    interpreter.start()
 
     StreamState.setStreams(outputStream = outputResult)
   }


[13/50] [abbrv] incubator-toree git commit: Merge pull request #203 from ibm-et/CommParentInfo

Posted by lr...@apache.org.
Merge pull request #203 from ibm-et/CommParentInfo

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/e6e278dc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/e6e278dc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/e6e278dc

Branch: refs/heads/master
Commit: e6e278dcd61a2f7b655ed17aac148c4e0feb21af
Parents: 45e451a 7c45d15
Author: Gino Bustelo <gi...@bustelos.com>
Authored: Tue Nov 17 15:56:08 2015 -0600
Committer: Gino Bustelo <gi...@bustelos.com>
Committed: Tue Nov 17 15:56:08 2015 -0600

----------------------------------------------------------------------
 .../protocol/v5/handler/CommCloseHandler.scala  | 10 ++++---
 .../protocol/v5/handler/CommMsgHandler.scala    | 10 ++++---
 .../protocol/v5/handler/CommOpenHandler.scala   | 10 ++++---
 .../v5/handler/CommCloseHandlerSpec.scala       | 29 +++++++++++++++++++-
 .../v5/handler/CommMsgHandlerSpec.scala         | 27 ++++++++++++++++++
 .../v5/handler/CommOpenHandlerSpec.scala        | 29 ++++++++++++++++++++
 6 files changed, 102 insertions(+), 13 deletions(-)
----------------------------------------------------------------------



[46/50] [abbrv] incubator-toree git commit: Fixed sqlContext not appearing

Posted by lr...@apache.org.
Fixed sqlContext not appearing


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/73cc589d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/73cc589d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/73cc589d

Branch: refs/heads/master
Commit: 73cc589d412277a1fd1c79e70945bc1d2cccd33d
Parents: 3905e47
Author: Chip Senkbeil <pa...@us.ibm.com>
Authored: Tue Dec 8 11:47:18 2015 -0600
Committer: Chip Senkbeil <pa...@us.ibm.com>
Committed: Tue Dec 8 11:49:02 2015 -0600

----------------------------------------------------------------------
 .../com/ibm/spark/interpreter/Interpreter.scala | 15 ++++++-
 .../boot/layer/ComponentInitialization.scala    | 45 --------------------
 .../scala/com/ibm/spark/kernel/api/Kernel.scala | 45 ++++++++++++++++++--
 .../com/ibm/spark/kernel/api/KernelSpec.scala   |  1 -
 .../scala/test/utils/DummyInterpreter.scala     | 16 +++++++
 .../pyspark/PySparkInterpreter.scala            |  8 ++--
 .../interpreter/scala/ScalaInterpreter.scala    | 41 +++++++++++++-----
 .../interpreter/sparkr/SparkRInterpreter.scala  |  5 +++
 .../kernel/interpreter/sql/SqlInterpreter.scala |  4 ++
 9 files changed, 116 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala b/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
index 76d4432..6200b9b 100644
--- a/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
+++ b/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
@@ -20,6 +20,7 @@ import java.net.URL
 
 import com.ibm.spark.kernel.api.KernelLike
 import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
 
 import scala.tools.nsc.interpreter._
 
@@ -79,7 +80,19 @@ trait Interpreter {
    */
   def doQuietly[T](body: => T): T
 
-  def bindSparkContext(sparkContext: SparkContext): Unit = ???
+  /**
+   * Binds the SparkContext instance to the interpreter's namespace.
+   *
+   * @param sparkContext The SparkContext to bind
+   */
+  def bindSparkContext(sparkContext: SparkContext): Unit
+
+  /**
+   * Binds the SQLContext instance to the interpreter's namespace.
+   *
+   * @param sqlContext The SQLContext to bind
+   */
+  def bindSqlContext(sqlContext: SQLContext): Unit
 
   /**
    * Binds a variable in the interpreter to a value.

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
index 973075b..939b896 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
@@ -128,51 +128,6 @@ trait StandardComponentInitialization extends ComponentInitialization {
     dependencyDownloader
   }
 
-  protected[layer] def initializeSqlContext(sparkContext: SparkContext) = {
-    val sqlContext: SQLContext = try {
-      logger.info("Attempting to create Hive Context")
-      val hiveContextClassString =
-        "org.apache.spark.sql.hive.HiveContext"
-
-      logger.debug(s"Looking up $hiveContextClassString")
-      val hiveContextClass = Class.forName(hiveContextClassString)
-
-      val sparkContextClass = classOf[SparkContext]
-      val sparkContextClassName = sparkContextClass.getName
-
-      logger.debug(s"Searching for constructor taking $sparkContextClassName")
-      val hiveContextContructor =
-        hiveContextClass.getConstructor(sparkContextClass)
-
-      logger.debug("Invoking Hive Context constructor")
-      hiveContextContructor.newInstance(sparkContext).asInstanceOf[SQLContext]
-    } catch {
-      case _: Throwable =>
-        logger.warn("Unable to create Hive Context! Defaulting to SQL Context!")
-        new SQLContext(sparkContext)
-    }
-
-    sqlContext
-  }
-
-  protected[layer] def updateInterpreterWithSqlContext(
-    sqlContext: SQLContext, interpreter: Interpreter
-  ): Unit = {
-    interpreter.doQuietly {
-      // TODO: This only adds the context to the main interpreter AND
-      //       is limited to the Scala interpreter interface
-      logger.debug("Adding SQL Context to main interpreter")
-      interpreter.bind(
-        "sqlContext",
-        classOf[SQLContext].getName,
-        sqlContext,
-        List( """@transient""")
-      )
-
-      sqlContext
-    }
-  }
-
   protected def initializeResponseMap(): collection.mutable.Map[String, ActorRef] =
     new ConcurrentHashMap[String, ActorRef]().asScala
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
index 3ff4f85..219804a 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
@@ -329,11 +329,13 @@ class Kernel (
     _sparkConf = createSparkConf(conf)
     _sparkContext = initializeSparkContext(sparkConf)
     _javaSparkContext = new JavaSparkContext(_sparkContext)
-    _sqlContext = new SQLContext(_sparkContext)
+    _sqlContext = initializeSqlContext(_sparkContext)
 
-    logger.info( s"Connecting to spark.master ${_sparkConf.getOption("spark.master").getOrElse("not_set")}")
+    val sparkMaster = _sparkConf.getOption("spark.master").getOrElse("not_set")
+    logger.info( s"Connecting to spark.master $sparkMaster")
 
-    updateInterpreterWithSparkContext(sparkContext)
+    updateInterpreterWithSparkContext(interpreter, sparkContext)
+    updateInterpreterWithSqlContext(interpreter, sqlContext)
 
     magicLoader.dependencyMap =
       magicLoader.dependencyMap.setSparkContext(_sparkContext)
@@ -394,12 +396,47 @@ class Kernel (
 
   // TODO: Think of a better way to test without exposing this
   protected[kernel] def updateInterpreterWithSparkContext(
-    sparkContext: SparkContext
+    interpreter: Interpreter, sparkContext: SparkContext
   ) = {
 
     interpreter.bindSparkContext(sparkContext)
   }
 
+  protected[kernel] def initializeSqlContext(
+    sparkContext: SparkContext
+  ): SQLContext = {
+    val sqlContext: SQLContext = try {
+      logger.info("Attempting to create Hive Context")
+      val hiveContextClassString =
+        "org.apache.spark.sql.hive.HiveContext"
+
+      logger.debug(s"Looking up $hiveContextClassString")
+      val hiveContextClass = Class.forName(hiveContextClassString)
+
+      val sparkContextClass = classOf[SparkContext]
+      val sparkContextClassName = sparkContextClass.getName
+
+      logger.debug(s"Searching for constructor taking $sparkContextClassName")
+      val hiveContextContructor =
+        hiveContextClass.getConstructor(sparkContextClass)
+
+      logger.debug("Invoking Hive Context constructor")
+      hiveContextContructor.newInstance(sparkContext).asInstanceOf[SQLContext]
+    } catch {
+      case _: Throwable =>
+        logger.warn("Unable to create Hive Context! Defaulting to SQL Context!")
+        new SQLContext(sparkContext)
+    }
+
+    sqlContext
+  }
+
+  protected[kernel] def updateInterpreterWithSqlContext(
+    interpreter: Interpreter, sqlContext: SQLContext
+  ): Unit = {
+    interpreter.bindSqlContext(sqlContext)
+  }
+
   override def interpreter(name: String): Option[Interpreter] = {
     interpreterManager.interpreters.get(name)
   }
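
initializeSqlContext above looks HiveContext up by name rather than referencing it directly, so the kernel builds without a Hive dependency and degrades to a plain SQLContext when the class is missing at runtime. The same optional-dependency pattern, sketched with hypothetical class names so it runs without Spark on the classpath:

    object ReflectiveFallbackDemo extends App {
      // Load the preferred class by name; fall back when it is absent.
      def loadPreferred(preferredClassName: String, fallback: => Any): Any =
        try {
          // Reflection keeps the preferred class an optional dependency.
          Class.forName(preferredClassName).getConstructor().newInstance()
        } catch {
          case _: Throwable => fallback
        }

      // java.util.ArrayList exists, so the preferred branch wins:
      println(loadPreferred("java.util.ArrayList", "fallback").getClass.getName)
      // A class that is not on the classpath degrades gracefully:
      println(loadPreferred("org.example.NoSuchContext", "fallback"))
    }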

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
index 98157a9..58ea0c5 100644
--- a/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
@@ -10,7 +10,6 @@ import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
 import com.ibm.spark.magic.MagicLoader
 import com.typesafe.config.Config
 import org.apache.spark.{SparkConf, SparkContext}
-import org.mockito.ArgumentCaptor
 import org.mockito.Mockito._
 import org.mockito.Matchers._
 import org.scalatest.mock.MockitoSugar

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/kernel/src/test/scala/test/utils/DummyInterpreter.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/test/utils/DummyInterpreter.scala b/kernel/src/test/scala/test/utils/DummyInterpreter.scala
index ee7c096..aec9635 100644
--- a/kernel/src/test/scala/test/utils/DummyInterpreter.scala
+++ b/kernel/src/test/scala/test/utils/DummyInterpreter.scala
@@ -5,6 +5,8 @@ import java.net.URL
 import com.ibm.spark.interpreter.{ExecuteFailure, ExecuteOutput, Interpreter}
 import com.ibm.spark.interpreter.Results.Result
 import com.ibm.spark.kernel.api.KernelLike
+import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
 
 import scala.tools.nsc.interpreter.{OutputStream, InputStream}
 
@@ -109,4 +111,18 @@ class DummyInterpreter(kernel: KernelLike) extends Interpreter {
    * @return The newly initialized interpreter
    */
   override def init(kernel: KernelLike): Interpreter = ???
+
+  /**
+   * Binds the SparkContext instance to the interpreter's namespace.
+   *
+   * @param sparkContext The SparkContext to bind
+   */
+  override def bindSparkContext(sparkContext: SparkContext): Unit = ???
+
+  /**
+   * Binds the SQLContext instance to the interpreter's namespace.
+   *
+   * @param sqlContext The SQLContext to bind
+   */
+  override def bindSqlContext(sqlContext: SQLContext): Unit = ???
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala
----------------------------------------------------------------------
diff --git a/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala b/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala
index 38e1d68..615ed19 100644
--- a/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala
+++ b/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala
@@ -21,6 +21,7 @@ import com.ibm.spark.interpreter.Results.Result
 import com.ibm.spark.interpreter._
 import com.ibm.spark.kernel.api.KernelLike
 import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
 import org.slf4j.LoggerFactory
 import py4j.GatewayServer
 
@@ -78,10 +79,11 @@ class PySparkInterpreter(
     this
   }
 
+  // Unsupported (but can be invoked)
+  override def bindSparkContext(sparkContext: SparkContext): Unit = {}
 
-  override def bindSparkContext(sparkContext: SparkContext) = {
-
-  }
+  // Unsupported (but can be invoked)
+  override def bindSqlContext(sqlContext: SQLContext): Unit = {}
 
   /**
    * Executes the provided code with the option to silence output.

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
index 078054a..ca9bd7a 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -31,6 +31,7 @@ import com.ibm.spark.kernel.api.{KernelLike, KernelOptions}
 import com.ibm.spark.utils.{MultiOutputStream, TaskManager}
 import org.apache.spark.SparkContext
 import org.apache.spark.repl.{SparkCommandLine, SparkIMain, SparkJLineCompletion}
+import org.apache.spark.sql.SQLContext
 import org.slf4j.LoggerFactory
 
 import scala.annotation.tailrec
@@ -514,12 +515,16 @@ class ScalaInterpreter() extends Interpreter {
   }
 
   override def bindSparkContext(sparkContext: SparkContext) = {
+    val bindName = "sc"
 
     doQuietly {
-      logger.debug("Binding context into interpreter")
+      logger.debug(s"Binding SparkContext into interpreter as $bindName")
       bind(
-        "sc", "org.apache.spark.SparkContext",
-        sparkContext, List( """@transient"""))
+        bindName,
+        "org.apache.spark.SparkContext",
+        sparkContext,
+        List( """@transient""")
+      )
 
       // NOTE: This is needed because interpreter blows up after adding
       //       dependencies to SparkContext and Interpreter before the
@@ -530,16 +535,32 @@ class ScalaInterpreter() extends Interpreter {
       logger.debug("Initializing Spark cluster in interpreter")
 
       doQuietly {
-        interpret("""
-                                | val $toBeNulled = {
-                                | var $toBeNulled = sc.emptyRDD.collect()
-                                | $toBeNulled = null
-                                |  }
-                                |
-                                |""".stripMargin)
+        interpret(Seq(
+          "val $toBeNulled = {",
+          "  var $toBeNulled = sc.emptyRDD.collect()",
+          "  $toBeNulled = null",
+          "}"
+        ).mkString("\n").trim())
       }
     }
+  }
+
+  override def bindSqlContext(sqlContext: SQLContext): Unit = {
+    val bindName = "sqlContext"
+
+    doQuietly {
+      // TODO: This only adds the context to the main interpreter AND
+      //       is limited to the Scala interpreter interface
+      logger.debug(s"Binding SQLContext into interpreter as $bindName")
+      bind(
+        bindName,
+        classOf[SQLContext].getName,
+        sqlContext,
+        List( """@transient""")
+      )
 
+      sqlContext
+    }
   }
 
   override def bind(

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala
----------------------------------------------------------------------
diff --git a/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala b/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala
index a950da0..45fe03c 100644
--- a/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala
+++ b/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala
@@ -21,6 +21,7 @@ import com.ibm.spark.interpreter.Results.Result
 import com.ibm.spark.interpreter._
 import com.ibm.spark.kernel.api.KernelLike
 import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
 import org.slf4j.LoggerFactory
 
 import scala.concurrent.Await
@@ -133,8 +134,12 @@ class SparkRInterpreter(
   // Unsupported
   override def classServerURI: String = ""
 
+  // Unsupported (but can be invoked)
   override def bindSparkContext(sparkContext: SparkContext): Unit = {}
 
+  // Unsupported (but can be invoked)
+  override def bindSqlContext(sqlContext: SQLContext): Unit = {}
+
   // Unsupported
   override def interrupt(): Interpreter = ???
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/73cc589d/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
----------------------------------------------------------------------
diff --git a/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala b/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
index 837d917..889d4a6 100644
--- a/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
+++ b/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
@@ -102,8 +102,12 @@ class SqlInterpreter() extends Interpreter {
   // Unsupported
   override def classServerURI: String = ""
 
+  // Unsupported (but can be invoked)
   override def bindSparkContext(sparkContext: SparkContext): Unit = {}
 
+  // Unsupported (but can be invoked)
+  override def bindSqlContext(sqlContext: SQLContext): Unit = {}
+
   // Unsupported
   override def interrupt(): Interpreter = ???
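
As a hedged illustration of the API this commit completes (a sketch, not code from the repository): once the kernel has created its SparkContext and SQLContext, it can push both into every registered interpreter through the two bind methods. The helper name bindContexts and the SparkConf settings below are assumptions for illustration; only bindSparkContext/bindSqlContext come from the diff above.

    // Sketch only: bindContexts is hypothetical; bindSparkContext and
    // bindSqlContext are the trait methods shown in the diffs above.
    import com.ibm.spark.interpreter.Interpreter
    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    def bindContexts(interpreters: Iterable[Interpreter]): Unit = {
      val sparkContext = new SparkContext(
        new SparkConf().setMaster("local[*]").setAppName("toree-sketch"))
      val sqlContext = new SQLContext(sparkContext)

      interpreters.foreach { interpreter =>
        interpreter.bindSparkContext(sparkContext) // Scala binds this as "sc"
        interpreter.bindSqlContext(sqlContext)     // Scala binds "sqlContext"
      }
    }

Note that the "Unsupported (but can be invoked)" no-op overrides above are what make such a loop safe: the PySpark, SparkR, and SQL interpreters accept both calls without doing anything, so callers need not special-case them.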
 


[38/50] [abbrv] incubator-toree git commit: Merge pull request #198 from ibm-et/FixRunAllPrintln

Posted by lr...@apache.org.
Merge pull request #198 from ibm-et/FixRunAllPrintln

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/a9de6aab
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/a9de6aab
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/a9de6aab

Branch: refs/heads/master
Commit: a9de6aab3975c1cd82360c9125ac43413bb51ec8
Parents: f207e2f 5b04172
Author: Gino Bustelo <gi...@bustelos.com>
Authored: Fri Dec 4 06:49:22 2015 -0600
Committer: Gino Bustelo <gi...@bustelos.com>
Committed: Fri Dec 4 06:49:22 2015 -0600

----------------------------------------------------------------------
 .../communication/actors/PubSocketActor.scala   |  19 +++-
 .../security/SignatureCheckerActor.scala        |  13 ++-
 .../security/SignatureManagerActor.scala        |  34 ++++++-
 .../security/SignatureProducerActor.scala       |  13 ++-
 .../communication/utils/OrderedSupport.scala    |  89 ++++++++++++++++
 .../utils/OrderedSupportSpec.scala              | 101 +++++++++++++++++++
 .../kernel/protocol/v5/OrderedSupport.scala     |  89 ----------------
 .../protocol/v5/handler/BaseHandler.scala       |   1 +
 .../v5/handler/ExecuteRequestHandler.scala      |   7 +-
 .../handler/GenericSocketMessageHandler.scala   |  13 ++-
 .../v5/handler/InputRequestReplyHandler.scala   |   3 +-
 .../protocol/v5/kernel/socket/IOPub.scala       |   1 +
 .../protocol/v5/relay/KernelMessageRelay.scala  |   8 +-
 .../kernel/protocol/v5/OrderedSupportSpec.scala | 101 -------------------
 .../v5/handler/ExecuteRequestHandlerSpec.scala  |   5 +
 15 files changed, 290 insertions(+), 207 deletions(-)
----------------------------------------------------------------------



[09/50] [abbrv] incubator-toree git commit: Attach parent header to comm messages that are generated with a comm writer.

Posted by lr...@apache.org.
Attach parent header to comm messages that are generated with a comm writer.


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/d1f93bb0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/d1f93bb0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/d1f93bb0

Branch: refs/heads/master
Commit: d1f93bb06f0fefafa0d1876e91680f822ddc12d6
Parents: 9db161f
Author: wellecks <we...@gmail.com>
Authored: Mon Nov 16 15:04:34 2015 -0600
Committer: wellecks <we...@gmail.com>
Committed: Mon Nov 16 15:04:34 2015 -0600

----------------------------------------------------------------------
 .../protocol/v5/handler/CommMsgHandler.scala    | 10 +++++---
 .../v5/handler/CommMsgHandlerSpec.scala         | 27 ++++++++++++++++++++
 2 files changed, 33 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/d1f93bb0/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandler.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandler.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandler.scala
index a0f5adc..03baef9 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandler.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandler.scala
@@ -42,22 +42,24 @@ class CommMsgHandler(
 {
   override def process(kernelMessage: KernelMessage): Future[_] = future {
     logKernelMessageAction("Initiating Comm Msg for", kernelMessage)
+
+    val kmBuilder = KMBuilder().withParent(kernelMessage)
+
     Utilities.parseAndHandle(
       kernelMessage.contentString,
       CommMsg.commMsgReads,
-      handler = handleCommMsg,
+      handler = handleCommMsg(kmBuilder),
       errHandler = handleParseError
     )
   }
 
-  private def handleCommMsg(commMsg: CommMsg) = {
+  private def handleCommMsg(kmBuilder: KMBuilder)(commMsg: CommMsg) = {
     val commId = commMsg.comm_id
     val data = commMsg.data
 
     logger.debug(s"Received comm_msg with id '$commId'")
 
-    // TODO: Should we be reusing something from the KernelMessage?
-    val commWriter = new KernelCommWriter(actorLoader, KMBuilder(), commId)
+    val commWriter = new KernelCommWriter(actorLoader, kmBuilder, commId)
 
     commStorage.getCommIdCallbacks(commId) match {
       case None             =>

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/d1f93bb0/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandlerSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandlerSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandlerSpec.scala
index 363ab68..582a08e 100644
--- a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandlerSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommMsgHandlerSpec.scala
@@ -121,6 +121,33 @@ class CommMsgHandlerSpec extends TestKit(
         //       limit? Is there a different logical approach?
         kernelMessageRelayProbe.expectNoMsg(200.milliseconds)
       }
+
+      it("should include the parent's header in the parent header of " +
+         "outgoing messages"){
+
+        // Register a callback that sends a message using the comm writer
+        val msgCallback: CommCallbacks.MsgCallback =
+          new CommCallbacks.MsgCallback() {
+            def apply(v1: CommWriter, v2: v5.UUID, v3: v5.MsgData): Unit =
+              v1.writeMsg(MsgData.Empty)
+          }
+        val callbacks = (new CommCallbacks).addMsgCallback(msgCallback)
+        doReturn(Some(callbacks)).when(spyCommStorage)
+          .getCommIdCallbacks(TestCommId)
+
+        // Send a comm_msg message with the test id
+        val msg = kmBuilder
+          .withHeader(CommMsg.toTypeString)
+          .withContentString(CommMsg(TestCommId, v5.MsgData.Empty))
+          .build
+        commMsgHandler ! msg
+
+        // Verify that the message sent by the handler has the desired property
+        kernelMessageRelayProbe.fishForMessage(200.milliseconds) {
+          case KernelMessage(_, _, _, parentHeader, _, _) =>
+            parentHeader == msg.header
+        }
+      }
     }
   }
 }
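
A minimal sketch of the pattern this commit introduces, assuming the KMBuilder and KernelCommWriter signatures visible in the diff (the helper name and parameter list are hypothetical, and the types are assumed in scope as in CommMsgHandler.scala): seeding the builder with the incoming message means every message the comm writer emits carries that message's header as its parent header, which is what lets frontends correlate comm traffic with the request that triggered it.

    // Sketch: parentAwareCommWriter is a hypothetical helper; withParent and
    // the KernelCommWriter constructor are taken from the diff above.
    def parentAwareCommWriter(
      actorLoader: ActorLoader,   // routes built messages to kernel sockets
      incoming: KernelMessage,    // the comm_msg currently being handled
      commId: v5.UUID             // id of the comm the writer targets
    ): KernelCommWriter = {
      // withParent(incoming) copies incoming.header into parentHeader, so
      // every commWriter.writeMsg(...) produces a correlated outgoing message.
      new KernelCommWriter(actorLoader, KMBuilder().withParent(incoming), commId)
    }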


[39/50] [abbrv] incubator-toree git commit: Fixed work directory for `make dev` in Vagrant mode

Posted by lr...@apache.org.
Fixed work directory for `make dev` in Vagrant mode


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/2e20d17b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/2e20d17b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/2e20d17b

Branch: refs/heads/master
Commit: 2e20d17b839a2500c8e522a6ff8f5345636e99c7
Parents: a9de6aa
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Fri Dec 4 09:49:35 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Fri Dec 4 09:49:35 2015 -0600

----------------------------------------------------------------------
 Makefile | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/2e20d17b/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index e26a936..213ee7d 100644
--- a/Makefile
+++ b/Makefile
@@ -14,14 +14,12 @@
 # limitations under the License.
 #
 
-.PHONY: clean clean-dist build dev test test-travis
+.PHONY: help clean clean-dist build dev test test-travis
 
 VERSION?=0.1.5
 IS_SNAPSHOT?=true
 APACHE_SPARK_VERSION?=1.5.1
 
-VM_WORKDIR=/src/spark-kernel
-
 USE_VAGRANT?=
 RUN_PREFIX=$(if $(USE_VAGRANT),vagrant ssh -c "cd $(VM_WORKDIR) && )
 RUN_SUFFIX=$(if $(USE_VAGRANT),")
@@ -33,12 +31,21 @@ ENV_OPTS=APACHE_SPARK_VERSION=$(APACHE_SPARK_VERSION) VERSION=$(VERSION) IS_SNAP
 FULL_VERSION=$(shell echo $(VERSION)`[ "$(IS_SNAPSHOT)" == "true" ] && (echo '-SNAPSHOT')` )
 ASSEMBLY_JAR=$(shell echo kernel-assembly-$(FULL_VERSION).jar )
 
+help:
+	@echo '      clean - clean build files'
+	@echo '        dev - starts ipython'
+	@echo '       dist - build a packaged distribution'
+	@echo '      build - builds assembly'
+	@echo '       test - run all units'
+
 clean-dist:
 	-rm -r dist
 
+clean: VM_WORKDIR=/src/spark-kernel
 clean: clean-dist
 	$(call RUN,$(ENV_OPTS) sbt clean)
 
+kernel/target/scala-2.10/$(ASSEMBLY_JAR): VM_WORKDIR=/src/spark-kernel
 kernel/target/scala-2.10/$(ASSEMBLY_JAR): ${shell find ./*/src/main/**/*}
 kernel/target/scala-2.10/$(ASSEMBLY_JAR): ${shell find ./*/build.sbt}
 kernel/target/scala-2.10/$(ASSEMBLY_JAR): project/build.properties project/Build.scala project/Common.scala project/plugins.sbt
@@ -50,6 +57,7 @@ dev: VM_WORKDIR=~
 dev: dist
 	$(call RUN,ipython notebook --ip=* --no-browser)
 
+test: VM_WORKDIR=/src/spark-kernel
 test:
 	$(call RUN,$(ENV_OPTS) sbt compile test)
 


[50/50] [abbrv] incubator-toree git commit: Updated to point out that master supports 1.5.1+

Posted by lr...@apache.org.
Updated to point out that master supports 1.5.1+

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/0a5a7f6b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/0a5a7f6b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/0a5a7f6b

Branch: refs/heads/master
Commit: 0a5a7f6bac1afcb0a410887f17112b0d98bbcd68
Parents: 042debc
Author: Gino Bustelo <gi...@bustelos.com>
Authored: Thu Jan 7 16:31:30 2016 -0600
Committer: Gino Bustelo <gi...@bustelos.com>
Committed: Thu Jan 7 16:31:30 2016 -0600

----------------------------------------------------------------------
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/0a5a7f6b/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 8eab2e2..38f0a22 100644
--- a/README.md
+++ b/README.md
@@ -54,7 +54,7 @@ Our goal is to keep `master` up to date with the latest version of Spark. When n
 
 Branch                       | Spark Kernel Version | Apache Spark Version
 ---------------------------- | -------------------- | --------------------
-[master][master]             | 0.1.5                | 1.5.1
+[master][master]             | 0.1.5                | 1.5.1+
 [branch-0.1.4][branch-0.1.4] | 0.1.4                | 1.4.1
 [branch-0.1.3][branch-0.1.3] | 0.1.3                | 1.3.1
 


[47/50] [abbrv] incubator-toree git commit: Merge pull request #229 from ibm-et/FixSqlContext

Posted by lr...@apache.org.
Merge pull request #229 from ibm-et/FixSqlContext

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/5a163a9c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/5a163a9c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/5a163a9c

Branch: refs/heads/master
Commit: 5a163a9cd55a7ffc5febbae251c950b32dbdabb2
Parents: 3905e47 73cc589
Author: Gino Bustelo <gi...@bustelos.com>
Authored: Wed Dec 9 11:18:59 2015 -0600
Committer: Gino Bustelo <gi...@bustelos.com>
Committed: Wed Dec 9 11:18:59 2015 -0600

----------------------------------------------------------------------
 .../com/ibm/spark/interpreter/Interpreter.scala | 15 ++++++-
 .../boot/layer/ComponentInitialization.scala    | 45 --------------------
 .../scala/com/ibm/spark/kernel/api/Kernel.scala | 45 ++++++++++++++++++--
 .../com/ibm/spark/kernel/api/KernelSpec.scala   |  1 -
 .../scala/test/utils/DummyInterpreter.scala     | 16 +++++++
 .../pyspark/PySparkInterpreter.scala            |  8 ++--
 .../interpreter/scala/ScalaInterpreter.scala    | 41 +++++++++++++-----
 .../interpreter/sparkr/SparkRInterpreter.scala  |  5 +++
 .../kernel/interpreter/sql/SqlInterpreter.scala |  4 ++
 9 files changed, 116 insertions(+), 64 deletions(-)
----------------------------------------------------------------------



[07/50] [abbrv] incubator-toree git commit: remove old create_context arg

Posted by lr...@apache.org.
remove old create_context arg


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/25b343bc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/25b343bc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/25b343bc

Branch: refs/heads/master
Commit: 25b343bc67b9f594892bae53a2ba29a97473605f
Parents: 0058eb1
Author: Brian Burns <bb...@us.ibm.com>
Authored: Thu Nov 12 14:17:05 2015 -0500
Committer: Brian Burns <bb...@us.ibm.com>
Committed: Thu Nov 12 14:17:05 2015 -0500

----------------------------------------------------------------------
 resources/compile/reference.conf | 1 -
 resources/test/reference.conf    | 1 -
 2 files changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/25b343bc/resources/compile/reference.conf
----------------------------------------------------------------------
diff --git a/resources/compile/reference.conf b/resources/compile/reference.conf
index ddfae16..2aa6d63 100644
--- a/resources/compile/reference.conf
+++ b/resources/compile/reference.conf
@@ -35,7 +35,6 @@ ip = ${?IP}
 transport = "tcp"
 signature_scheme = "hmac-sha256"
 key = ""
-create_context = true
 spark.master = "local[*]"
 
 ivy_local = "/tmp/.ivy2"

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/25b343bc/resources/test/reference.conf
----------------------------------------------------------------------
diff --git a/resources/test/reference.conf b/resources/test/reference.conf
index f084489..7f3fcbb 100644
--- a/resources/test/reference.conf
+++ b/resources/test/reference.conf
@@ -35,7 +35,6 @@ ip = ${?IP}
 transport = "tcp"
 signature_scheme = "hmac-sha256"
 key = ""
-create_context = true
 spark.master = "local[*]"
 
 ivy_local = "/tmp/.ivy2"


[25/50] [abbrv] incubator-toree git commit: Fixed missing comma in Vagrantfile kernel.json

Posted by lr...@apache.org.
Fixed missing comma in Vagrantfile kernel.json


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/e002a1bc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/e002a1bc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/e002a1bc

Branch: refs/heads/master
Commit: e002a1bc62f9f8acfdcd9504c5213c14268e30b8
Parents: f8b37cc
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Thu Nov 19 13:28:14 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Nov 24 08:49:50 2015 -0600

----------------------------------------------------------------------
 Vagrantfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/e002a1bc/Vagrantfile
----------------------------------------------------------------------
diff --git a/Vagrantfile b/Vagrantfile
index 47f27b4..09132da 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -129,7 +129,7 @@ cat << EOF > /home/vagrant/.ipython/kernels/spark/kernel.json
     ],
     "codemirror_mode": "scala",
     "env": {
-        "SPARK_OPTS": "--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=trace"
+        "SPARK_OPTS": "--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=trace",
         "MAX_INTERPRETER_THREADS": "16",
         "SPARK_CONFIGURATION": "spark.cores.max=4",
         "CAPTURE_STANDARD_OUT": "true",


[44/50] [abbrv] incubator-toree git commit: Removed unnecessary test

Posted by lr...@apache.org.
Removed unnecessary test


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/33836223
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/33836223
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/33836223

Branch: refs/heads/master
Commit: 338362232839e9c1a49586e55ec22fb04ee7b555
Parents: b7f4ed1
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Tue Dec 8 11:04:39 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Dec 8 11:04:39 2015 -0600

----------------------------------------------------------------------
 .../com/ibm/spark/kernel/api/KernelSpec.scala   | 24 --------------------
 1 file changed, 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/33836223/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
index a5756a9..98157a9 100644
--- a/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
@@ -174,30 +174,6 @@ class KernelSpec extends FunSpec with Matchers with MockitoSugar
 
         sparkConf.get("spark.master") should be (expected)
       }
-
-      it("should not add ourselves as a jar if spark.master is not local") {
-        val sparkConf = new SparkConf().setMaster("local[*]")
-        doReturn("local[*]").when(mockConfig).getString("spark.master")
-        doReturn(sparkConf).when(mockSparkContext).getConf
-
-        kernel.updateInterpreterWithSparkContext(mockSparkContext)
-        verify(mockSparkContext, never()).addJar(anyString())
-      }
-
-      it("should add ourselves as a jar if spark.master is not local") {
-        val sparkConf = new SparkConf().setMaster("foo://bar")
-        doReturn("notlocal").when(mockConfig).getString("spark.master")
-        doReturn(sparkConf).when(mockSparkContext).getConf
-
-        // TODO: This is going to be outdated when we determine a way to
-        //       re-include all jars
-        val expected =
-          com.ibm.spark.SparkKernel.getClass.getProtectionDomain
-            .getCodeSource.getLocation.getPath
-
-        kernel.updateInterpreterWithSparkContext(mockSparkContext)
-        verify(mockSparkContext).addJar(expected)
-      }
     }
   }
 }


[30/50] [abbrv] incubator-toree git commit: Mods to Makefile to run sbt locally

Posted by lr...@apache.org.
Mods to Makefile to run sbt locally


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/e312d17c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/e312d17c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/e312d17c

Branch: refs/heads/master
Commit: e312d17c5775c53633e81cb741b5bed99bea74db
Parents: ecfa5dd
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Dec 2 17:07:56 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Wed Dec 2 17:07:56 2015 -0600

----------------------------------------------------------------------
 Makefile  | 28 ++++++++++++++++++----------
 README.md |  8 ++++++--
 2 files changed, 24 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/e312d17c/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index b0cafd1..e26a936 100644
--- a/Makefile
+++ b/Makefile
@@ -14,40 +14,48 @@
 # limitations under the License.
 #
 
-.PHONY: clean build init dev test test-travis
+.PHONY: clean clean-dist build dev test test-travis
 
 VERSION?=0.1.5
 IS_SNAPSHOT?=true
 APACHE_SPARK_VERSION?=1.5.1
 
+VM_WORKDIR=/src/spark-kernel
+
+USE_VAGRANT?=
+RUN_PREFIX=$(if $(USE_VAGRANT),vagrant ssh -c "cd $(VM_WORKDIR) && )
+RUN_SUFFIX=$(if $(USE_VAGRANT),")
+
+RUN=$(RUN_PREFIX)$(1)$(RUN_SUFFIX)
+
 ENV_OPTS=APACHE_SPARK_VERSION=$(APACHE_SPARK_VERSION) VERSION=$(VERSION) IS_SNAPSHOT=$(IS_SNAPSHOT)
 
 FULL_VERSION=$(shell echo $(VERSION)`[ "$(IS_SNAPSHOT)" == "true" ] && (echo '-SNAPSHOT')` )
 ASSEMBLY_JAR=$(shell echo kernel-assembly-$(FULL_VERSION).jar )
 
-clean:
-	vagrant ssh -c "cd /src/spark-kernel/ && sbt clean"
-	@-rm -r dist
+clean-dist:
+	-rm -r dist
 
-init:
-	vagrant up
+clean: clean-dist
+	$(call RUN,$(ENV_OPTS) sbt clean)
 
 kernel/target/scala-2.10/$(ASSEMBLY_JAR): ${shell find ./*/src/main/**/*}
 kernel/target/scala-2.10/$(ASSEMBLY_JAR): ${shell find ./*/build.sbt}
 kernel/target/scala-2.10/$(ASSEMBLY_JAR): project/build.properties project/Build.scala project/Common.scala project/plugins.sbt
-	vagrant ssh -c "cd /src/spark-kernel/ && $(ENV_OPTS) sbt kernel/assembly"
+	$(call RUN,$(ENV_OPTS) sbt kernel/assembly)
 
 build: kernel/target/scala-2.10/$(ASSEMBLY_JAR)
 
+dev: VM_WORKDIR=~
 dev: dist
-	vagrant ssh -c "cd ~ && ipython notebook --ip=* --no-browser"
+	$(call RUN,ipython notebook --ip=* --no-browser)
 
 test:
-	vagrant ssh -c "cd /src/spark-kernel/ && $(ENV_OPTS) sbt compile test"
+	$(call RUN,$(ENV_OPTS) sbt compile test)
 
 dist: COMMIT=$(shell git rev-parse --short=12 --verify HEAD)
 dist: VERSION_FILE=dist/spark-kernel/VERSION
-dist: kernel/target/scala-2.10/$(ASSEMBLY_JAR)
+dist: kernel/target/scala-2.10/$(ASSEMBLY_JAR) ${shell find ./etc/bin/*}
 	@mkdir -p dist/spark-kernel/bin dist/spark-kernel/lib
 	@cp -r etc/bin/* dist/spark-kernel/bin/.
 	@cp kernel/target/scala-2.10/$(ASSEMBLY_JAR) dist/spark-kernel/lib/.

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/e312d17c/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 2115304..8eab2e2 100644
--- a/README.md
+++ b/README.md
@@ -23,17 +23,21 @@ A version of the Spark Kernel is deployed as part of the [Try Jupyter!][try-jupy
 
 Develop
 =======
-[Vagrant][vagrant] is used to simplify the development experience. It is the only requirement to be able to build, package and test the Spark Kernel on your development machine. 
+This project uses `make` as the entry point for build, test, and packaging. It supports two modes: local and Vagrant. The default is local, and all commands (e.g. `sbt`) are run locally on your machine. This means that you need to
+install `sbt`, `jupyter/ipython`, and other development requirements locally on your machine. The second mode uses [Vagrant][vagrant] to simplify the development experience. In Vagrant mode, all commands are sent to a Vagrant box
+that has all necessary dependencies pre-installed. To run in Vagrant mode, run `export USE_VAGRANT=true`.
 
 To build and interact with the Spark Kernel using Jupyter, run
 ```
 make dev
 ```
 
-This will start a Jupyter notebook server accessible at `http://192.168.44.44:8888`. From here you can create notebooks that use the Spark Kernel configured for local mode.
+This will start a Jupyter notebook server. Depending on your mode, it will be accessible at `http://localhost:8888` or `http://192.168.44.44:8888`. From here you can create notebooks that use the Spark Kernel configured for local mode.
 
 Tests can be run by doing `make test`.
 
+>> NOTE: Do not use `sbt` directly.
+
 Build & Package
 ===============
 To build and package up the Spark Kernel, run


[02/50] [abbrv] incubator-toree git commit: Begin Interpreter Plugin

Posted by lr...@apache.org.
Begin Interpreter Plugin


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/708180ad
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/708180ad
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/708180ad

Branch: refs/heads/master
Commit: 708180ad0d7dc10eda8ce3d4ab199ea8a8f67952
Parents: 9db161f
Author: Brian Burns <bb...@us.ibm.com>
Authored: Wed Nov 4 13:44:22 2015 -0500
Committer: Brian Burns <bb...@us.ibm.com>
Committed: Wed Nov 4 13:44:22 2015 -0500

----------------------------------------------------------------------
 .../com/ibm/spark/boot/CommandLineOptions.scala |  11 +-
 .../boot/layer/ComponentInitialization.scala    | 187 ++++---------------
 .../StandardComponentInitializationSpec.scala   |  76 +++-----
 .../scala/test/utils/DummyInterpreter.scala     | 105 +++++++++++
 4 files changed, 173 insertions(+), 206 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/708180ad/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
index 069cab7..27c5c68 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
@@ -96,6 +96,9 @@ class CommandLineOptions(args: Seq[String]) {
   private val _nosparkcontext =
     parser.accepts("nosparkcontext", "kernel should not create a spark context")
 
+  private val _plugins = parser.accepts(
+    "interpreter-plugin"
+  ).withRequiredArg().ofType(classOf[String])
 
   private val options = parser.parse(args.map(_.trim): _*)
 
@@ -152,7 +155,8 @@ class CommandLineOptions(args: Seq[String]) {
       "max_interpreter_threads" -> get(_max_interpreter_threads),
       "jar_dir" -> get(_jar_dir),
       "default_interpreter" -> get(_default_interpreter),
-      "nosparkcontext" -> (if (has(_nosparkcontext)) Some(true) else Some(false))
+      "nosparkcontext" -> (if (has(_nosparkcontext)) Some(true) else Some(false)),
+      "interpreter_plugins" -> interpreterPlugins
     ).flatMap(removeEmptyOptions).asInstanceOf[Map[String, AnyRef]].asJava)
 
     commandLineConfig.withFallback(profileConfig).withFallback(ConfigFactory.load)
@@ -173,6 +177,11 @@ class CommandLineOptions(args: Seq[String]) {
     }
   }
 
+  private def interpreterPlugins: Option[java.util.List[String]] = {
+    val p = getAll(_plugins)
+    p.map(_.asJava)
+  }
+
   /**
    * Prints the help message to the output stream provided.
    * @param out The output stream to direct the help message

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/708180ad/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
index 8ce6360..55b133d 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
@@ -16,6 +16,7 @@
 
 package com.ibm.spark.boot.layer
 
+import java.util
 import java.util.concurrent.ConcurrentHashMap
 
 import akka.actor.ActorRef
@@ -23,7 +24,7 @@ import com.ibm.spark.comm.{CommManager, KernelCommManager, CommRegistrar, CommSt
 import com.ibm.spark.dependencies.{DependencyDownloader, IvyDependencyDownloader}
 import com.ibm.spark.global
 import com.ibm.spark.interpreter._
-import com.ibm.spark.kernel.api.Kernel
+import com.ibm.spark.kernel.api.{KernelLike, Kernel}
 import com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter
 import com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter
 import com.ibm.spark.kernel.interpreter.scala.{TaskManagerProducerLike, StandardSparkIMainProducer, StandardSettingsProducer, ScalaInterpreter}
@@ -82,13 +83,6 @@ trait StandardComponentInitialization extends ComponentInitialization {
       initializeCommObjects(actorLoader)
     val interpreter = initializeInterpreter(config)
 
-    //val sparkContext = null
-    //val sparkContext = initializeSparkContext(
-    //  config, appName, actorLoader, interpreter)
-    //val sqlContext = null
-    //val sqlContext = initializeSqlContext(sparkContext)
-    //updateInterpreterWithSqlContext(sqlContext, interpreter)
-
     val dependencyDownloader = initializeDependencyDownloader(config)
     val magicLoader = initializeMagicLoader(
       config, interpreter, dependencyDownloader)
@@ -113,6 +107,11 @@ trait StandardComponentInitialization extends ComponentInitialization {
     //sqlInterpreter.start()
     kernel.data.put("SQL", sqlInterpreter)
 
+
+    val plugins = initializeInterpreterPlugins(kernel, config)
+
+    kernel.data.putAll(plugins.asJava)
+
     // Add Scala to available data map
     kernel.data.put("Scala", interpreter)
     val defaultInterpreter: Interpreter =
@@ -129,6 +128,8 @@ trait StandardComponentInitialization extends ComponentInitialization {
         case "sql" =>
           logger.info("Using SQL interpreter as default!")
           sqlInterpreter
+        case p if(kernel.data.containsKey(p)) =>
+          kernel.data.get(p).asInstanceOf[Interpreter]
         case unknown =>
           logger.warn(s"Unknown interpreter '$unknown'! Defaulting to Scala!")
           interpreter
@@ -141,6 +142,32 @@ trait StandardComponentInitialization extends ComponentInitialization {
 
   }
 
+  def initializeInterpreterPlugins(
+    kernel: KernelLike,
+    config: Config
+  ): Map[String, Interpreter] = {
+    val p = config
+      .getStringList("interpreter_plugins")
+      .listIterator().asScala
+
+    p.foldLeft(Map[String, Interpreter]())( (acc, v) => {
+      v.split(":") match {
+        case Array(name, className) =>
+          try {
+            acc + (name -> Class
+              .forName(className)
+              .getConstructor(classOf[KernelLike])
+              .newInstance(kernel)
+              .asInstanceOf[Interpreter])
+          }
+          catch {
+            case _:Throwable => acc
+          }
+        case _ => acc
+      }
+    })
+  }
+
   def initializeSparkContext(config:Config, kernel:Kernel, appName:String) = {
     if(!config.getBoolean("nosparkcontext")) {
       kernel.createSparkContext(config.getString("spark.master"), appName)
@@ -190,150 +217,6 @@ trait StandardComponentInitialization extends ComponentInitialization {
     interpreter
   }
 
-  // TODO: Think of a better way to test without exposing this
-  /*
-  protected[layer] def initializeSparkContext(
-    config: Config, appName: String, actorLoader: ActorLoader,
-    interpreter: Interpreter
-  ) = {
-    logger.debug("Creating Spark Configuration")
-    val conf = new SparkConf()
-
-    val master = config.getString("spark.master")
-    logger.info("Using " + master + " as Spark Master")
-    conf.setMaster(master)
-
-    logger.info("Setting deployMode to client")
-    conf.set("spark.submit.deployMode", "client")
-
-    logger.info("Using " + appName + " as Spark application name")
-    conf.setAppName(appName)
-
-    KeyValuePairUtils.stringToKeyValuePairSeq(
-      config.getString("spark_configuration")
-    ).foreach { keyValuePair =>
-      logger.info(s"Setting ${keyValuePair.key} to ${keyValuePair.value}")
-      Try(conf.set(keyValuePair.key, keyValuePair.value))
-    }
-
-    // TODO: Move SparkIMain to private and insert in a different way
-    logger.warn("Locked to Scala interpreter with SparkIMain until decoupled!")
-
-    // TODO: Construct class server outside of SparkIMain
-    logger.warn("Unable to control initialization of REPL class server!")
-    logger.info("REPL Class Server Uri: " + interpreter.classServerURI)
-    conf.set("spark.repl.class.uri", interpreter.classServerURI)
-
-    val sparkContext = reallyInitializeSparkContext(
-      config, actorLoader, KMBuilder(), conf
-    )
-
-    updateInterpreterWithSparkContext(
-      config, sparkContext, interpreter)
-
-    sparkContext
-  }
-
-  // TODO: Think of a better way to test without exposing this
-  protected[layer] def reallyInitializeSparkContext(
-    config: Config, actorLoader: ActorLoader, kmBuilder: KMBuilder,
-    sparkConf: SparkConf
-  ): SparkContext = {
-    logger.debug("Constructing new Spark Context")
-    // TODO: Inject stream redirect headers in Spark dynamically
-    var sparkContext: SparkContext = null
-    val outStream = new KernelOutputStream(
-      actorLoader, KMBuilder(), global.ScheduledTaskManager.instance,
-      sendEmptyOutput = config.getBoolean("send_empty_output")
-    )
-
-    // Update global stream state and use it to set the Console local variables
-    // for threads in the Spark threadpool
-    global.StreamState.setStreams(System.in, outStream, outStream)
-    global.StreamState.withStreams {
-      sparkContext = new SparkContext(sparkConf)
-    }
-
-    sparkContext
-  }
-
-  // TODO: Think of a better way to test without exposing this
-  protected[layer] def updateInterpreterWithSparkContext(
-    config: Config, sparkContext: SparkContext, interpreter: Interpreter
-  ) = {
-    interpreter.doQuietly {
-      logger.debug("Binding context into interpreter")
-      interpreter.bind(
-        "sc", "org.apache.spark.SparkContext",
-        sparkContext, List( """@transient"""))
-
-      // NOTE: This is needed because interpreter blows up after adding
-      //       dependencies to SparkContext and Interpreter before the
-      //       cluster has been used... not exactly sure why this is the case
-      // TODO: Investigate why the cluster has to be initialized in the kernel
-      //       to avoid the kernel's interpreter blowing up (must be done
-      //       inside the interpreter)
-      logger.debug("Initializing Spark cluster in interpreter")
-
-       interpreter.doQuietly {
-        interpreter.interpret("""
-        | val $toBeNulled = {
-        | var $toBeNulled = sc.emptyRDD.collect()
-        | $toBeNulled = null
-        |  }
-        |
-        |""".stripMargin)
-      }
-    }
-
-    // Add ourselves as a dependency
-    // TODO: Provide ability to point to library as commandline argument
-    // TODO: Provide better method to determine if can add ourselves
-    // TODO: Avoid duplicating request for master twice (initializeSparkContext
-    //       also does this)
-    val master = config.getString("spark.master")
-    // If in local mode, do not need to add our jars as dependencies
-    if (!master.toLowerCase.startsWith("local")) {
-      @inline def getJarPathFor(klass: Class[_]): String =
-        klass.getProtectionDomain.getCodeSource.getLocation.getPath
-
-      // TODO: Provide less hard-coded solution in case additional dependencies
-      //       are added or classes are refactored to different projects
-      val jarPaths = Seq(
-        // Macro project
-        classOf[com.ibm.spark.annotations.Experimental],
-
-        // Protocol project
-        classOf[com.ibm.spark.kernel.protocol.v5.KernelMessage],
-
-        // Communication project
-        classOf[com.ibm.spark.communication.SocketManager],
-
-        // Kernel-api project
-        classOf[com.ibm.spark.kernel.api.KernelLike],
-
-        // Scala-interpreter project
-        classOf[com.ibm.spark.kernel.interpreter.scala.ScalaInterpreter],
-
-        // PySpark-interpreter project
-        classOf[com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter],
-
-        // SparkR-interpreter project
-        classOf[com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter],
-
-        // Kernel project
-        classOf[com.ibm.spark.boot.KernelBootstrap]
-      ).map(getJarPathFor)
-
-      logger.info("Adding kernel jars to cluster:\n- " +
-        jarPaths.mkString("\n- "))
-      jarPaths.foreach(sparkContext.addJar)
-    } else {
-      logger.info("Running in local mode! Not adding self as dependency!")
-    }
-  }
-  */
-
   protected[layer] def initializeSqlContext(sparkContext: SparkContext) = {
     val sqlContext: SQLContext = try {
       logger.info("Attempting to create Hive Context")

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/708180ad/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala b/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala
index f6a9235..68ee0cc 100644
--- a/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala
@@ -16,8 +16,9 @@
 
 package com.ibm.spark.boot.layer
 
-import com.ibm.spark.boot.KernelBootstrap
+import com.ibm.spark.boot.{CommandLineOptions, KernelBootstrap}
 import com.ibm.spark.interpreter.Interpreter
+import com.ibm.spark.kernel.api.KernelLike
 import com.ibm.spark.kernel.protocol.v5.KMBuilder
 import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
 import com.ibm.spark.utils.LogLike
@@ -29,6 +30,8 @@ import org.mockito.Mockito._
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
 
+import scala.collection.mutable
+import scala.collection.JavaConverters._
 class StandardComponentInitializationSpec extends FunSpec with Matchers
   with MockitoSugar with BeforeAndAfter
 {
@@ -38,6 +41,7 @@ class StandardComponentInitializationSpec extends FunSpec with Matchers
   private var mockActorLoader: ActorLoader = _
   private var mockSparkContext: SparkContext = _
   private var mockInterpreter: Interpreter = _
+  private var mockKernel: KernelLike = _
   private var spyComponentInitialization: StandardComponentInitialization = _
 
   private class TestComponentInitialization
@@ -48,67 +52,33 @@ class StandardComponentInitializationSpec extends FunSpec with Matchers
     mockActorLoader = mock[ActorLoader]
     mockSparkContext = mock[SparkContext]
     mockInterpreter = mock[Interpreter]
+    mockKernel = mock[KernelLike]
 
     spyComponentInitialization = spy(new TestComponentInitialization())
   }
 
-  /*
   describe("StandardComponentInitialization") {
-    describe("when spark.master is set in config") {
-      it("should set spark.master in SparkConf") {
-        val expected = "some value"
-        doReturn(expected).when(mockConfig).getString("spark.master")
-        doReturn("").when(mockConfig).getString("spark_configuration")
-
-        // Stub out other helper methods to avoid long init process and to
-        // avoid failure when creating SparkContext
-        doReturn(mockSparkContext).when(spyComponentInitialization)
-          .reallyInitializeSparkContext(
-            any[Config], any[ActorLoader], any[KMBuilder], any[SparkConf])
-        doNothing().when(spyComponentInitialization)
-          .updateInterpreterWithSparkContext(
-            any[Config], any[SparkContext], any[Interpreter])
-
-        // Provide stub for interpreter classServerURI since also executed
-        doReturn("").when(mockInterpreter).classServerURI
-
-        val sparkContext = spyComponentInitialization.initializeSparkContext(
-          mockConfig, TestAppName, mockActorLoader, mockInterpreter)
-
-        val sparkConf = {
-          val sparkConfCaptor = ArgumentCaptor.forClass(classOf[SparkConf])
-          verify(spyComponentInitialization).reallyInitializeSparkContext(
-            any[Config], any[ActorLoader], any[KMBuilder],
-            sparkConfCaptor.capture()
-          )
-          sparkConfCaptor.getValue
-        }
-
-        sparkConf.get("spark.master") should be (expected)
+    describe("#initializeInterpreterPlugins") {
+      it("should return a map with the DummyInterpreter") {
+        val conf = new CommandLineOptions(List(
+          "--interpreter-plugin", "dummy:test.utils.DummyInterpreter",
+          "--interpreter-plugin", "dummy2:test.utils.DummyInterpreter"
+        )).toConfig
+
+        val m = spyComponentInitialization
+          .initializeInterpreterPlugins(mockKernel, conf)
+
+        m.get("dummy") should not be None
+        m.get("dummy2") should not be None
       }
+      it("should return an empty map") {
+        val conf = new CommandLineOptions(List()).toConfig
 
-      it("should not add ourselves as a jar if spark.master is not local") {
-        doReturn("local[*]").when(mockConfig).getString("spark.master")
-
-        spyComponentInitialization.updateInterpreterWithSparkContext(
-          mockConfig, mockSparkContext, mockInterpreter)
-        verify(mockSparkContext, never()).addJar(anyString())
-      }
-
-      it("should add ourselves as a jar if spark.master is not local") {
-        doReturn("notlocal").when(mockConfig).getString("spark.master")
-
-        // TODO: This is going to be outdated when we determine a way to
-        //       re-include all jars
-        val expected =
-          com.ibm.spark.SparkKernel.getClass.getProtectionDomain
-            .getCodeSource.getLocation.getPath
+        val m = spyComponentInitialization
+          .initializeInterpreterPlugins(mockKernel, conf)
 
-        spyComponentInitialization.updateInterpreterWithSparkContext(
-          mockConfig, mockSparkContext, mockInterpreter)
-        verify(mockSparkContext).addJar(expected)
+        m.isEmpty shouldBe true
       }
     }
   }
-  */
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/708180ad/kernel/src/test/scala/test/utils/DummyInterpreter.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/test/utils/DummyInterpreter.scala b/kernel/src/test/scala/test/utils/DummyInterpreter.scala
new file mode 100644
index 0000000..1ab8cac
--- /dev/null
+++ b/kernel/src/test/scala/test/utils/DummyInterpreter.scala
@@ -0,0 +1,105 @@
+package test.utils
+
+import java.net.URL
+
+import com.ibm.spark.interpreter.{ExecuteFailure, ExecuteOutput, Interpreter}
+import com.ibm.spark.interpreter.Results.Result
+import com.ibm.spark.kernel.api.KernelLike
+
+import scala.tools.nsc.interpreter.{OutputStream, InputStream}
+
+class DummyInterpreter(kernel: KernelLike) extends Interpreter {
+  /**
+   * Starts the interpreter, initializing any internal state.
+   * @return A reference to the interpreter
+   */
+  override def start(): Interpreter = ???
+
+  /**
+   * Executes body and will not print anything to the console during the execution
+   * @param body The function to execute
+   * @tparam T The return type of body
+   * @return The return value of body
+   */
+  override def doQuietly[T](body: => T): T = ???
+
+  /**
+   * Stops the interpreter, removing any previous internal state.
+   * @return A reference to the interpreter
+   */
+  override def stop(): Interpreter = ???
+
+  /**
+   * Adds external jars to the internal classpaths of the interpreter.
+   * @param jars The list of jar locations
+   */
+  override def addJars(jars: URL*): Unit = ???
+
+  /**
+   * @return Returns a string to reference the URI of where the interpreted class files are created
+   */
+  override def classServerURI: String = ???
+
+  /**
+   * Returns the name of the variable created from the last execution.
+   * @return Some String name if a variable was created, otherwise None
+   */
+  override def lastExecutionVariableName: Option[String] = ???
+
+  /**
+   * Mask the Console and System objects with our wrapper implementations
+   * and dump the Console methods into the public namespace (similar to
+   * the Predef approach).
+   * @param in The new input stream
+   * @param out The new output stream
+   * @param err The new error stream
+   */
+  override def updatePrintStreams(in: InputStream, out: OutputStream, err: OutputStream): Unit = ???
+
+  /**
+   * Returns the class loader used by this interpreter.
+   * @return The runtime class loader used by this interpreter
+   */
+  override def classLoader: ClassLoader = ???
+
+  /**
+   * Retrieves the contents of the variable with the provided name from the
+   * interpreter.
+   * @param variableName The name of the variable whose contents to read
+   * @return An option containing the variable contents or None if the
+   *         variable does not exist
+   */
+  override def read(variableName: String): Option[AnyRef] = ???
+
+  /**
+   * Interrupts the current code being interpreted.
+   * @return A reference to the interpreter
+   */
+  override def interrupt(): Interpreter = ???
+
+  /**
+   * Binds a variable in the interpreter to a value.
+   * @param variableName The name to expose the value in the interpreter
+   * @param typeName The type of the variable, must be the fully qualified class name
+   * @param value The value of the variable binding
+   * @param modifiers Any annotation, scoping modifiers, etc on the variable
+   */
+  override def bind(variableName: String, typeName: String, value: Any, modifiers: List[String]): Unit = ???
+
+  /**
+   * Executes the provided code with the option to silence output.
+   * @param code The code to execute
+   * @param silent Whether or not to execute the code silently (no output)
+   * @return The success/failure of the interpretation and the output from the
+   *         execution or the failure
+   */
+  override def interpret(code: String, silent: Boolean): (Result, Either[ExecuteOutput, ExecuteFailure]) = ???
+
+  /**
+   * Attempts to perform code completion via the <TAB> command.
+   * @param code The current cell to complete
+   * @param pos The cursor position
+   * @return The cursor position and list of possible completions
+   */
+  override def completion(code: String, pos: Int): (Int, List[String]) = ???
+}
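
For readers following the new plugin mechanism, here is the loading convention from initializeInterpreterPlugins restated as a self-contained sketch: each spec has the form "Name:fully.qualified.ClassName", and the class is expected to expose a one-argument constructor taking a KernelLike. The helper name loadPlugins is hypothetical; the parsing and reflection mirror the diff above.

    import com.ibm.spark.interpreter.Interpreter
    import com.ibm.spark.kernel.api.KernelLike

    // Sketch: parse "Name:ClassName" specs and instantiate each class
    // reflectively with the kernel, skipping anything that fails to load.
    def loadPlugins(kernel: KernelLike, specs: Seq[String]): Map[String, Interpreter] =
      specs.foldLeft(Map.empty[String, Interpreter]) { (acc, spec) =>
        spec.split(":") match {
          case Array(name, className) =>
            try {
              val interpreter = Class
                .forName(className)
                .getConstructor(classOf[KernelLike])
                .newInstance(kernel)
                .asInstanceOf[Interpreter]
              acc + (name -> interpreter)
            } catch {
              case _: Throwable => acc // bad class or constructor: skip it
            }
          case _ => acc                // malformed spec: ignore it
        }
      }

    // e.g. loadPlugins(kernel, Seq("dummy:test.utils.DummyInterpreter"))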


[06/50] [abbrv] incubator-toree git commit: move list of default interpreters to config

Posted by lr...@apache.org.
move list of default interpreters to config


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/0058eb10
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/0058eb10
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/0058eb10

Branch: refs/heads/master
Commit: 0058eb107cb59e20f86f5ce78155f8fc65860200
Parents: 2e6bf02
Author: Brian Burns <bb...@us.ibm.com>
Authored: Wed Nov 11 12:24:05 2015 -0500
Committer: Brian Burns <bb...@us.ibm.com>
Committed: Wed Nov 11 12:24:05 2015 -0500

----------------------------------------------------------------------
 .../com/ibm/spark/boot/CommandLineOptions.scala    | 15 ++++++++-------
 .../ibm/spark/boot/layer/InterpreterManager.scala  | 17 ++++++++++++-----
 .../ibm/spark/boot/CommandLineOptionsSpec.scala    | 16 ++++++++++++++++
 resources/compile/reference.conf                   |  6 ++++++
 resources/test/reference.conf                      |  7 +++++++
 5 files changed, 49 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/0058eb10/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
index 9539404..b3871f5 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
@@ -178,19 +178,20 @@ class CommandLineOptions(args: Seq[String]) {
   }
 
   private def interpreterPlugins: Option[java.util.List[String]] = {
-    val defaults = List[String](
-      "PySpark:com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter",
-      "SparkR:com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter",
-      "SQL:com.ibm.spark.kernel.interpreter.sql.SqlInterpreter"
-    )
+    //val defaults = getAll(_default_interpreter_plugin).getOrElse(List())
+    //val defaults = List[String](
+    //  "PySpark:com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter",
+    //  "SparkR:com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter",
+    //  "SQL:com.ibm.spark.kernel.interpreter.sql.SqlInterpreter"
+    //)
 
     val userDefined = getAll(_interpreter_plugin) match {
       case Some(l) => l
       case _ => List[String]()
     }
 
-    val p = defaults ++ userDefined
-    Some(p.asJava)
+    //val p = defaults ++ userDefined
+    Some(userDefined.asJava)
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/0058eb10/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala b/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
index e07af8e..903ab69 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
@@ -5,11 +5,14 @@ import com.typesafe.config.Config
 import com.ibm.spark.interpreter._
 import scala.collection.JavaConverters._
 
+import org.slf4j.LoggerFactory
+
 case class InterpreterManager(
   default: String = "Scala",
   interpreters: Map[String, Interpreter] = Map[String, Interpreter]()
 ) {
 
+
   def initializeInterpreters(kernel: KernelLike): Unit = {
     interpreters.values.foreach(interpreter =>
       interpreter.init(kernel)
@@ -30,12 +33,13 @@ case class InterpreterManager(
 
 object InterpreterManager {
 
+  protected val logger = LoggerFactory.getLogger(this.getClass.getName)
+
   def apply(config: Config): InterpreterManager = {
-    val p = config
-      .getStringList("interpreter_plugins")
-      .listIterator().asScala
+    val ip = config.getStringList("interpreter_plugins").asScala ++
+      config.getStringList("default_interpreter_plugin").asScala
 
-    val m = p.foldLeft(Map[String, Interpreter]())( (acc, v) => {
+    val m = ip.foldLeft(Map[String, Interpreter]())( (acc, v) => {
       v.split(":") match {
         case Array(name, className) =>
           try {
@@ -46,7 +50,10 @@ object InterpreterManager {
             acc + (name -> i)
           }
           catch {
-            case _:Throwable => acc
+            case e:Throwable =>
+              logger.error("Error loading interpreter class " + className)
+              logger.error(e.getMessage())
+              acc
           }
         case _ => acc
       }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/0058eb10/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala b/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
index 8141eec..ab6748a 100644
--- a/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
@@ -335,6 +335,22 @@ class CommandLineOptionsSpec extends FunSpec with Matchers {
           be (Seq(url1, url2))
       }
     }
+
+    describe("when received --interpreter-plugin") {
+      it("should return the interpreter-plugin along with the defaults") {
+        val options = new CommandLineOptions(Seq(
+          "--interpreter-plugin",
+          "dummy:test.utils.DummyInterpreter"
+        ))
+
+        val config: Config = options.toConfig
+
+        val p = config.getList("interpreter_plugins")
+
+        p should not be empty
+
+      }
+    }
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/0058eb10/resources/compile/reference.conf
----------------------------------------------------------------------
diff --git a/resources/compile/reference.conf b/resources/compile/reference.conf
index 07a49ac..ddfae16 100644
--- a/resources/compile/reference.conf
+++ b/resources/compile/reference.conf
@@ -59,3 +59,9 @@ jar_dir = ${?JAR_DIR}
 
 default_interpreter = "Scala"
 default_interpreter = ${?DEFAULT_INTERPRETER}
+
+default_interpreter_plugin = [
+  "PySpark:com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter",
+  "SparkR:com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter",
+  "SQL:com.ibm.spark.kernel.interpreter.sql.SqlInterpreter"
+]

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/0058eb10/resources/test/reference.conf
----------------------------------------------------------------------
diff --git a/resources/test/reference.conf b/resources/test/reference.conf
index 407f344..f084489 100644
--- a/resources/test/reference.conf
+++ b/resources/test/reference.conf
@@ -57,3 +57,10 @@ send_empty_output = ${?SEND_EMPTY_OUTPUT}
 
 default_interpreter = "Scala"
 default_interpreter = ${?DEFAULT_INTERPRETER}
+
+default_interpreter_plugin = [
+  "PySpark:com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter",
+  "SparkR:com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter",
+  "SQL:com.ibm.spark.kernel.interpreter.sql.SqlInterpreter"
+]
+


[33/50] [abbrv] incubator-toree git commit: Merge pull request #220 from ibm-et/MakefileBuildLocal

Posted by lr...@apache.org.
Merge pull request #220 from ibm-et/MakefileBuildLocal

Mods to Makefile to run sbt locally

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/655a35b6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/655a35b6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/655a35b6

Branch: refs/heads/master
Commit: 655a35b6f3e7e56799670f70f4fcad31d15ef1bc
Parents: ed8b209 e312d17
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Thu Dec 3 09:01:52 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Thu Dec 3 09:01:52 2015 -0600

----------------------------------------------------------------------
 Makefile  | 28 ++++++++++++++++++----------
 README.md |  8 ++++++--
 2 files changed, 24 insertions(+), 12 deletions(-)
----------------------------------------------------------------------



[31/50] [abbrv] incubator-toree git commit: Merge pull request #205 from jodersky/proper-shutdown

Posted by lr...@apache.org.
Merge pull request #205 from jodersky/proper-shutdown

Properly close sockets on system shutdown.

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/09ed0bcc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/09ed0bcc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/09ed0bcc

Branch: refs/heads/master
Commit: 09ed0bcc9b45afaf670090e80baf76e07cbcc574
Parents: ecfa5dd 1398a63
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Thu Dec 3 08:02:45 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Thu Dec 3 08:02:45 2015 -0600

----------------------------------------------------------------------
 .../ibm/spark/communication/SocketManager.scala | 40 +++++++++++++-------
 .../socket/ZeroMQSocketRunnable.scala           | 10 +++--
 2 files changed, 33 insertions(+), 17 deletions(-)
----------------------------------------------------------------------



[34/50] [abbrv] incubator-toree git commit: Build interpreter classpath based on Classloader hierarchy

Posted by lr...@apache.org.
Build interpreter classpath based on Classloader hierarchy


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/cac39e9c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/cac39e9c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/cac39e9c

Branch: refs/heads/master
Commit: cac39e9c8a05784dd7c1f9856818c8d3c50a1f05
Parents: 655a35b
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Dec 2 17:24:24 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Thu Dec 3 09:21:09 2015 -0600

----------------------------------------------------------------------
 etc/bin/spark-kernel                            |  1 -
 .../interpreter/scala/ScalaInterpreter.scala    | 34 ++++++++++++++------
 .../scala/ScalaInterpreterSpec.scala            | 33 ++++++++++++++++++-
 3 files changed, 56 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/cac39e9c/etc/bin/spark-kernel
----------------------------------------------------------------------
diff --git a/etc/bin/spark-kernel b/etc/bin/spark-kernel
index 18cce1d..b827265 100755
--- a/etc/bin/spark-kernel
+++ b/etc/bin/spark-kernel
@@ -32,5 +32,4 @@ export PYTHONHASHSEED=0
 
 exec "$SPARK_HOME"/bin/spark-submit \
   ${SPARK_OPTS} \
-  --driver-class-path $PROG_HOME/lib/${KERNEL_ASSEMBLY} \
   --class com.ibm.spark.SparkKernel $PROG_HOME/lib/${KERNEL_ASSEMBLY} "$@"

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/cac39e9c/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
index 6fc0aa6..078054a 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -33,12 +33,13 @@ import org.apache.spark.SparkContext
 import org.apache.spark.repl.{SparkCommandLine, SparkIMain, SparkJLineCompletion}
 import org.slf4j.LoggerFactory
 
+import scala.annotation.tailrec
 import scala.concurrent.{Await, Future}
 import scala.language.reflectiveCalls
 import scala.tools.nsc.backend.JavaPlatform
 import scala.tools.nsc.interpreter.{OutputStream, IR, JPrintWriter, InputStream}
 import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.MergedClassPath
+import scala.tools.nsc.util.{ClassPath, MergedClassPath}
 import scala.tools.nsc.{Global, Settings, io}
 import scala.util.{Try => UtilTry}
 
@@ -193,15 +194,7 @@ class ScalaInterpreter() extends Interpreter {
     val args = interpreterArgs(kernel)
     this.settings = newSettings(args)
 
-    val urls = _thisClassloader match {
-      case cl: java.net.URLClassLoader => cl.getURLs.toList
-      case a => // TODO: Should we really be using sys.error here?
-        sys.error("[SparkInterpreter] Unexpected class loader: " + a.getClass)
-    }
-    val classpath = urls.map(_.toString)
-
-    this.settings.classpath.value =
-      classpath.distinct.mkString(java.io.File.pathSeparator)
+    this.settings.classpath.value = buildClasspath(_thisClassloader)
     this.settings.embeddedDefaults(_runtimeClassloader)
 
     maxInterpreterThreads = maxInterpreterThreads(kernel)
@@ -212,6 +205,27 @@ class ScalaInterpreter() extends Interpreter {
     this
   }
 
+  protected[scala] def buildClasspath(classLoader: ClassLoader): String = {
+
+    def toClassLoaderList( classLoader: ClassLoader ): Seq[ClassLoader] = {
+      @tailrec
+      def toClassLoaderListHelper( aClassLoader: ClassLoader, theList: Seq[ClassLoader]):Seq[ClassLoader] = {
+        if( aClassLoader == null )
+          return theList
+
+        toClassLoaderListHelper( aClassLoader.getParent, aClassLoader +: theList )
+      }
+      toClassLoaderListHelper(classLoader, Seq())
+    }
+
+    val urls = toClassLoaderList(classLoader).flatMap{
+        case cl: java.net.URLClassLoader => cl.getURLs.toList
+        case a => List()
+    }
+
+    urls.foldLeft("")((l, r) => ClassPath.join(l, r.toString))
+  }
+
   protected def interpreterArgs(kernel: KernelLike): List[String] = {
     import scala.collection.JavaConverters._
     kernel.config.getStringList("interpreter_args").asScala.toList

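The @@ -212 hunk replaces the single-classloader assumption (which hit sys.error on anything but a URLClassLoader) with a walk up the whole classloader chain. A standalone sketch of the same idea, independent of the kernel; matching on URLClassLoader assumes a pre-Java-9 application classloader:

    import scala.annotation.tailrec

    object ClasspathSketch {
      // Walk from the given classloader up to the root, parent-first, and
      // join every URLClassLoader's URLs into one classpath string.
      def buildClasspath(classLoader: ClassLoader): String = {
        @tailrec
        def loaders(cl: ClassLoader, acc: List[ClassLoader]): List[ClassLoader] =
          if (cl == null) acc else loaders(cl.getParent, cl :: acc)

        loaders(classLoader, Nil)
          .flatMap {
            case url: java.net.URLClassLoader => url.getURLs.toList
            case _                            => Nil // e.g. bootstrap loader
          }
          .map(_.toString)
          .mkString(java.io.File.pathSeparator)
      }

      def main(args: Array[String]): Unit =
        println(buildClasspath(getClass.getClassLoader))
    }

Parent-first ordering matters: as the spec below asserts, the parent loader's jars come first in the joined path.
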
http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/cac39e9c/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala b/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
index bf8c5d8..1b89df3 100644
--- a/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
+++ b/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
@@ -17,7 +17,7 @@
 package com.ibm.spark.kernel.interpreter.scala
 
 import java.io.{File, InputStream, OutputStream}
-import java.net.URL
+import java.net.{URLClassLoader, URL}
 
 import com.ibm.spark.interpreter.Results.Result
 import com.ibm.spark.interpreter._
@@ -34,6 +34,7 @@ import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
 import scala.concurrent.Future
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter.{JPrintWriter, IR}
+import scala.tools.nsc.util.ClassPath
 
 class ScalaInterpreterSpec extends FunSpec
   with Matchers with MockitoSugar with BeforeAndAfter
@@ -149,6 +150,36 @@ class ScalaInterpreterSpec extends FunSpec
       }
     }
 
+    describe("#buildClasspath") {
+      it("should return classpath based on classloader hierarchy") {
+        // Needed to access runtimeClassloader method
+        import scala.language.reflectiveCalls
+
+        // Create a new interpreter exposing the internal runtime classloader
+        val itInterpreter = new StubbedStartInterpreter
+
+        val parentUrls = Array(
+          new URL("file:/some/dir/a.jar"),
+          new URL("file:/some/dir/b.jar"),
+          new URL("file:/some/dir/c.jar")
+        )
+
+        val theParentClassloader = new URLClassLoader(parentUrls, null)
+
+        val urls = Array(
+          new URL("file:/some/dir/1.jar"),
+          new URL("file:/some/dir/2.jar"),
+          new URL("file:/some/dir/3.jar")
+        )
+
+        val theClassloader = new URLClassLoader(urls, theParentClassloader)
+
+        val expected = ClassPath.join((parentUrls ++ urls).map(_.toString) :_*)
+
+        itInterpreter.buildClasspath(theClassloader) should be(expected)
+      }
+    }
+
     describe("#interrupt") {
       it("should fail a require if the interpreter is not started") {
         intercept[IllegalArgumentException] {


[26/50] [abbrv] incubator-toree git commit: Removed Hadoop as an explicit build dependency

Posted by lr...@apache.org.
Removed Hadoop as an explicit build dependency


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/bc834e87
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/bc834e87
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/bc834e87

Branch: refs/heads/master
Commit: bc834e873d9668d8060b43bd871bd861368e3a54
Parents: dfffa6c
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Nov 25 09:43:17 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Wed Nov 25 09:43:17 2015 -0600

----------------------------------------------------------------------
 Makefile             |  3 +--
 project/Common.scala | 30 ++----------------------------
 2 files changed, 3 insertions(+), 30 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/bc834e87/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index 745610d..b0cafd1 100644
--- a/Makefile
+++ b/Makefile
@@ -19,9 +19,8 @@
 VERSION?=0.1.5
 IS_SNAPSHOT?=true
 APACHE_SPARK_VERSION?=1.5.1
-APACHE_HADOOP_VERSION?=2.3.0
 
-ENV_OPTS=APACHE_SPARK_VERSION=$(APACHE_SPARK_VERSION) APACHE_HADOOP_VERSION=$(APACHE_HADOOP_VERSION) VERSION=$(VERSION) IS_SNAPSHOT=$(IS_SNAPSHOT)
+ENV_OPTS=APACHE_SPARK_VERSION=$(APACHE_SPARK_VERSION) VERSION=$(VERSION) IS_SNAPSHOT=$(IS_SNAPSHOT)
 
 FULL_VERSION=$(shell echo $(VERSION)`[ "$(IS_SNAPSHOT)" == "true" ] && (echo '-SNAPSHOT')` )
 ASSEMBLY_JAR=$(shell echo kernel-assembly-$(FULL_VERSION).jar )

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/bc834e87/project/Common.scala
----------------------------------------------------------------------
diff --git a/project/Common.scala b/project/Common.scala
index f61daaf..cef0d45 100644
--- a/project/Common.scala
+++ b/project/Common.scala
@@ -78,29 +78,6 @@ object Common {
     }
   }
 
-  lazy val hadoopVersion = {
-    val hadoopEnvironmentVariable = "APACHE_HADOOP_VERSION"
-    val defaultHadoopVersion = "2.3.0"
-
-    val _hadoopVersion = Properties.envOrNone(hadoopEnvironmentVariable)
-
-    if (_hadoopVersion.isEmpty) {
-      scala.Console.out.println(
-        s"""
-           |[INFO] Using default Apache Hadoop $defaultHadoopVersion!
-           """.stripMargin.trim.replace('\n', ' '))
-      defaultHadoopVersion
-    } else {
-      val version = _hadoopVersion.get
-      scala.Console.out.println(
-        s"""
-           |[INFO] Using Apache Hadoop $version provided from
-           |$hadoopEnvironmentVariable!
-           """.stripMargin.trim.replace('\n', ' '))
-      version
-    }
-  }
-
   val settings: Seq[Def.Setting[_]] = Seq(
     organization := buildOrganization,
     version := buildVersion,
@@ -165,7 +142,6 @@ object Common {
 
 
   buildLibraryDependencies ++= Seq( "org.apache.spark" %% "spark-core" % "1.5.1"  % "provided" excludeAll( // Apache v2
-    ExclusionRule(organization = "org.apache.hadoop"),
 
     // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
     ExclusionRule(
@@ -177,10 +153,8 @@ object Common {
     "org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
     "org.apache.spark" %% "spark-mllib" % sparkVersion % "provided",
     "org.apache.spark" %% "spark-graphx" % sparkVersion % "provided",
-    "org.apache.spark" %% "spark-repl" % sparkVersion  % "provided" excludeAll
-      ExclusionRule(organization = "org.apache.hadoop"),
-    "org.apache.hadoop" % "hadoop-client" % hadoopVersion % "provided" excludeAll
-      ExclusionRule(organization = "javax.servlet"))
+    "org.apache.spark" %% "spark-repl" % sparkVersion  % "provided"
+  )
 
   // ==========================================================================
   // = REBUILD IVY XML SETTINGS BELOW

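With hadoop-client gone, Hadoop now arrives only transitively through the provided Spark artifacts supplied by the Spark distribution at launch. A build.sbt-style sketch of the trimmed dependency list (versions illustrative; ExclusionRule as in the sbt 0.13 DSL used by this build):

    // build.sbt sketch: every Spark artifact is "provided", so the Spark
    // distribution that spark-submits the kernel supplies them (and Hadoop).
    val sparkVersion = "1.5.1"

    libraryDependencies ++= Seq(
      "org.apache.spark" %% "spark-core"   % sparkVersion % "provided" excludeAll
        ExclusionRule(organization = "org.jboss.netty"),
      "org.apache.spark" %% "spark-sql"    % sparkVersion % "provided",
      "org.apache.spark" %% "spark-mllib"  % sparkVersion % "provided",
      "org.apache.spark" %% "spark-graphx" % sparkVersion % "provided",
      "org.apache.spark" %% "spark-repl"   % sparkVersion % "provided"
    )
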

[45/50] [abbrv] incubator-toree git commit: Merge pull request #228 from lbustelo/Issue224

Posted by lr...@apache.org.
Merge pull request #228 from lbustelo/Issue224

Removed code that was adding jar to conf... no longer needed

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/3905e478
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/3905e478
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/3905e478

Branch: refs/heads/master
Commit: 3905e47815e1e08d26b5adf7f8269b4110ebae32
Parents: 9c8a01f 3383622
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Tue Dec 8 11:26:51 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Tue Dec 8 11:26:51 2015 -0600

----------------------------------------------------------------------
 .../scala/com/ibm/spark/kernel/api/Kernel.scala | 80 +-------------------
 .../com/ibm/spark/kernel/api/KernelSpec.scala   | 24 ------
 2 files changed, 2 insertions(+), 102 deletions(-)
----------------------------------------------------------------------



[04/50] [abbrv] incubator-toree git commit: InterpreterManager for Interpreter initialization and management

Posted by lr...@apache.org.
InterpreterManager for Interpreter initialization and management


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/502372d4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/502372d4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/502372d4

Branch: refs/heads/master
Commit: 502372d47ddd940969149f96936746ba8f6f253a
Parents: d2172c1
Author: Brian Burns <bb...@us.ibm.com>
Authored: Mon Nov 9 15:51:37 2015 -0500
Committer: Brian Burns <bb...@us.ibm.com>
Committed: Mon Nov 9 15:51:37 2015 -0500

----------------------------------------------------------------------
 .../com/ibm/spark/interpreter/Interpreter.scala |  9 +++
 .../com/ibm/spark/kernel/api/KernelLike.scala   |  3 +
 .../com/ibm/spark/boot/CommandLineOptions.scala | 17 +++-
 .../boot/layer/ComponentInitialization.scala    | 19 +++--
 .../spark/boot/layer/InterpreterManager.scala   | 59 ++++++++++++++
 .../scala/com/ibm/spark/kernel/api/Kernel.scala | 29 +++++--
 .../StandardComponentInitializationSpec.scala   | 84 --------------------
 .../com/ibm/spark/kernel/api/KernelSpec.scala   | 10 ++-
 .../scala/test/utils/DummyInterpreter.scala     |  7 ++
 .../scala/test/utils/SparkKernelDeployer.scala  |  1 +
 .../pyspark/PySparkInterpreter.scala            | 22 ++++-
 .../com/ibm/spark/magic/builtin/PySpark.scala   |  2 +-
 resources/compile/reference.conf                |  5 +-
 resources/test/reference.conf                   |  2 +-
 .../interpreter/scala/ScalaInterpreter.scala    | 13 ++-
 .../com/ibm/spark/magic/builtin/Scala.scala     |  2 +-
 .../interpreter/sparkr/SparkRInterpreter.scala  | 12 ++-
 .../com/ibm/spark/magic/builtin/SparkR.scala    |  2 +-
 .../kernel/interpreter/sql/SqlInterpreter.scala | 13 ++-
 .../scala/com/ibm/spark/magic/builtin/Sql.scala |  2 +-
 20 files changed, 192 insertions(+), 121 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala b/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
index 24fe9dc..76d4432 100644
--- a/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
+++ b/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
@@ -18,11 +18,20 @@ package com.ibm.spark.interpreter
 
 import java.net.URL
 
+import com.ibm.spark.kernel.api.KernelLike
 import org.apache.spark.SparkContext
 
 import scala.tools.nsc.interpreter._
 
 trait Interpreter {
+
+  /**
+   * Initializes the interpreter.
+   * @param kernel The kernel
+   * @return The newly initialized interpreter
+   */
+  def init(kernel: KernelLike): Interpreter
+
   /**
    * Starts the interpreter, initializing any internal state.
    * @return A reference to the interpreter

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala
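init(kernel) is the key addition to the trait: interpreters can now be constructed without a kernel (which reflective loading from config requires) and bound to one later. A minimal sketch of the two-phase lifecycle; KernelLike here is a bare stand-in and StubKernel is hypothetical:

    trait KernelLike                      // stand-in for the kernel API trait
    object StubKernel extends KernelLike  // hypothetical kernel for the example

    trait Interpreter {
      def init(kernel: KernelLike): Interpreter
    }

    // No-arg constructor, so Class.forName(...).newInstance() can create it;
    // the kernel is bound afterwards through init.
    class SketchInterpreter extends Interpreter {
      private var _kernel: KernelLike = _
      override def init(kernel: KernelLike): Interpreter = {
        _kernel = kernel
        this
      }
    }

    object TwoPhaseSketch {
      def main(args: Array[String]): Unit = {
        val interpreter = new SketchInterpreter // phase 1: construct
        interpreter.init(StubKernel)            // phase 2: bind the kernel
      }
    }
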
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala b/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala
index 4098bfb..8fb5d80 100644
--- a/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala
+++ b/kernel-api/src/main/scala/com/ibm/spark/kernel/api/KernelLike.scala
@@ -90,6 +90,9 @@ trait KernelLike {
    */
   val data: java.util.Map[String, Any]
 
+
+  def interpreter(name: String): Option[com.ibm.spark.interpreter.Interpreter]
+
   def sparkContext: SparkContext
 
   def sparkConf: SparkConf

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
index 27c5c68..9539404 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
@@ -96,7 +96,7 @@ class CommandLineOptions(args: Seq[String]) {
   private val _nosparkcontext =
     parser.accepts("nosparkcontext", "kernel should not create a spark context")
 
-  private val _plugins = parser.accepts(
+  private val _interpreter_plugin = parser.accepts(
     "interpreter-plugin"
   ).withRequiredArg().ofType(classOf[String])
 
@@ -178,8 +178,19 @@ class CommandLineOptions(args: Seq[String]) {
   }
 
   private def interpreterPlugins: Option[java.util.List[String]] = {
-    val p = getAll(_plugins)
-    p.map(_.asJava)
+    val defaults = List[String](
+      "PySpark:com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter",
+      "SparkR:com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter",
+      "SQL:com.ibm.spark.kernel.interpreter.sql.SqlInterpreter"
+    )
+
+    val userDefined = getAll(_interpreter_plugin) match {
+      case Some(l) => l
+      case _ => List[String]()
+    }
+
+    val p = defaults ++ userDefined
+    Some(p.asJava)
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
index df09028..e403e9b 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
@@ -86,14 +86,15 @@ trait StandardComponentInitialization extends ComponentInitialization {
     val dependencyDownloader = initializeDependencyDownloader(config)
     val magicLoader = initializeMagicLoader(
       config, interpreter, dependencyDownloader)
-    //val kernel = initializeKernel(
-    //  config, actorLoader, interpreter, commManager, magicLoader
-    //)
+    val manager =  InterpreterManager(config)
+      .addInterpreter("Scala",interpreter)
     val kernel = initializeKernel(
-      config, actorLoader, null, commManager, magicLoader
+      config, actorLoader, manager, commManager, magicLoader
     )
     val responseMap = initializeResponseMap()
 
+
+    /*
     // NOTE: Tested via initializing the following and returning this
     //       interpreter instead of the Scala one
     val pySparkInterpreter = new PySparkInterpreter(kernel)
@@ -144,10 +145,12 @@ trait StandardComponentInitialization extends ComponentInitialization {
           interpreter
       }
 
-    kernel.interpreter = defaultInterpreter
+    */
+    //kernel.interpreter = defaultInterpreter
     initializeSparkContext(config, kernel, appName)
 
-    (commStorage, commRegistrar, commManager, defaultInterpreter, kernel,
+    (commStorage, commRegistrar, commManager,
+      manager.defaultInterpreter.getOrElse(null), kernel,
       dependencyDownloader, magicLoader, responseMap)
 
   }
@@ -278,14 +281,14 @@ trait StandardComponentInitialization extends ComponentInitialization {
   private def initializeKernel(
     config: Config,
     actorLoader: ActorLoader,
-    interpreter: Interpreter,
+    interpreterManager: InterpreterManager,
     commManager: CommManager,
     magicLoader: MagicLoader
   ) = {
     val kernel = new Kernel(
       config,
       actorLoader,
-      interpreter,
+      interpreterManager,
       commManager,
       magicLoader
     )

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala b/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
new file mode 100644
index 0000000..e07af8e
--- /dev/null
+++ b/kernel/src/main/scala/com/ibm/spark/boot/layer/InterpreterManager.scala
@@ -0,0 +1,59 @@
+package com.ibm.spark.boot.layer
+
+import com.ibm.spark.kernel.api.KernelLike
+import com.typesafe.config.Config
+import com.ibm.spark.interpreter._
+import scala.collection.JavaConverters._
+
+case class InterpreterManager(
+  default: String = "Scala",
+  interpreters: Map[String, Interpreter] = Map[String, Interpreter]()
+) {
+
+  def initializeInterpreters(kernel: KernelLike): Unit = {
+    interpreters.values.foreach(interpreter =>
+      interpreter.init(kernel)
+    )
+  }
+
+  def addInterpreter(
+    name:String,
+    interpreter: Interpreter
+  ): InterpreterManager = {
+    copy(interpreters = interpreters + (name -> interpreter))
+  }
+
+  def defaultInterpreter(): Option[Interpreter] = {
+    interpreters.get(default)
+  }
+}
+
+object InterpreterManager {
+
+  def apply(config: Config): InterpreterManager = {
+    val p = config
+      .getStringList("interpreter_plugins")
+      .listIterator().asScala
+
+    val m = p.foldLeft(Map[String, Interpreter]())( (acc, v) => {
+      v.split(":") match {
+        case Array(name, className) =>
+          try {
+            val i = Class
+                .forName(className)
+                .newInstance()
+                .asInstanceOf[Interpreter]
+            acc + (name -> i)
+          }
+          catch {
+            case _:Throwable => acc
+          }
+        case _ => acc
+      }
+    })
+
+    val default = config.getString("default_interpreter")
+
+    InterpreterManager(interpreters = m, default = default)
+  }
+}

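InterpreterManager is a case class, so addInterpreter returns a new manager via copy rather than mutating state. A runnable sketch of that shape, with String standing in for the Interpreter trait:

    // Sketch: immutable manager; String stands in for Interpreter.
    case class ManagerSketch(
      default: String = "Scala",
      interpreters: Map[String, String] = Map.empty
    ) {
      def addInterpreter(name: String, interpreter: String): ManagerSketch =
        copy(interpreters = interpreters + (name -> interpreter))

      def defaultInterpreter: Option[String] = interpreters.get(default)
    }

    object ManagerSketchApp {
      def main(args: Array[String]): Unit = {
        val base      = ManagerSketch()
        val withScala = base.addInterpreter("Scala", "scala-interpreter")

        println(base.defaultInterpreter)      // None: base is untouched
        println(withScala.defaultInterpreter) // Some(scala-interpreter)
      }
    }
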
http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
index 58676c3..f070004 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
@@ -20,6 +20,7 @@ import java.io.{OutputStream, InputStream, PrintStream}
 import java.util.concurrent.ConcurrentHashMap
 
 import com.ibm.spark.annotations.Experimental
+import com.ibm.spark.boot.layer.InterpreterManager
 import com.ibm.spark.comm.CommManager
 import com.ibm.spark.global
 import com.ibm.spark.interpreter.Results.Result
@@ -46,7 +47,7 @@ import com.ibm.spark.global.ExecuteRequestState
  * Represents the main kernel API to be used for interaction.
  *
  * @param config The configuration used when starting the kernel
- * @param interpreter The interpreter to expose in this instance
+ * @param interpreterManager The interpreter manager to expose in this instance
  * @param comm The Comm manager to expose in this instance
  * @param actorLoader The actor loader to use for message relaying
  */
@@ -54,7 +55,7 @@ import com.ibm.spark.global.ExecuteRequestState
 class Kernel (
   private val config: Config,
   private val actorLoader: ActorLoader,
-  var interpreter: Interpreter,
+  val interpreterManager: InterpreterManager,
   val comm: CommManager,
   val magicLoader: MagicLoader
 ) extends KernelLike with LogLike {
@@ -107,6 +108,11 @@ class Kernel (
    */
   val data: java.util.Map[String, Any] = new ConcurrentHashMap[String, Any]()
 
+
+  interpreterManager.initializeInterpreters(this)
+
+  val interpreter = interpreterManager.defaultInterpreter().get
+
   /**
    * Handles the output of interpreting code.
    * @param output the output of the interpreter
@@ -424,9 +430,12 @@ class Kernel (
       @inline def getJarPathFor(klass: Class[_]): String =
         klass.getProtectionDomain.getCodeSource.getLocation.getPath
 
+      val interpreterC = interpreterManager.interpreters.values.map(_.getClass)
+
       // TODO: Provide less hard-coded solution in case additional dependencies
       //       are added or classes are refactored to different projects
-      val jarPaths = Seq(
+      val classDep = Seq(
+
         // Macro project
         classOf[com.ibm.spark.annotations.Experimental],
 
@@ -440,17 +449,19 @@ class Kernel (
         classOf[com.ibm.spark.kernel.api.KernelLike],
 
         // Scala-interpreter project
-        classOf[com.ibm.spark.kernel.interpreter.scala.ScalaInterpreter],
+        //classOf[com.ibm.spark.kernel.interpreter.scala.ScalaInterpreter],
 
         // PySpark-interpreter project
-        classOf[com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter],
+        //classOf[com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter],
 
         // SparkR-interpreter project
-        classOf[com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter],
+        //classOf[com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter],
 
         // Kernel project
         classOf[com.ibm.spark.boot.KernelBootstrap]
-      ).map(getJarPathFor)
+      )
+
+      val jarPaths = (interpreterC ++ classDep).map(getJarPathFor)
 
       logger.info("Adding kernel jars to cluster:\n- " +
         jarPaths.mkString("\n- "))
@@ -460,6 +471,10 @@ class Kernel (
     }
   }
 
+  override def interpreter(name: String): Option[Interpreter] = {
+    interpreterManager.interpreters.get(name)
+  }
+
   override def sparkContext: SparkContext = _sparkContext
   override def sparkConf: SparkConf = _sparkConf
   override def javaSparkContext: JavaSparkContext = _javaSparkContext

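The Kernel.scala hunk above derives the jars to ship from the loaded interpreter classes plus a few fixed project classes, all through getJarPathFor. A standalone sketch of that resolution; the class chosen below is illustrative, and getCodeSource can be null for JDK bootstrap classes:

    object JarPathSketch {
      // Resolve the jar (or class directory) a given class was loaded from.
      def getJarPathFor(klass: Class[_]): String =
        klass.getProtectionDomain.getCodeSource.getLocation.getPath

      def main(args: Array[String]): Unit =
        // e.g. the scala-library jar that provides scala.Predef
        println(getJarPathFor(scala.Predef.getClass))
    }
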
http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala b/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala
deleted file mode 100644
index 68ee0cc..0000000
--- a/kernel/src/test/scala/com/ibm/spark/boot/layer/StandardComponentInitializationSpec.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright 2014 IBM Corp.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.ibm.spark.boot.layer
-
-import com.ibm.spark.boot.{CommandLineOptions, KernelBootstrap}
-import com.ibm.spark.interpreter.Interpreter
-import com.ibm.spark.kernel.api.KernelLike
-import com.ibm.spark.kernel.protocol.v5.KMBuilder
-import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
-import com.ibm.spark.utils.LogLike
-import com.typesafe.config.Config
-import org.apache.spark.{SparkConf, SparkContext}
-import org.mockito.ArgumentCaptor
-import org.mockito.Matchers._
-import org.mockito.Mockito._
-import org.scalatest.mock.MockitoSugar
-import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
-
-import scala.collection.mutable
-import scala.collection.JavaConverters._
-class StandardComponentInitializationSpec extends FunSpec with Matchers
-  with MockitoSugar with BeforeAndAfter
-{
-  private val TestAppName = "test app"
-
-  private var mockConfig: Config = _
-  private var mockActorLoader: ActorLoader = _
-  private var mockSparkContext: SparkContext = _
-  private var mockInterpreter: Interpreter = _
-  private var mockKernel: KernelLike = _
-  private var spyComponentInitialization: StandardComponentInitialization = _
-
-  private class TestComponentInitialization
-    extends StandardComponentInitialization with LogLike
-
-  before {
-    mockConfig = mock[Config]
-    mockActorLoader = mock[ActorLoader]
-    mockSparkContext = mock[SparkContext]
-    mockInterpreter = mock[Interpreter]
-    mockKernel = mock[KernelLike]
-
-    spyComponentInitialization = spy(new TestComponentInitialization())
-  }
-
-  describe("StandardComponentInitialization") {
-    describe("#initializeInterpreterPlugins") {
-      it("should return a map with the DummyInterpreter") {
-        val conf = new CommandLineOptions(List(
-          "--interpreter-plugin", "dummy:test.utils.DummyInterpreter",
-          "--interpreter-plugin", "dummy2:test.utils.DummyInterpreter"
-        )).toConfig
-
-        val m = spyComponentInitialization
-          .initializeInterpreterPlugins(mockKernel, conf)
-
-        m.get("dummy") should not be None
-        m.get("dummy2") should not be None
-      }
-      it("should return an empty map") {
-        val conf = new CommandLineOptions(List()).toConfig
-
-        val m = spyComponentInitialization
-          .initializeInterpreterPlugins(mockKernel, conf)
-
-        m.isEmpty shouldBe true
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
index 22cd08f..f5d5517 100644
--- a/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/kernel/api/KernelSpec.scala
@@ -2,6 +2,7 @@ package com.ibm.spark.kernel.api
 
 import java.io.{InputStream, PrintStream}
 
+import com.ibm.spark.boot.layer.InterpreterManager
 import com.ibm.spark.comm.CommManager
 import com.ibm.spark.interpreter._
 import com.ibm.spark.kernel.protocol.v5._
@@ -31,6 +32,7 @@ class KernelSpec extends FunSpec with Matchers with MockitoSugar
   private var mockSparkConf: SparkConf = _
   private var mockActorLoader: ActorLoader = _
   private var mockInterpreter: Interpreter = _
+  private var mockInterpreterManager: InterpreterManager = _
   private var mockCommManager: CommManager = _
   private var mockMagicLoader: MagicLoader = _
   private var kernel: Kernel = _
@@ -39,8 +41,13 @@ class KernelSpec extends FunSpec with Matchers with MockitoSugar
   before {
     mockConfig = mock[Config]
     mockInterpreter = mock[Interpreter]
+    mockInterpreterManager = mock[InterpreterManager]
     mockSparkContext = mock[SparkContext]
     mockSparkConf = mock[SparkConf]
+    when(mockInterpreterManager.defaultInterpreter())
+      .thenReturn(Some(mockInterpreter))
+    when(mockInterpreterManager.interpreters)
+      .thenReturn(Map[String, com.ibm.spark.interpreter.Interpreter]())
     when(mockInterpreter.interpret(BadCode.get))
       .thenReturn((Results.Incomplete, null))
     when(mockInterpreter.interpret(GoodCode.get))
@@ -48,12 +55,13 @@ class KernelSpec extends FunSpec with Matchers with MockitoSugar
     when(mockInterpreter.interpret(ErrorCode.get))
       .thenReturn((Results.Error, Right(ExecuteError("error","bad", List("1")))))
 
+
     mockCommManager = mock[CommManager]
     mockActorLoader = mock[ActorLoader]
     mockMagicLoader = mock[MagicLoader]
 
     kernel = new Kernel(
-      mockConfig, mockActorLoader, mockInterpreter, mockCommManager,
+      mockConfig, mockActorLoader, mockInterpreterManager, mockCommManager,
       mockMagicLoader
     )
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel/src/test/scala/test/utils/DummyInterpreter.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/test/utils/DummyInterpreter.scala b/kernel/src/test/scala/test/utils/DummyInterpreter.scala
index 1ab8cac..ee7c096 100644
--- a/kernel/src/test/scala/test/utils/DummyInterpreter.scala
+++ b/kernel/src/test/scala/test/utils/DummyInterpreter.scala
@@ -102,4 +102,11 @@ class DummyInterpreter(kernel: KernelLike) extends Interpreter {
    * @return The cursor position and list of possible completions
    */
   override def completion(code: String, pos: Int): (Int, List[String]) = ???
+
+  /**
+   * Initializes the interpreter.
+   * @param kernel The kernel
+   * @return The newly initialized interpreter
+   */
+  override def init(kernel: KernelLike): Interpreter = ???
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala b/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala
index fe97463..4f0412a 100644
--- a/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala
+++ b/kernel/src/test/scala/test/utils/SparkKernelDeployer.scala
@@ -83,6 +83,7 @@ object SparkKernelDeployer extends LogLike with MockitoSugar {
     }
 
 
+
     /*
     def reallyInitializeSparkContext(
       config: Config,

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala
----------------------------------------------------------------------
diff --git a/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala b/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala
index ab1d061..38e1d68 100644
--- a/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala
+++ b/pyspark-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/pyspark/PySparkInterpreter.scala
@@ -33,12 +33,11 @@ import scala.tools.nsc.interpreter.{InputStream, OutputStream}
  * SPARK_HOME, PYTHONPATH pointing to Spark's Python source, and py4j installed
  * where it is accessible to the Spark Kernel.
  *
- * @param _kernel The kernel API to expose to the PySpark instance
  */
 class PySparkInterpreter(
-  private val _kernel: KernelLike
 ) extends Interpreter {
   private val logger = LoggerFactory.getLogger(this.getClass)
+  private var _kernel:KernelLike = _
 
   // TODO: Replace hard-coded maximum queue count
   /** Represents the state used by this interpreter's Python instance. */
@@ -50,6 +49,7 @@ class PySparkInterpreter(
     _kernel
   )
 
+
   /** Represents the interface for Python to talk to JVM Spark components. */
   private lazy val gatewayServer = new GatewayServer(pySparkBridge, 0)
 
@@ -69,6 +69,21 @@ class PySparkInterpreter(
   private lazy val pySparkTransformer = new PySparkTransformer
 
   /**
+   * Initializes the interpreter.
+   * @param kernel The kernel
+   * @return The newly initialized interpreter
+   */
+  override def init(kernel: KernelLike): Interpreter = {
+    _kernel = kernel
+    this
+  }
+
+
+  override def bindSparkContext(sparkContext: SparkContext) = {
+
+  }
+
+  /**
    * Executes the provided code with the option to silence output.
    * @param code The code to execute
    * @param silent Whether or not to execute the code silently (no output)
@@ -128,7 +143,7 @@ class PySparkInterpreter(
   override def updatePrintStreams(in: InputStream, out: OutputStream, err: OutputStream): Unit = ???
 
   // Unsupported
-  override def classServerURI: String = ???
+  override def classServerURI: String = ""
 
   // Unsupported
   override def interrupt(): Interpreter = ???
@@ -141,4 +156,5 @@ class PySparkInterpreter(
 
   // Unsupported
   override def doQuietly[T](body: => T): T = ???
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/pyspark-interpreter/src/main/scala/com/ibm/spark/magic/builtin/PySpark.scala
----------------------------------------------------------------------
diff --git a/pyspark-interpreter/src/main/scala/com/ibm/spark/magic/builtin/PySpark.scala b/pyspark-interpreter/src/main/scala/com/ibm/spark/magic/builtin/PySpark.scala
index 5fed927..a0a79b5 100644
--- a/pyspark-interpreter/src/main/scala/com/ibm/spark/magic/builtin/PySpark.scala
+++ b/pyspark-interpreter/src/main/scala/com/ibm/spark/magic/builtin/PySpark.scala
@@ -26,7 +26,7 @@ import com.ibm.spark.magic.dependencies.IncludeKernel
  */
 class PySpark extends CellMagic with IncludeKernel {
   override def execute(code: String): CellMagicOutput = {
-    val pySpark = Option(kernel.data.get("PySpark"))
+    val pySpark = kernel.interpreter("PySpark")
 
     if (pySpark.isEmpty || pySpark.get == null)
       throw new PySparkException("PySpark is not available!")

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/resources/compile/reference.conf
----------------------------------------------------------------------
diff --git a/resources/compile/reference.conf b/resources/compile/reference.conf
index c9d12c9..07a49ac 100644
--- a/resources/compile/reference.conf
+++ b/resources/compile/reference.conf
@@ -57,8 +57,5 @@ send_empty_output = ${?SEND_EMPTY_OUTPUT}
 
 jar_dir = ${?JAR_DIR}
 
-default_interpreter = "scala"
+default_interpreter = "Scala"
 default_interpreter = ${?DEFAULT_INTERPRETER}
-
-sparkcontext = "yes"
-

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/resources/test/reference.conf
----------------------------------------------------------------------
diff --git a/resources/test/reference.conf b/resources/test/reference.conf
index d506c8a..407f344 100644
--- a/resources/test/reference.conf
+++ b/resources/test/reference.conf
@@ -55,5 +55,5 @@ max_interpreter_threads = ${?MAX_INTERPRETER_THREADS}
 send_empty_output = false
 send_empty_output = ${?SEND_EMPTY_OUTPUT}
 
-default_interpreter = "scala"
+default_interpreter = "Scala"
 default_interpreter = ${?DEFAULT_INTERPRETER}

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
index f774a5a..bccdf62 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -27,7 +27,7 @@ import com.ibm.spark.global.StreamState
 import com.ibm.spark.interpreter
 import com.ibm.spark.interpreter._
 import com.ibm.spark.interpreter.imports.printers.{WrapperConsole, WrapperSystem}
-import com.ibm.spark.kernel.api.KernelOptions
+import com.ibm.spark.kernel.api.{KernelLike, KernelOptions}
 import com.ibm.spark.utils.{MultiOutputStream, TaskManager}
 import org.apache.spark.SparkContext
 import org.apache.spark.repl.{SparkIMain, SparkJLineCompletion}
@@ -190,6 +190,17 @@ class ScalaInterpreter(
     )
   }
 
+  override def init(kernel: KernelLike): Interpreter = {
+    doQuietly {
+      bind(
+        "kernel", "com.ibm.spark.kernel.api.Kernel",
+        kernel, List( """@transient implicit""")
+      )
+    }
+
+    this
+  }
+
   override def interrupt(): Interpreter = {
     require(sparkIMain != null && taskManager != null)
 

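The init above exposes the kernel to user code by binding it into the REPL as a @transient implicit. A minimal sketch of what bind does, using the stock Scala 2.11 REPL (IMain) rather than Spark's SparkIMain fork:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.IMain

    object BindSketch {
      def main(args: Array[String]): Unit = {
        val settings = new Settings
        settings.usejavacp.value = true        // reuse this JVM's classpath
        val imain = new IMain(settings)

        imain.bind("answer", "Int", 42)        // expose `answer` to user code
        imain.interpret("println(answer + 1)") // prints 43
        imain.close()
      }
    }
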
http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/scala-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Scala.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Scala.scala b/scala-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Scala.scala
index c739bc6..c850926 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Scala.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Scala.scala
@@ -26,7 +26,7 @@ import com.ibm.spark.magic.{CellMagic, CellMagicOutput}
  */
 class Scala extends CellMagic with IncludeKernel {
   override def execute(code: String): CellMagicOutput = {
-    val scala = Option(kernel.data.get("Scala"))
+    val scala = kernel.interpreter("Scala")
 
     if (scala.isEmpty || scala.get == null)
       throw new ScalaException("Scala is not available!")

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala
----------------------------------------------------------------------
diff --git a/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala b/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala
index 0e52f9d..a950da0 100644
--- a/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala
+++ b/sparkr-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sparkr/SparkRInterpreter.scala
@@ -32,12 +32,11 @@ import scala.tools.nsc.interpreter.{InputStream, OutputStream}
  * SPARK_HOME pointing to a binary distribution (needs packaged SparkR library)
  * and an implementation of R on the path.
  *
- * @param _kernel The kernel API to expose to the SparkR instance
  */
 class SparkRInterpreter(
-  private val _kernel: KernelLike
 ) extends Interpreter {
   private val logger = LoggerFactory.getLogger(this.getClass)
+  private var _kernel: KernelLike = _
 
   // TODO: Replace hard-coded maximum queue count
   /** Represents the state used by this interpreter's R instance. */
@@ -67,6 +66,11 @@ class SparkRInterpreter(
   )
   private lazy val sparkRTransformer = new SparkRTransformer
 
+  override def init(kernel: KernelLike): Interpreter = {
+    _kernel = kernel
+    this
+  }
+
   /**
    * Executes the provided code with the option to silence output.
    * @param code The code to execute
@@ -127,7 +131,9 @@ class SparkRInterpreter(
   override def updatePrintStreams(in: InputStream, out: OutputStream, err: OutputStream): Unit = ???
 
   // Unsupported
-  override def classServerURI: String = ???
+  override def classServerURI: String = ""
+
+  override def bindSparkContext(sparkContext: SparkContext): Unit = {}
 
   // Unsupported
   override def interrupt(): Interpreter = ???

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/sparkr-interpreter/src/main/scala/com/ibm/spark/magic/builtin/SparkR.scala
----------------------------------------------------------------------
diff --git a/sparkr-interpreter/src/main/scala/com/ibm/spark/magic/builtin/SparkR.scala b/sparkr-interpreter/src/main/scala/com/ibm/spark/magic/builtin/SparkR.scala
index cbbd8e2..7eba136 100644
--- a/sparkr-interpreter/src/main/scala/com/ibm/spark/magic/builtin/SparkR.scala
+++ b/sparkr-interpreter/src/main/scala/com/ibm/spark/magic/builtin/SparkR.scala
@@ -26,7 +26,7 @@ import com.ibm.spark.magic.dependencies.IncludeKernel
  */
 class SparkR extends CellMagic with IncludeKernel {
   override def execute(code: String): CellMagicOutput = {
-    val sparkR = Option(kernel.data.get("SparkR"))
+    val sparkR = kernel.interpreter("SparkR")
 
     if (sparkR.isEmpty || sparkR.get == null)
       throw new SparkRException("SparkR is not available!")

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
----------------------------------------------------------------------
diff --git a/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala b/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
index ad06fbd..76ae262 100644
--- a/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
+++ b/sql-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/sql/SqlInterpreter.scala
@@ -20,6 +20,7 @@ import java.net.URL
 import com.ibm.spark.interpreter.{ExecuteFailure, ExecuteOutput, Interpreter}
 import com.ibm.spark.interpreter.Results.Result
 import com.ibm.spark.kernel.api.KernelLike
+import org.apache.spark.SparkContext
 import org.apache.spark.sql.SQLContext
 
 import scala.concurrent.duration._
@@ -30,9 +31,15 @@ import scala.tools.nsc.interpreter.{OutputStream, InputStream}
  * Represents an interpreter interface to Spark SQL.
  */
 class SqlInterpreter(private val kernel: KernelLike) extends Interpreter {
-  private lazy val sqlService = new SqlService(kernel)
+  private var _kernel: KernelLike = _
+  private lazy val sqlService = new SqlService(_kernel)
   private lazy val sqlTransformer = new SqlTransformer
 
+  override def init(kernel: KernelLike): Interpreter = {
+    _kernel = kernel
+    this
+  }
+
   /**
    * Executes the provided code with the option to silence output.
    * @param code The code to execute
@@ -93,7 +100,9 @@ class SqlInterpreter(private val kernel: KernelLike) extends Interpreter {
   override def updatePrintStreams(in: InputStream, out: OutputStream, err: OutputStream): Unit = ???
 
   // Unsupported
-  override def classServerURI: String = ???
+  override def classServerURI: String = ""
+
+  override def bindSparkContext(sparkContext: SparkContext): Unit = {}
 
   // Unsupported
   override def interrupt(): Interpreter = ???

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/502372d4/sql-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Sql.scala
----------------------------------------------------------------------
diff --git a/sql-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Sql.scala b/sql-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Sql.scala
index a15817b..a8f439c 100644
--- a/sql-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Sql.scala
+++ b/sql-interpreter/src/main/scala/com/ibm/spark/magic/builtin/Sql.scala
@@ -26,7 +26,7 @@ import com.ibm.spark.magic.dependencies.IncludeKernel
  */
 class Sql extends CellMagic with IncludeKernel {
   override def execute(code: String): CellMagicOutput = {
-    val sparkR = Option(kernel.data.get("SQL"))
+    val sparkR = kernel.interpreter("SQL")
 
     if (sparkR.isEmpty || sparkR.get == null)
       throw new SqlException("SQL is not available!")


[12/50] [abbrv] incubator-toree git commit: Merge pull request #195 from bpburns/plugin

Posted by lr...@apache.org.
Merge pull request #195 from bpburns/plugin

Allow attaching new Interpreters through config

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/45e451a5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/45e451a5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/45e451a5

Branch: refs/heads/master
Commit: 45e451a5691bf7f4a30f69dbbba3e6651006b580
Parents: 9db161f ce10383
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Tue Nov 17 14:25:11 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Tue Nov 17 14:25:11 2015 -0600

----------------------------------------------------------------------
 .../com/ibm/spark/interpreter/Interpreter.scala |  13 ++
 .../com/ibm/spark/kernel/api/KernelLike.scala   |   6 +
 .../com/ibm/spark/boot/CommandLineOptions.scala |  23 +-
 .../boot/layer/ComponentInitialization.scala    | 234 ++-----------------
 .../spark/boot/layer/InterpreterManager.scala   |  66 ++++++
 .../scala/com/ibm/spark/kernel/api/Kernel.scala |  50 ++--
 .../ibm/spark/boot/CommandLineOptionsSpec.scala |  16 ++
 .../StandardComponentInitializationSpec.scala   | 114 ---------
 .../com/ibm/spark/kernel/api/KernelSpec.scala   |  10 +-
 .../InterpreterActorSpecForIntegration.scala    |  22 +-
 .../PostProcessorSpecForIntegration.scala       |  21 +-
 .../scala/test/utils/DummyInterpreter.scala     | 112 +++++++++
 .../scala/test/utils/SparkKernelDeployer.scala  |  44 +---
 .../pyspark/PySparkInterpreter.scala            |  22 +-
 .../com/ibm/spark/magic/builtin/PySpark.scala   |   2 +-
 resources/compile/reference.conf                |  11 +-
 resources/test/reference.conf                   |  11 +-
 .../interpreter/scala/ScalaInterpreter.scala    | 120 ++++++++--
 .../com/ibm/spark/magic/builtin/Scala.scala     |   2 +-
 .../scala/ScalaInterpreterSpec.scala            |   5 +-
 .../AddExternalJarMagicSpecForIntegration.scala |  21 +-
 .../interpreter/sparkr/SparkRInterpreter.scala  |  12 +-
 .../com/ibm/spark/magic/builtin/SparkR.scala    |   2 +-
 .../kernel/interpreter/sql/SqlInterpreter.scala |  15 +-
 .../scala/com/ibm/spark/magic/builtin/Sql.scala |   2 +-
 25 files changed, 501 insertions(+), 455 deletions(-)
----------------------------------------------------------------------



[21/50] [abbrv] incubator-toree git commit: Changes to README file.

Posted by lr...@apache.org.
Changes to README file.


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/f8b37cc3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/f8b37cc3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/f8b37cc3

Branch: refs/heads/master
Commit: f8b37cc354b2bca36dcaa889ae6959a84e945236
Parents: be758d8
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Nov 18 17:10:51 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Nov 24 08:49:50 2015 -0600

----------------------------------------------------------------------
 README.md | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/f8b37cc3/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index c9367d2..2115304 100644
--- a/README.md
+++ b/README.md
@@ -23,24 +23,25 @@ A version of the Spark Kernel is deployed as part of the [Try Jupyter!][try-jupy
 
 Develop
 =======
-[Vagrant][vagrant] is used to simplify the development experience. It is the only requirement to be able to build and test the Spark Kernel on your development machine. 
+[Vagrant][vagrant] is used to simplify the development experience. It is the only requirement to be able to build, package and test the Spark Kernel on your development machine. 
 
-To interact with the Spark Kernel using Jupyter, run
+To build and interact with the Spark Kernel using Jupyter, run
 ```
 make dev
 ```
 
 This will start a Jupyter notebook server accessible at `http://192.168.44.44:8888`. From here you can create notebooks that use the Spark Kernel configured for local mode.
 
+Tests can be run by doing `make test`.
 
 Build & Package
 ===============
 To build and package up the Spark Kernel, run
 ```
-make build
+make dist
 ```
 
-The resulting package of the kernel will be located at `./kernel/target/pack`. It contains a `Makefile` that can be used to install the Spark Kernel by running `make install` within the directory. More details about building and packaging can be found [here][4].
+The resulting package of the kernel will be located at `./dist/spark-kernel-<VERSION>.tar.gz`. The uncompressed package is what Jupyter runs when doing `make dev`.
 
 
 Version


[32/50] [abbrv] incubator-toree git commit: Merge pull request #210 from wangmiao1981/master

Posted by lr...@apache.org.
Merge pull request #210 from wangmiao1981/master

Fixed issue where code that evaluated to a string with a newline did not show up

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/ed8b2096
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/ed8b2096
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/ed8b2096

Branch: refs/heads/master
Commit: ed8b2096925bf1e3f91f4eb32a1e59d636ffe7f3
Parents: 09ed0bc 074b15e
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Thu Dec 3 08:35:34 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Thu Dec 3 08:35:34 2015 -0600

----------------------------------------------------------------------
 .../ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala  | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------



[28/50] [abbrv] incubator-toree git commit: Register contexts so that they can be closed when sockets close

Posted by lr...@apache.org.
Register contexts so that they can be closed when sockets close

Fixes #194


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/1398a638
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/1398a638
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/1398a638

Branch: refs/heads/master
Commit: 1398a638199cff1fd40b168e4fce11de3e942464
Parents: ecfa5dd
Author: Jakob Odersky <jo...@gmail.com>
Authored: Tue Dec 1 14:56:12 2015 -0800
Committer: Jakob Odersky <jo...@gmail.com>
Committed: Tue Dec 1 14:56:12 2015 -0800

----------------------------------------------------------------------
 .../ibm/spark/communication/SocketManager.scala | 40 +++++++++++++-------
 .../socket/ZeroMQSocketRunnable.scala           | 10 +++--
 2 files changed, 33 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/1398a638/communication/src/main/scala/com/ibm/spark/communication/SocketManager.scala
----------------------------------------------------------------------
diff --git a/communication/src/main/scala/com/ibm/spark/communication/SocketManager.scala b/communication/src/main/scala/com/ibm/spark/communication/SocketManager.scala
index 3973c8d..994360f 100644
--- a/communication/src/main/scala/com/ibm/spark/communication/SocketManager.scala
+++ b/communication/src/main/scala/com/ibm/spark/communication/SocketManager.scala
@@ -38,6 +38,19 @@ class SocketManager {
     new ConcurrentHashMap[SocketLike, ZMQ.Context]().asScala
 
   /**
+   * Provides and registers a new ZMQ context, used for creating a new socket.
+   * @param mkSocket a function that creates a socket using a given context
+   * @return the new socket
+   * @see newZmqContext
+   */
+  private def withNewContext[A <: SocketLike](mkSocket: ZMQ.Context => A): A = {
+    val ctx = newZmqContext()
+    val socket = mkSocket(ctx)
+    socketToContextMap.put(socket, ctx)
+    socket
+  }
+
+  /**
    * Closes the socket provided and also closes the context if no more sockets
    * are using the context.
    *
@@ -45,6 +58,7 @@ class SocketManager {
    */
   def closeSocket(socket: SocketLike) = {
     socket.close()
+
     socketToContextMap.remove(socket).foreach(context => {
       if (!socketToContextMap.values.exists(_ == context)) context.close()
     })
@@ -61,9 +75,9 @@ class SocketManager {
   def newReqSocket(
     address: String,
     inboundMessageCallback: (Seq[String]) => Unit
-  ): SocketLike = {
-    new JeroMQSocket(new ReqSocketRunnable(
-      newZmqContext(),
+  ): SocketLike = withNewContext{ ctx =>
+     new JeroMQSocket(new ReqSocketRunnable(
+      ctx,
       Some(inboundMessageCallback),
       Connect(address),
       Linger(0)
@@ -81,9 +95,9 @@ class SocketManager {
   def newRepSocket(
     address: String,
     inboundMessageCallback: (Seq[String]) => Unit
-  ): SocketLike = {
+  ): SocketLike = withNewContext{ ctx =>
     new JeroMQSocket(new ZeroMQSocketRunnable(
-      newZmqContext(),
+      ctx,
       RepSocket,
       Some(inboundMessageCallback),
       Bind(address),
@@ -100,9 +114,9 @@ class SocketManager {
    */
   def newPubSocket(
     address: String
-  ): SocketLike = {
+  ): SocketLike = withNewContext{ ctx =>
     new JeroMQSocket(new PubSocketRunnable(
-      newZmqContext(),
+      ctx,
       Bind(address),
       Linger(0)
     ))
@@ -119,9 +133,9 @@ class SocketManager {
   def newSubSocket(
     address: String,
     inboundMessageCallback: (Seq[String]) => Unit
-  ): SocketLike = {
+  ): SocketLike = withNewContext { ctx =>
     new JeroMQSocket(new ZeroMQSocketRunnable(
-      newZmqContext(),
+      ctx,
       SubSocket,
       Some(inboundMessageCallback),
       Connect(address),
@@ -141,9 +155,9 @@ class SocketManager {
   def newRouterSocket(
     address: String,
     inboundMessageCallback: (Seq[String]) => Unit
-  ): SocketLike = {
+  ): SocketLike = withNewContext { ctx =>
     new JeroMQSocket(new ZeroMQSocketRunnable(
-      newZmqContext(),
+      ctx,
       RouterSocket,
       Some(inboundMessageCallback),
       Bind(address),
@@ -163,9 +177,9 @@ class SocketManager {
     address: String,
     inboundMessageCallback: (Seq[String]) => Unit,
     identity: String = UUID.randomUUID().toString
-  ): SocketLike = {
+  ): SocketLike = withNewContext{ ctx =>
     new JeroMQSocket(new ZeroMQSocketRunnable(
-      newZmqContext(),
+      ctx,
       DealerSocket,
       Some(inboundMessageCallback),
       Connect(address),

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/1398a638/communication/src/main/scala/com/ibm/spark/communication/socket/ZeroMQSocketRunnable.scala
----------------------------------------------------------------------
diff --git a/communication/src/main/scala/com/ibm/spark/communication/socket/ZeroMQSocketRunnable.scala b/communication/src/main/scala/com/ibm/spark/communication/socket/ZeroMQSocketRunnable.scala
index 0a3c900..6fee716 100644
--- a/communication/src/main/scala/com/ibm/spark/communication/socket/ZeroMQSocketRunnable.scala
+++ b/communication/src/main/scala/com/ibm/spark/communication/socket/ZeroMQSocketRunnable.scala
@@ -152,11 +152,13 @@ class ZeroMQSocketRunnable(
         Thread.sleep(1)
       }
     } catch {
-      case throwable: Throwable =>
-        logger.error("Unexpected exception in 0mq socket runnable!", throwable)
+      case ex: Exception =>
+        logger.error("Unexpected exception in 0mq socket runnable!", ex)
     } finally {
-      Try(socket.close()).failed.foreach {
-        case throwable: Throwable =>
+      try{
+        socket.close()
+      } catch {
+        case ex: Exception =>
           logger.error("Failed to close socket!", _: Throwable)
       }
     }
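
The core of the change is a small reference-counted registry; a minimal self-contained sketch of the same pattern, with generic stand-ins for the ZMQ context and socket types:

```
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._

// Generic sketch of SocketManager's bookkeeping: every socket is built
// against a fresh context, and a context is terminated only when the last
// socket registered against it has been closed.
class ContextRegistry[Ctx, Sock](newContext: () => Ctx, closeContext: Ctx => Unit) {
  private val socketToContext = new ConcurrentHashMap[Sock, Ctx]().asScala

  def withNewContext(mkSocket: Ctx => Sock): Sock = {
    val ctx = newContext()
    val socket = mkSocket(ctx)
    socketToContext.put(socket, ctx)
    socket
  }

  def closeSocket(socket: Sock)(closeSock: Sock => Unit): Unit = {
    closeSock(socket)
    socketToContext.remove(socket).foreach { ctx =>
      // Close the context only if no other socket still references it.
      if (!socketToContext.values.exists(_ == ctx)) closeContext(ctx)
    }
  }
}
```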


[43/50] [abbrv] incubator-toree git commit: Removed code that was adding jar to conf... no longer needed

Posted by lr...@apache.org.
Removed code that was adding jar to conf... no longer needed


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/b7f4ed1c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/b7f4ed1c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/b7f4ed1c

Branch: refs/heads/master
Commit: b7f4ed1c1a0feeb46550ca7b62243eaa63f8b87d
Parents: a3c719e
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Tue Dec 8 10:13:58 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Dec 8 10:13:58 2015 -0600

----------------------------------------------------------------------
 .../scala/com/ibm/spark/kernel/api/Kernel.scala | 80 +-------------------
 1 file changed, 2 insertions(+), 78 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/b7f4ed1c/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
index fe6bc2d..3ff4f85 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
@@ -331,6 +331,8 @@ class Kernel (
     _javaSparkContext = new JavaSparkContext(_sparkContext)
     _sqlContext = new SQLContext(_sparkContext)
 
+    logger.info( s"Connecting to spark.master ${_sparkConf.getOption("spark.master").getOrElse("not_set")}")
+
     updateInterpreterWithSparkContext(sparkContext)
 
     magicLoader.dependencyMap =
@@ -396,84 +398,6 @@ class Kernel (
   ) = {
 
     interpreter.bindSparkContext(sparkContext)
-    /*
-    interpreter.doQuietly {
-      logger.debug("Binding context into interpreter")
-      interpreter.bind(
-        "sc", "org.apache.spark.SparkContext",
-        sparkContext, List( """@transient"""))
-
-      // NOTE: This is needed because interpreter blows up after adding
-      //       dependencies to SparkContext and Interpreter before the
-      //       cluster has been used... not exactly sure why this is the case
-      // TODO: Investigate why the cluster has to be initialized in the kernel
-      //       to avoid the kernel's interpreter blowing up (must be done
-      //       inside the interpreter)
-      logger.debug("Initializing Spark cluster in interpreter")
-
-      interpreter.doQuietly {
-        interpreter.interpret("""
-                                | val $toBeNulled = {
-                                | var $toBeNulled = sc.emptyRDD.collect()
-                                | $toBeNulled = null
-                                |  }
-                                |
-                                |""".stripMargin)
-      }
-    }
-    */
-
-    // Add ourselves as a dependency
-    // TODO: Provide ability to point to library as commandline argument
-    // TODO: Provide better method to determine if can add ourselves
-    // TODO: Avoid duplicating request for master twice (initializeSparkContext
-    //       also does this)
-    val master = sparkContext.getConf.get("spark.master")
-
-    // If in local mode, do not need to add our jars as dependencies
-    if (!master.toLowerCase.startsWith("local")) {
-      @inline def getJarPathFor(klass: Class[_]): String =
-        klass.getProtectionDomain.getCodeSource.getLocation.getPath
-
-      val interpreterC = interpreterManager.interpreters.values.map(_.getClass)
-
-      // TODO: Provide less hard-coded solution in case additional dependencies
-      //       are added or classes are refactored to different projects
-      val classDep = Seq(
-
-        // Macro project
-        classOf[com.ibm.spark.annotations.Experimental],
-
-        // Protocol project
-        classOf[com.ibm.spark.kernel.protocol.v5.KernelMessage],
-
-        // Communication project
-        classOf[com.ibm.spark.communication.SocketManager],
-
-        // Kernel-api project
-        classOf[com.ibm.spark.kernel.api.KernelLike],
-
-        // Scala-interpreter project
-        //classOf[com.ibm.spark.kernel.interpreter.scala.ScalaInterpreter],
-
-        // PySpark-interpreter project
-        //classOf[com.ibm.spark.kernel.interpreter.pyspark.PySparkInterpreter],
-
-        // SparkR-interpreter project
-        //classOf[com.ibm.spark.kernel.interpreter.sparkr.SparkRInterpreter],
-
-        // Kernel project
-        classOf[com.ibm.spark.boot.KernelBootstrap]
-      )
-
-      val jarPaths = (interpreterC ++ classDep).map(getJarPathFor)
-
-      logger.info("Adding kernel jars to cluster:\n- " +
-        jarPaths.mkString("\n- "))
-      jarPaths.foreach(sparkContext.addJar)
-    } else {
-      logger.info("Running in local mode! Not adding self as dependency!")
-    }
   }
 
   override def interpreter(name: String): Option[Interpreter] = {
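
For reference, the mechanism the deleted block relied on, reduced to a hedged sketch; note that `getCodeSource` can return null for classes not loaded from a jar, which the original did not guard against:

```
import org.apache.spark.SparkContext

object JarShipping {
  // Resolve the jar a class was loaded from, if any.
  def jarPathFor(klass: Class[_]): Option[String] =
    Option(klass.getProtectionDomain.getCodeSource)
      .map(_.getLocation.getPath)

  // Ship the jars backing the given classes to the cluster.
  def addAsDependencies(sc: SparkContext, classes: Seq[Class[_]]): Unit =
    classes.flatMap(jarPathFor).distinct.foreach(sc.addJar)
}
```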


[48/50] [abbrv] incubator-toree git commit: Upgrade sbt to version 0.13.9

Posted by lr...@apache.org.
Upgrade sbt to version 0.13.9


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/f81c327a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/f81c327a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/f81c327a

Branch: refs/heads/master
Commit: f81c327a524fe965dcadd07d44f4688b245b0248
Parents: 5a163a9
Author: Jakob Odersky <jo...@gmail.com>
Authored: Thu Dec 10 14:37:18 2015 -0800
Committer: Jakob Odersky <jo...@gmail.com>
Committed: Fri Dec 11 13:53:18 2015 -0800

----------------------------------------------------------------------
 Vagrantfile              | 6 +++---
 project/Common.scala     | 3 ---
 project/build.properties | 2 +-
 3 files changed, 4 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/f81c327a/Vagrantfile
----------------------------------------------------------------------
diff --git a/Vagrantfile b/Vagrantfile
index 09132da..a4f4986 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -98,9 +98,9 @@ fi
 
 # If sbt is not installed, install it
 if ! flag_is_set SBT; then
-  wget --progress=bar:force http://dl.bintray.com/sbt/debian/sbt-0.13.7.deb && \
-  dpkg -i sbt-0.13.7.deb && \
-  rm sbt-0.13.7.deb && \
+  wget --progress=bar:force http://dl.bintray.com/sbt/debian/sbt-0.13.9.deb && \
+  dpkg -i sbt-0.13.9.deb && \
+  rm sbt-0.13.9.deb && \
   set_flag SBT
 fi
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/f81c327a/project/Common.scala
----------------------------------------------------------------------
diff --git a/project/Common.scala b/project/Common.scala
index 0c5f6e5..337b620 100644
--- a/project/Common.scala
+++ b/project/Common.scala
@@ -37,8 +37,6 @@ object Common {
     if (snapshot) s"$versionNumber-SNAPSHOT"
     else versionNumber
   private val buildScalaVersion = "2.10.4"
-  private val buildSbtVersion   = "0.13.7"
-
 
 
   // Global dependencies provided to all projects
@@ -82,7 +80,6 @@ object Common {
     organization := buildOrganization,
     version := buildVersion,
     scalaVersion := buildScalaVersion,
-    sbtVersion := buildSbtVersion,
     libraryDependencies ++= buildLibraryDependencies,
     isSnapshot := snapshot,
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/f81c327a/project/build.properties
----------------------------------------------------------------------
diff --git a/project/build.properties b/project/build.properties
index bb200f4..02cb92b 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1,2 +1,2 @@
-sbt.version=0.13.7
+sbt.version=0.13.9
 


[03/50] [abbrv] incubator-toree git commit: Make sure kernel has default interpreter; add bindSparkContext function to Interpreter trait.

Posted by lr...@apache.org.
Make sure kernel has default interpreter
Add bindSparkContext function to Interpreter trait.


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/d2172c1a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/d2172c1a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/d2172c1a

Branch: refs/heads/master
Commit: d2172c1aa90ffd155ecf882f44abfc1b2aa9ccb0
Parents: 708180a
Author: Brian Burns <bb...@us.ibm.com>
Authored: Wed Nov 4 17:16:18 2015 -0500
Committer: Brian Burns <bb...@us.ibm.com>
Committed: Wed Nov 4 17:16:18 2015 -0500

----------------------------------------------------------------------
 .../com/ibm/spark/interpreter/Interpreter.scala |  4 +++
 .../boot/layer/ComponentInitialization.scala    | 14 ++++++++-
 .../scala/com/ibm/spark/kernel/api/Kernel.scala |  6 +++-
 .../interpreter/scala/ScalaInterpreter.scala    | 30 ++++++++++++++++++++
 4 files changed, 52 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/d2172c1a/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
----------------------------------------------------------------------
diff --git a/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala b/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
index c649e2f..24fe9dc 100644
--- a/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
+++ b/kernel-api/src/main/scala/com/ibm/spark/interpreter/Interpreter.scala
@@ -18,6 +18,8 @@ package com.ibm.spark.interpreter
 
 import java.net.URL
 
+import org.apache.spark.SparkContext
+
 import scala.tools.nsc.interpreter._
 
 trait Interpreter {
@@ -68,6 +70,8 @@ trait Interpreter {
    */
   def doQuietly[T](body: => T): T
 
+  def bindSparkContext(sparkContext: SparkContext): Unit = ???
+
   /**
    * Binds a variable in the interpreter to a value.
    * @param variableName The name to expose the value in the interpreter

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/d2172c1a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
index 55b133d..df09028 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/layer/ComponentInitialization.scala
@@ -86,8 +86,11 @@ trait StandardComponentInitialization extends ComponentInitialization {
     val dependencyDownloader = initializeDependencyDownloader(config)
     val magicLoader = initializeMagicLoader(
       config, interpreter, dependencyDownloader)
+    //val kernel = initializeKernel(
+    //  config, actorLoader, interpreter, commManager, magicLoader
+    //)
     val kernel = initializeKernel(
-      config, actorLoader, interpreter, commManager, magicLoader
+      config, actorLoader, null, commManager, magicLoader
     )
     val responseMap = initializeResponseMap()
 
@@ -118,6 +121,12 @@ trait StandardComponentInitialization extends ComponentInitialization {
       config.getString("default_interpreter").toLowerCase match {
         case "scala" =>
           logger.info("Using Scala interpreter as default!")
+          interpreter.doQuietly {
+            interpreter.bind(
+              "kernel", "com.ibm.spark.kernel.api.Kernel",
+              kernel, List( """@transient implicit""")
+            )
+          }
           interpreter
         case "pyspark" =>
           logger.info("Using PySpark interpreter as default!")
@@ -135,6 +144,7 @@ trait StandardComponentInitialization extends ComponentInitialization {
           interpreter
       }
 
+    kernel.interpreter = defaultInterpreter
     initializeSparkContext(config, kernel, appName)
 
     (commStorage, commRegistrar, commManager, defaultInterpreter, kernel,
@@ -279,12 +289,14 @@ trait StandardComponentInitialization extends ComponentInitialization {
       commManager,
       magicLoader
     )
+    /*
     interpreter.doQuietly {
       interpreter.bind(
         "kernel", "com.ibm.spark.kernel.api.Kernel",
         kernel, List( """@transient implicit""")
       )
     }
+    */
     magicLoader.dependencyMap.setKernel(kernel)
 
     kernel

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/d2172c1a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
index 2090593..58676c3 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/api/Kernel.scala
@@ -54,7 +54,7 @@ import com.ibm.spark.global.ExecuteRequestState
 class Kernel (
   private val config: Config,
   private val actorLoader: ActorLoader,
-  val interpreter: Interpreter,
+  var interpreter: Interpreter,
   val comm: CommManager,
   val magicLoader: MagicLoader
 ) extends KernelLike with LogLike {
@@ -383,6 +383,9 @@ class Kernel (
   protected[kernel] def updateInterpreterWithSparkContext(
     sparkContext: SparkContext
   ) = {
+
+    interpreter.bindSparkContext(sparkContext)
+    /*
     interpreter.doQuietly {
       logger.debug("Binding context into interpreter")
       interpreter.bind(
@@ -407,6 +410,7 @@ class Kernel (
                                 |""".stripMargin)
       }
     }
+    */
 
     // Add ourselves as a dependency
     // TODO: Provide ability to point to library as commandline argument

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/d2172c1a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
----------------------------------------------------------------------
diff --git a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
index a327f68..f774a5a 100644
--- a/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
+++ b/scala-interpreter/src/main/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreter.scala
@@ -24,6 +24,7 @@ import java.util.concurrent.ExecutionException
 import akka.actor.Actor
 import akka.actor.Actor.Receive
 import com.ibm.spark.global.StreamState
+import com.ibm.spark.interpreter
 import com.ibm.spark.interpreter._
 import com.ibm.spark.interpreter.imports.printers.{WrapperConsole, WrapperSystem}
 import com.ibm.spark.kernel.api.KernelOptions
@@ -458,6 +459,35 @@ class ScalaInterpreter(
     sparkIMain.beQuietDuring[T](body)
   }
 
+  override def bindSparkContext(sparkContext: SparkContext) = {
+
+    doQuietly {
+      logger.debug("Binding context into interpreter")
+      bind(
+        "sc", "org.apache.spark.SparkContext",
+        sparkContext, List( """@transient"""))
+
+      // NOTE: This is needed because interpreter blows up after adding
+      //       dependencies to SparkContext and Interpreter before the
+      //       cluster has been used... not exactly sure why this is the case
+      // TODO: Investigate why the cluster has to be initialized in the kernel
+      //       to avoid the kernel's interpreter blowing up (must be done
+      //       inside the interpreter)
+      logger.debug("Initializing Spark cluster in interpreter")
+
+      doQuietly {
+        interpret("""
+                                | val $toBeNulled = {
+                                | var $toBeNulled = sc.emptyRDD.collect()
+                                | $toBeNulled = null
+                                |  }
+                                |
+                                |""".stripMargin)
+      }
+    }
+
+  }
+
   override def bind(
     variableName: String, typeName: String,
     value: Any, modifiers: List[String]
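
Since the trait method defaults to `???` (a `NotImplementedError` at runtime), existing interpreters compile unchanged but must override it before the kernel invokes it. A minimal sketch against an abridged version of the trait:

```
import org.apache.spark.SparkContext

// Abridged to the member under discussion; the real trait has many more.
trait Interpreter {
  def bindSparkContext(sparkContext: SparkContext): Unit = ???
}

// A hypothetical interpreter opting in to the new hook.
class LoggingInterpreter extends Interpreter {
  override def bindSparkContext(sparkContext: SparkContext): Unit =
    println(s"Bound SparkContext for application '${sparkContext.appName}'")
}
```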


[22/50] [abbrv] incubator-toree git commit: Removed pack and configure versions through make

Posted by lr...@apache.org.
Removed pack and configure versions through make


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/9314c48d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/9314c48d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/9314c48d

Branch: refs/heads/master
Commit: 9314c48da0095e60f3f1afe412ea72d103eb3b84
Parents: 4ebe0b2
Author: Corey Stubbs and Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Nov 18 09:29:09 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Nov 24 08:49:50 2015 -0600

----------------------------------------------------------------------
 Makefile                     |  48 +++++------------
 Vagrantfile                  |   2 +
 kernel-api/build.sbt         |   3 --
 kernel/build.sbt             |   5 --
 kernel/project/plugins.sbt   |   4 --
 project/Build.scala          |  11 ++--
 project/Common.scala         | 109 ++++++++++++++++++--------------------
 project/plugins.sbt          |   7 ---
 protocol/build.sbt           |   3 --
 protocol/project/plugins.sbt |   3 --
 10 files changed, 71 insertions(+), 124 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index e48d473..1a6282b 100644
--- a/Makefile
+++ b/Makefile
@@ -14,50 +14,26 @@
 # limitations under the License.
 #
 
-.PHONY: clean build build-image dev vagrantup
+.PHONY: clean build init dev test
 
-#   Container Properties
-KERNEL_CONTAINER?=spark-kernel
-STDIN_PORT?=48000
-SHELL_PORT?=48001
-IOPUB_PORT?=48002
-CONTROL_PORT?=48003
-HB_PORT?=48004
-IP?=0.0.0.0
-VERSION?=0.1.5-SNAPSHOT
+VERSION?=0.1.5
+IS_SNAPSHOT?=true
+APACHE_SPARK_VERSION?=1.5.1
+APACHE_HADOOP_VERSION?=2.3.0
+
+ENV_OPTS=APACHE_SPARK_VERSION=$(APACHE_SPARK_VERSION) APACHE_HADOOP_VERSION=$(APACHE_HADOOP_VERSION) VERSION=$(VERSION)
 
 clean:
 	vagrant ssh -c "cd /src/spark-kernel/ && sbt clean"
 	@-rm -r dist
 
-build-image: IMAGE_NAME?cloudet/spark-kernel
-build-image: CACHE?=""
-build-image:
-	vagrant ssh -c "cd /src/spark-kernel && docker build $(CACHE) -t $(FULL_IMAGE) ."
-
-run-image: KERNEL_CONTAINER?=spark-kernel
-run-image: STDIN_PORT?=48000
-run-image: SHELL_PORT?=48001
-run-image: IOPUB_PORT?=48002
-run-image: CONTROL_PORT?=48003
-run-image: HB_PORT?=48004
-run-image: IP?=0.0.0.0
-run-image: build-image
-	vagrant ssh -c "docker rm -f $(KERNEL_CONTAINER) || true"
-	vagrant ssh -c "docker run -d \
-											--name=$(KERNEL_CONTAINER) \
-											-e "STDIN_PORT=$(STDIN_PORT)" \
-											-e "SHELL_PORT=$(SHELL_PORT)" \
-											-e "IOPUB_PORT=$(IOPUB_PORT)" \
-											-e "CONTROL_PORT=$(CONTROL_PORT)" \
-											-e "HB_PORT=$(HB_PORT)" -e "IP=$(IP)" \
-											$(FULL_IMAGE)"
-
-vagrantup:
+init:
 	vagrant up
 
 kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar: ${shell find ./*/src/main/**/*}
-	vagrant ssh -c "cd /src/spark-kernel/ && sbt kernel/assembly"
+kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar: ${shell find ./*/build.sbt}
+kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar: project/build.properties project/Build.scala project/Common.scala project/plugins.sbt
+	vagrant ssh -c "cd /src/spark-kernel/ && $(ENV_OPTS) sbt kernel/assembly"
 
 build: kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar
 
@@ -65,7 +41,7 @@ dev: dist
 	vagrant ssh -c "cd ~ && ipython notebook --ip=* --no-browser"
 
 test:
-	vagrant ssh -c "cd /src/spark-kernel/ && sbt compile test"
+	vagrant ssh -c "cd /src/spark-kernel/ && $(ENV_OPTS) sbt compile test"
 
 dist: build
 	@mkdir -p dist/spark-kernel/bin dist/spark-kernel/lib

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/Vagrantfile
----------------------------------------------------------------------
diff --git a/Vagrantfile b/Vagrantfile
index 110befe..47f27b4 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -195,5 +195,7 @@ Vagrant.configure("2") do |config|
   config.vm.provider :virtualbox do |vb|
     vb.customize ["modifyvm", :id, "--memory", "2048"]
     vb.customize ["modifyvm", :id, "--cpus", "2"]
+    vb.name = "spark-kernel-vm"
+
   end
 end

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/kernel-api/build.sbt
----------------------------------------------------------------------
diff --git a/kernel-api/build.sbt b/kernel-api/build.sbt
index 9995e85..11df204 100644
--- a/kernel-api/build.sbt
+++ b/kernel-api/build.sbt
@@ -1,4 +1,3 @@
-import xerial.sbt.Pack._
 import Common._
 /*
  * Copyright 2015 IBM Corp.
@@ -15,8 +14,6 @@ import Common._
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-pack <<= pack dependsOn compile
-
 //
 // SCALA INTERPRETER DEPENDENCIES
 //

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/kernel/build.sbt
----------------------------------------------------------------------
diff --git a/kernel/build.sbt b/kernel/build.sbt
index 2c0d371..7b01dd0 100644
--- a/kernel/build.sbt
+++ b/kernel/build.sbt
@@ -1,5 +1,4 @@
 import Common._
-import xerial.sbt.Pack._
 /*
  * Copyright 2014 IBM Corp.
  *
@@ -15,10 +14,6 @@ import xerial.sbt.Pack._
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-pack <<= pack dependsOn (rebuildIvyXml dependsOn deliverLocal)
-
-packArchive <<= packArchive dependsOn (rebuildIvyXml dependsOn deliverLocal)
-
 //
 // TEST DEPENDENCIES
 //

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/kernel/project/plugins.sbt
----------------------------------------------------------------------
diff --git a/kernel/project/plugins.sbt b/kernel/project/plugins.sbt
index ef39ec4..542f406 100644
--- a/kernel/project/plugins.sbt
+++ b/kernel/project/plugins.sbt
@@ -17,7 +17,3 @@
 logLevel := Level.Warn
 
 resolvers += Classpaths.sbtPluginReleases
-
-// Provides ability to create a pack containing all jars and a script to run
-// them using `sbt pack` or `sbt pack-archive` to generate a *.tar.gz file
-addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.6.1")

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/project/Build.scala
----------------------------------------------------------------------
diff --git a/project/Build.scala b/project/Build.scala
index b0d867e..9116a53 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -25,7 +25,6 @@ import sbtbuildinfo.Plugin._
 import sbtunidoc.Plugin.UnidocKeys._
 import sbtunidoc.Plugin._
 import scoverage.ScoverageSbtPlugin
-import xerial.sbt.Pack._
 import com.typesafe.sbt.SbtGit.{GitKeys => git}
 import sbtassembly.AssemblyKeys._
 
@@ -97,9 +96,7 @@ trait SubProjects extends Settings with TestTasks {
   lazy val kernel = addTestTasksToProject(Project(
     id = "kernel",
     base = file("kernel"),
-    settings = fullSettings ++
-      packSettings ++ Seq(
-        packMain := Map("sparkkernel" -> "com.ibm.spark.SparkKernel"),
+    settings = fullSettings ++ Seq(
         test in assembly := {}
       )
   )) dependsOn(
@@ -168,7 +165,7 @@ trait SubProjects extends Settings with TestTasks {
   lazy val kernel_api = addTestTasksToProject(Project(
     id = "kernel-api",
     base = file("kernel-api"),
-    settings = fullSettings ++ packSettings
+    settings = fullSettings
   )) dependsOn(macros % "test->test;compile->compile")
 
   /**
@@ -182,7 +179,7 @@ trait SubProjects extends Settings with TestTasks {
     sourceGenerators in Compile <+= buildInfo,
     buildInfoKeys ++= Seq[BuildInfoKey](
       version, scalaVersion,
-      "sparkVersion" -> Common.sparkVersion.value,
+      "sparkVersion" -> Common.sparkVersion,
       "buildDate" -> {
         val simpleDateFormat = new SimpleDateFormat("yyyyMMdd-HHmmss")
         val now = Calendar.getInstance.getTime
@@ -212,7 +209,7 @@ trait SubProjects extends Settings with TestTasks {
   lazy val protocol = addTestTasksToProject(Project(
     id = "protocol",
     base = file("protocol"),
-    settings = fullSettings ++ buildInfoSettings ++ buildSettings ++ packSettings
+    settings = fullSettings ++ buildInfoSettings ++ buildSettings
   )) dependsOn(macros % "test->test;compile->compile")
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/project/Common.scala
----------------------------------------------------------------------
diff --git a/project/Common.scala b/project/Common.scala
index f170dea..f61daaf 100644
--- a/project/Common.scala
+++ b/project/Common.scala
@@ -22,6 +22,7 @@ import scala.util.Properties
 
 object Common {
   //  Parameters for publishing to artifact repositories
+  val versionNumber             = Properties.envOrElse("VERSION", "0.0.0-dev")
   val snapshot                  = Properties.envOrElse("IS_SNAPSHOT","true").toBoolean
   val repoPort                  = Properties.envOrElse("REPO_PORT","")
   val repoHost                  = Properties.envOrElse("REPO_HOST","")
@@ -30,7 +31,7 @@ object Common {
   val repoEndpoint              = Properties.envOrElse("REPO_ENDPOINT", if(snapshot) "/nexus/content/repositories/snapshots/" else "/nexus/content/repositories/releases/")
   val repoUrl                   = Properties.envOrElse("REPO_URL", s"http://${repoHost}:${repoPort}${repoEndpoint}")
 
-  private val versionNumber     = "0.1.5"
+
   private val buildOrganization = "com.ibm.spark"
   private val buildVersion      =
     if (snapshot) s"$versionNumber-SNAPSHOT"
@@ -38,7 +39,7 @@ object Common {
   private val buildScalaVersion = "2.10.4"
   private val buildSbtVersion   = "0.13.7"
 
-  lazy val sparkVersion = settingKey[String]("The Apache Spark version to use")
+
 
   // Global dependencies provided to all projects
   private var buildLibraryDependencies = Seq(
@@ -52,65 +53,61 @@ object Common {
     "org.mockito" % "mockito-all" % "1.9.5" % "test"   // MIT
   )
 
-  lazy val hadoopVersion = settingKey[String]("The Apache Hadoop version to use")
-
   // The prefix used for our custom artifact names
   private val artifactPrefix = "ibm-spark"
+  lazy val sparkVersion = {
+    val sparkEnvironmentVariable = "APACHE_SPARK_VERSION"
+    val defaultSparkVersion = "1.5.1"
 
-  val settings: Seq[Def.Setting[_]] = Seq(
-    organization := buildOrganization,
-    version := buildVersion,
-    scalaVersion := buildScalaVersion,
-    sbtVersion := buildSbtVersion,
-    libraryDependencies ++= buildLibraryDependencies,
-    isSnapshot := snapshot,
-    sparkVersion := {
-      val sparkEnvironmentVariable = "APACHE_SPARK_VERSION"
-      val defaultSparkVersion = "1.5.1"
+    val _sparkVersion = Properties.envOrNone(sparkEnvironmentVariable)
 
-      val _sparkVersion = Properties.envOrNone(sparkEnvironmentVariable)
-
-      if (_sparkVersion.isEmpty) {
-        scala.Console.out.println(
-          s"""
-             |[INFO] Using default Apache Spark $defaultSparkVersion!
+    if (_sparkVersion.isEmpty) {
+      scala.Console.out.println(
+        s"""
+           |[INFO] Using default Apache Spark $defaultSparkVersion!
            """.stripMargin.trim.replace('\n', ' '))
-        defaultSparkVersion
-      } else {
-        val version = _sparkVersion.get
-        scala.Console.out.println(
-          s"""
-             |[INFO] Using Apache Spark $version provided from
-             |$sparkEnvironmentVariable!
+      defaultSparkVersion
+    } else {
+      val version = _sparkVersion.get
+      scala.Console.out.println(
+        s"""
+           |[INFO] Using Apache Spark $version provided from
+                                                |$sparkEnvironmentVariable!
            """.stripMargin.trim.replace('\n', ' '))
-        version
-      }
-    },
-    hadoopVersion := {
-      val hadoopEnvironmentVariable = "APACHE_HADOOP_VERSION"
-      val defaultHadoopVersion = "2.3.0"
+      version
+    }
+  }
 
-      val _hadoopVersion = Properties.envOrNone(hadoopEnvironmentVariable)
+  lazy val hadoopVersion = {
+    val hadoopEnvironmentVariable = "APACHE_HADOOP_VERSION"
+    val defaultHadoopVersion = "2.3.0"
 
-      if (_hadoopVersion.isEmpty) {
-        scala.Console.out.println(
-          s"""
-             |[INFO] Using default Apache Hadoop $defaultHadoopVersion!
+    val _hadoopVersion = Properties.envOrNone(hadoopEnvironmentVariable)
+
+    if (_hadoopVersion.isEmpty) {
+      scala.Console.out.println(
+        s"""
+           |[INFO] Using default Apache Hadoop $defaultHadoopVersion!
            """.stripMargin.trim.replace('\n', ' '))
-        defaultHadoopVersion
-      } else {
-        val version = _hadoopVersion.get
-        scala.Console.out.println(
-          s"""
-             |[INFO] Using Apache Hadoop $version provided from
-             |$hadoopEnvironmentVariable!
+      defaultHadoopVersion
+    } else {
+      val version = _hadoopVersion.get
+      scala.Console.out.println(
+        s"""
+           |[INFO] Using Apache Hadoop $version provided from
+                                                 |$hadoopEnvironmentVariable!
            """.stripMargin.trim.replace('\n', ' '))
-        version
-      }
-    },
-
-
+      version
+    }
+  }
 
+  val settings: Seq[Def.Setting[_]] = Seq(
+    organization := buildOrganization,
+    version := buildVersion,
+    scalaVersion := buildScalaVersion,
+    sbtVersion := buildSbtVersion,
+    libraryDependencies ++= buildLibraryDependencies,
+    isSnapshot := snapshot,
 
     scalacOptions in (Compile, doc) ++= Seq(
       // Ignore packages (for Scaladoc) not from our project
@@ -176,13 +173,13 @@ object Common {
       name = "netty"
     )
     ),
-    "org.apache.spark" %% "spark-streaming" % "1.5.1" % "provided",      // Apache v2
-    "org.apache.spark" %% "spark-sql" % "1.5.1" % "provided",            // Apache v2
-    "org.apache.spark" %% "spark-mllib" % "1.5.1" % "provided",          // Apache v2
-    "org.apache.spark" %% "spark-graphx" % "1.5.1" % "provided",         // Apache v2
-    "org.apache.spark" %% "spark-repl" % "1.5.1"  % "provided" excludeAll // Apache v2
+    "org.apache.spark" %% "spark-streaming" % sparkVersion % "provided",
+    "org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
+    "org.apache.spark" %% "spark-mllib" % sparkVersion % "provided",
+    "org.apache.spark" %% "spark-graphx" % sparkVersion % "provided",
+    "org.apache.spark" %% "spark-repl" % sparkVersion  % "provided" excludeAll
       ExclusionRule(organization = "org.apache.hadoop"),
-    "org.apache.hadoop" % "hadoop-client" % "2.3.0" % "provided" excludeAll
+    "org.apache.hadoop" % "hadoop-client" % hadoopVersion % "provided" excludeAll
       ExclusionRule(organization = "javax.servlet"))
 
   // ==========================================================================

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/project/plugins.sbt
----------------------------------------------------------------------
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 491f0ca..7c01ef5 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -29,16 +29,9 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.1")
 // `sbt dependencyTree`; there are other commands provided as well
 addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
 
-// Provides ability to create a pack containing all jars and a script to run them
-// using `sbt pack` or `sbt pack-archive` to generate a *.tar.gz file
-addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.6.1")
-
 // Provides ability to create an uber-jar
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.0")
 
-//  Provides the ability to package our project as a docker image
-addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "0.5.2")
-
 // Provides a generated build info object to sync between build and application
 addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.3.2")
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/protocol/build.sbt
----------------------------------------------------------------------
diff --git a/protocol/build.sbt b/protocol/build.sbt
index b8b0d22..e705600 100644
--- a/protocol/build.sbt
+++ b/protocol/build.sbt
@@ -1,4 +1,3 @@
-import xerial.sbt.Pack._
 /*
  * Copyright 2014 IBM Corp.
  *
@@ -14,8 +13,6 @@ import xerial.sbt.Pack._
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-pack <<= pack dependsOn compile
-
 resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
 
 //

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/9314c48d/protocol/project/plugins.sbt
----------------------------------------------------------------------
diff --git a/protocol/project/plugins.sbt b/protocol/project/plugins.sbt
index 79742c8..7d420a0 100644
--- a/protocol/project/plugins.sbt
+++ b/protocol/project/plugins.sbt
@@ -13,6 +13,3 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-// Provides ability to create a pack containing all jars and a script to run
-// them using `sbt pack` or `sbt pack-archive` to generate a *.tar.gz file
-addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.6.1")
\ No newline at end of file
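
The Spark and Hadoop version blocks above duplicate one pattern: an eager lookup of an environment variable with a logged default. A factored sketch of that pattern (the helper name is an assumption, not part of the build):

```
import scala.util.Properties

object VersionFromEnv {
  def apply(envVar: String, default: String, label: String): String =
    Properties.envOrNone(envVar) match {
      case Some(version) =>
        scala.Console.out.println(s"[INFO] Using $label $version provided from $envVar!")
        version
      case None =>
        scala.Console.out.println(s"[INFO] Using default $label $default!")
        default
    }
}

// e.g. lazy val sparkVersion  = VersionFromEnv("APACHE_SPARK_VERSION", "1.5.1", "Apache Spark")
//      lazy val hadoopVersion = VersionFromEnv("APACHE_HADOOP_VERSION", "2.3.0", "Apache Hadoop")
```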


[42/50] [abbrv] incubator-toree git commit: Merge pull request #227 from lbustelo/MinimizeAssembly

Posted by lr...@apache.org.
Merge pull request #227 from lbustelo/MinimizeAssembly

Removed scala libs from assembly

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/9c8a01fc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/9c8a01fc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/9c8a01fc

Branch: refs/heads/master
Commit: 9c8a01fc59164a804e197809b1c866cc38cf8dee
Parents: a3c719e 37ae175
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Tue Dec 8 08:39:42 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Tue Dec 8 08:39:42 2015 -0600

----------------------------------------------------------------------
 kernel/build.sbt | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------



[49/50] [abbrv] incubator-toree git commit: Merge pull request #231 from jodersky/sbt-latest

Posted by lr...@apache.org.
Merge pull request #231 from jodersky/sbt-latest

Upgrade sbt to version 0.13.9

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/042debc2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/042debc2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/042debc2

Branch: refs/heads/master
Commit: 042debc2bd0ce34c677f1928dbc1199b33e1f710
Parents: 5a163a9 f81c327
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Fri Dec 11 17:27:21 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Fri Dec 11 17:27:21 2015 -0600

----------------------------------------------------------------------
 Vagrantfile              | 6 +++---
 project/Common.scala     | 3 ---
 project/build.properties | 2 +-
 3 files changed, 4 insertions(+), 7 deletions(-)
----------------------------------------------------------------------



[19/50] [abbrv] incubator-toree git commit: Added guava as a dependency to get it in assembly

Posted by lr...@apache.org.
Added guava as a dependency to get it in assembly


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/dfffa6c0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/dfffa6c0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/dfffa6c0

Branch: refs/heads/master
Commit: dfffa6c0184b0e2b00b6376b941308d1e09d46e2
Parents: e002a1b
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Thu Nov 19 17:40:31 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Nov 24 08:49:50 2015 -0600

----------------------------------------------------------------------
 kernel-api/build.sbt | 35 +----------------------------------
 1 file changed, 1 insertion(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/dfffa6c0/kernel-api/build.sbt
----------------------------------------------------------------------
diff --git a/kernel-api/build.sbt b/kernel-api/build.sbt
index 11df204..831bd44 100644
--- a/kernel-api/build.sbt
+++ b/kernel-api/build.sbt
@@ -33,43 +33,10 @@ libraryDependencies ++= Seq(
 )
 
 //
-// SPARK DEPENDENCIES
-//
-// NOTE: Currently, version must match deployed Spark cluster version.
-//
-
-// TODO: Mark these as provided and bring them in via the kernel project
-//       so users wanting to implement a magic do not bring in Spark itself
-//libraryDependencies ++= Seq(
-//  "org.apache.spark" %% "spark-core" % sparkVersion.value excludeAll( // Apache v2
-//    ExclusionRule(organization = "org.apache.hadoop"),
-//
-//    // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
-//    ExclusionRule(
-//      organization = "org.jboss.netty",
-//      name = "netty"
-//    )
-//  ),
-//  "org.apache.spark" %% "spark-streaming" % sparkVersion.value,      // Apache v2
-//  "org.apache.spark" %% "spark-sql" % sparkVersion.value,            // Apache v2
-//  "org.apache.spark" %% "spark-mllib" % sparkVersion.value,          // Apache v2
-//  "org.apache.spark" %% "spark-graphx" % sparkVersion.value,         // Apache v2
-//  "org.apache.spark" %% "spark-repl" % sparkVersion.value excludeAll // Apache v2
-//    ExclusionRule(organization = "org.apache.hadoop")
-//)
-
-//
-// HADOOP DEPENDENCIES
-//
-//libraryDependencies ++= Seq(
-//  "org.apache.hadoop" % "hadoop-client" % hadoopVersion.value excludeAll
-//    ExclusionRule(organization = "javax.servlet")
-//)
-
-//
 // EXECUTION DEPENDENCIES
 //
 libraryDependencies += "org.apache.commons" % "commons-exec" % "1.3"
+libraryDependencies += "com.google.guava" % "guava" % "14.0.1"
 
 //
 // CLI DEPENDENCIES


[29/50] [abbrv] incubator-toree git commit: Merge remote-tracking branch 'upstream/master'

Posted by lr...@apache.org.
Merge remote-tracking branch 'upstream/master'


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/074b15ec
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/074b15ec
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/074b15ec

Branch: refs/heads/master
Commit: 074b15ec1e3cd36b88921df5b3fc3fb3f18e50ad
Parents: 06071e8 ecfa5dd
Author: wm624@hotmail.com <wm...@hotmail.com>
Authored: Wed Dec 2 12:23:02 2015 -0800
Committer: wm624@hotmail.com <wm...@hotmail.com>
Committed: Wed Dec 2 12:23:02 2015 -0800

----------------------------------------------------------------------
 .gitignore                                      |   3 +
 .travis.yml                                     |  16 +-
 Makefile                                        |  76 +++---
 README.md                                       |   9 +-
 Vagrantfile                                     |   7 +-
 etc/bin/spark-kernel                            |  36 +++
 kernel-api/build.sbt                            |  38 +--
 .../com/ibm/spark/interpreter/Interpreter.scala |  13 ++
 .../com/ibm/spark/kernel/api/KernelLike.scala   |   6 +
 kernel/build.sbt                                |   5 -
 kernel/project/plugins.sbt                      |   4 -
 .../com/ibm/spark/boot/CommandLineOptions.scala |  28 ++-
 .../boot/layer/ComponentInitialization.scala    | 234 ++-----------------
 .../spark/boot/layer/InterpreterManager.scala   |  66 ++++++
 .../scala/com/ibm/spark/kernel/api/Kernel.scala |  50 ++--
 .../protocol/v5/handler/CommCloseHandler.scala  |  10 +-
 .../protocol/v5/handler/CommMsgHandler.scala    |  10 +-
 .../protocol/v5/handler/CommOpenHandler.scala   |  10 +-
 .../ibm/spark/boot/CommandLineOptionsSpec.scala |  46 ++--
 .../StandardComponentInitializationSpec.scala   | 114 ---------
 .../com/ibm/spark/kernel/api/KernelSpec.scala   |  10 +-
 .../v5/handler/CommCloseHandlerSpec.scala       |  29 ++-
 .../v5/handler/CommMsgHandlerSpec.scala         |  27 +++
 .../v5/handler/CommOpenHandlerSpec.scala        |  29 +++
 .../InterpreterActorSpecForIntegration.scala    |  22 +-
 .../PostProcessorSpecForIntegration.scala       |  21 +-
 .../scala/test/utils/DummyInterpreter.scala     | 112 +++++++++
 .../scala/test/utils/SparkKernelDeployer.scala  |  44 +---
 project/Build.scala                             |  13 +-
 project/Common.scala                            |  94 ++++----
 project/plugins.sbt                             |   8 +-
 protocol/build.sbt                              |   7 +-
 protocol/project/plugins.sbt                    |   3 -
 .../pyspark/PySparkInterpreter.scala            |  22 +-
 .../com/ibm/spark/magic/builtin/PySpark.scala   |   2 +-
 resources/compile/reference.conf                |  12 +-
 resources/test/reference.conf                   |  11 +-
 .../interpreter/scala/ScalaInterpreter.scala    | 120 ++++++++--
 .../com/ibm/spark/magic/builtin/Scala.scala     |   2 +-
 .../scala/ScalaInterpreterSpec.scala            |   5 +-
 .../AddExternalJarMagicSpecForIntegration.scala |  21 +-
 .../interpreter/sparkr/SparkRInterpreter.scala  |  12 +-
 .../com/ibm/spark/magic/builtin/SparkR.scala    |   2 +-
 .../kernel/interpreter/sql/SqlInterpreter.scala |  15 +-
 .../scala/com/ibm/spark/magic/builtin/Sql.scala |   2 +-
 45 files changed, 753 insertions(+), 673 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/074b15ec/scala-interpreter/src/test/scala/com/ibm/spark/kernel/interpreter/scala/ScalaInterpreterSpec.scala
----------------------------------------------------------------------


[41/50] [abbrv] incubator-toree git commit: Removed scala libs from assembly

Posted by lr...@apache.org.
Removed scala libs from assembly


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/37ae175b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/37ae175b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/37ae175b

Branch: refs/heads/master
Commit: 37ae175b76a8a0bbefa94d8cf28306c0c320159e
Parents: a9de6aa
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Mon Dec 7 09:27:39 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Mon Dec 7 09:27:39 2015 -0600

----------------------------------------------------------------------
 kernel/build.sbt | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/37ae175b/kernel/build.sbt
----------------------------------------------------------------------
diff --git a/kernel/build.sbt b/kernel/build.sbt
index 7b01dd0..733283e 100644
--- a/kernel/build.sbt
+++ b/kernel/build.sbt
@@ -20,6 +20,8 @@ import Common._
 libraryDependencies +=
   "org.spark-project.akka" %% "akka-testkit" % "2.3.4-spark" % "test" // MIT
 
+assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeScala = false)
+
 //
 // CUSTOM TASKS
 //


[40/50] [abbrv] incubator-toree git commit: Merge pull request #226 from ibm-et/FixWorkDirPath

Posted by lr...@apache.org.
Merge pull request #226 from ibm-et/FixWorkDirPath

Fixed work directory for `make dev` in Vagrant mode

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/a3c719e5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/a3c719e5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/a3c719e5

Branch: refs/heads/master
Commit: a3c719e58173dc69eb508b3a431f75474710c554
Parents: a9de6aa 2e20d17
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Fri Dec 4 10:24:19 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Fri Dec 4 10:24:19 2015 -0600

----------------------------------------------------------------------
 Makefile | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------



[10/50] [abbrv] incubator-toree git commit: Attach parent header to the comm writer for open and close messages.

Posted by lr...@apache.org.
Attach parent header to the comm writer for open and close messages.


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/7c45d156
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/7c45d156
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/7c45d156

Branch: refs/heads/master
Commit: 7c45d1568c1bf0d30f2a858ec6bcf949c78be0b1
Parents: d1f93bb
Author: wellecks <we...@gmail.com>
Authored: Tue Nov 17 12:58:35 2015 -0600
Committer: wellecks <we...@gmail.com>
Committed: Tue Nov 17 12:58:35 2015 -0600

----------------------------------------------------------------------
 .../protocol/v5/handler/CommCloseHandler.scala  | 10 ++++---
 .../protocol/v5/handler/CommOpenHandler.scala   | 10 ++++---
 .../v5/handler/CommCloseHandlerSpec.scala       | 29 +++++++++++++++++++-
 .../v5/handler/CommOpenHandlerSpec.scala        | 29 ++++++++++++++++++++
 4 files changed, 69 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/7c45d156/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandler.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandler.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandler.scala
index 52f657b..2c87dd7 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandler.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandler.scala
@@ -42,22 +42,24 @@ class CommCloseHandler(
 {
   override def process(kernelMessage: KernelMessage): Future[_] = future {
     logKernelMessageAction("Initiating Comm Close for", kernelMessage)
+
+    val kmBuilder = KMBuilder().withParent(kernelMessage)
+
     Utilities.parseAndHandle(
       kernelMessage.contentString,
       CommClose.commCloseReads,
-      handler = handleCommClose,
+      handler = handleCommClose(kmBuilder),
       errHandler = handleParseError
     )
   }
 
-  private def handleCommClose(commClose: CommClose) = {
+  private def handleCommClose(kmBuilder: KMBuilder)(commClose: CommClose) = {
     val commId = commClose.comm_id
     val data = commClose.data
 
     logger.debug(s"Received comm_close with id '$commId'")
 
-    // TODO: Should we be reusing something from the KernelMessage?
-    val commWriter = new KernelCommWriter(actorLoader, KMBuilder(), commId)
+    val commWriter = new KernelCommWriter(actorLoader, kmBuilder, commId)
 
     commStorage.getCommIdCallbacks(commId) match {
       case None             =>

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/7c45d156/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandler.scala
----------------------------------------------------------------------
diff --git a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandler.scala b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandler.scala
index 5230271..80e2b35 100644
--- a/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandler.scala
+++ b/kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandler.scala
@@ -42,15 +42,18 @@ class CommOpenHandler(
 {
   override def process(kernelMessage: KernelMessage): Future[_] = future {
     logKernelMessageAction("Initiating Comm Open for", kernelMessage)
+
+    val kmBuilder = KMBuilder().withParent(kernelMessage)
+
     Utilities.parseAndHandle(
       kernelMessage.contentString,
       CommOpen.commOpenReads,
-      handler = handleCommOpen,
+      handler = handleCommOpen(kmBuilder),
       errHandler = handleParseError
     )
   }
 
-  private def handleCommOpen(commOpen: CommOpen) = {
+  private def handleCommOpen(kmBuilder: KMBuilder)(commOpen: CommOpen) = {
     val commId = commOpen.comm_id
     val targetName = commOpen.target_name
     val data = commOpen.data
@@ -58,8 +61,7 @@ class CommOpenHandler(
     logger.debug(
       s"Received comm_open for target '$targetName' with id '$commId'")
 
-    // TODO: Should we be reusing something from the KernelMessage?
-    val commWriter = new KernelCommWriter(actorLoader, KMBuilder(), commId)
+    val commWriter = new KernelCommWriter(actorLoader, kmBuilder, commId)
 
     commStorage.getTargetCallbacks(targetName) match {
       case None             =>

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/7c45d156/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandlerSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandlerSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandlerSpec.scala
index 90d5e15..49582f8 100644
--- a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandlerSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommCloseHandlerSpec.scala
@@ -23,7 +23,7 @@ import akka.testkit.{TestProbe, ImplicitSender, TestKit}
 import com.ibm.spark.kernel.protocol.v5
 import com.ibm.spark.kernel.protocol.v5.content.{ClearOutput, CommClose}
 import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
-import com.ibm.spark.kernel.protocol.v5.{SystemActorType, KMBuilder}
+import com.ibm.spark.kernel.protocol.v5.{KernelMessage, SystemActorType, KMBuilder}
 import com.ibm.spark.comm.{CommRegistrar, CommWriter, CommCallbacks, CommStorage}
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
@@ -123,6 +123,33 @@ class CommCloseHandlerSpec extends TestKit(
         //       limit? Is there a different logical approach?
         kernelMessageRelayProbe.expectNoMsg(200.milliseconds)
       }
+
+      it("should include the parent's header in the parent header of " +
+        "outgoing messages"){
+
+        // Register a callback that sends a message using the comm writer
+        val closeCallback: CommCallbacks.CloseCallback =
+          new CommCallbacks.CloseCallback() {
+            def apply(v1: CommWriter, v2: v5.UUID, v4: v5.MsgData) =
+              v1.writeMsg(v5.MsgData.Empty)
+          }
+        val callbacks = (new CommCallbacks).addCloseCallback(closeCallback)
+        doReturn(Some(callbacks)).when(spyCommStorage)
+          .getCommIdCallbacks(TestCommId)
+
+        // Send a comm close message
+        val msg = kmBuilder
+          .withHeader(CommClose.toTypeString)
+          .withContentString(CommClose(TestCommId, v5.MsgData.Empty))
+          .build
+        commCloseHandler ! msg
+
+        // Verify that the message sent by the handler has the desired property
+        kernelMessageRelayProbe.fishForMessage(200.milliseconds) {
+          case KernelMessage(_, _, _, parentHeader, _, _) =>
+            parentHeader == msg.header
+        }
+      }
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/7c45d156/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandlerSpec.scala
----------------------------------------------------------------------
diff --git a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandlerSpec.scala b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandlerSpec.scala
index afc9f78..64013e9 100644
--- a/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandlerSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/handler/CommOpenHandlerSpec.scala
@@ -123,6 +123,35 @@ class CommOpenHandlerSpec extends TestKit(
         //       limit? Is there a different logical approach?
         kernelMessageRelayProbe.expectNoMsg(200.milliseconds)
       }
+
+      it("should include the parent's header in the parent header of " +
+        "outgoing messages"){
+
+        // Register a callback that sends a message using the comm writer
+        val openCallback: CommCallbacks.OpenCallback =
+          new CommCallbacks.OpenCallback() {
+            def apply(v1: CommWriter, v2: v5.UUID, v3: String, v4: v5.MsgData) =
+              v1.writeMsg(v5.MsgData.Empty)
+          }
+        val callbacks = (new CommCallbacks).addOpenCallback(openCallback)
+        doReturn(Some(callbacks)).when(spyCommStorage)
+          .getTargetCallbacks(TestTargetName)
+
+        // Send a comm_open message
+        val msg = kmBuilder
+          .withHeader(CommOpen.toTypeString)
+          .withContentString(
+            CommOpen(TestCommId, TestTargetName, v5.MsgData.Empty)
+          )
+          .build
+        commOpenHandler ! msg
+
+        // Verify that the message sent by the handler has the desired property
+        kernelMessageRelayProbe.fishForMessage(200.milliseconds) {
+          case KernelMessage(_, _, _, parentHeader, _, _) =>
+            parentHeader == msg.header
+        }
+      }
     }
   }
 }


[15/50] [abbrv] incubator-toree git commit: Merge pull request #206 from jodersky/ignore-ensime

Posted by lr...@apache.org.
Merge pull request #206 from jodersky/ignore-ensime

[MINOR] Ignore ensime files

Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/29856a78
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/29856a78
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/29856a78

Branch: refs/heads/master
Commit: 29856a785affeee02db1e5d2db19231faf2e7956
Parents: e6e278d b87812c
Author: Chip Senkbeil <ch...@gmail.com>
Authored: Tue Nov 17 21:26:28 2015 -0600
Committer: Chip Senkbeil <ch...@gmail.com>
Committed: Tue Nov 17 21:26:28 2015 -0600

----------------------------------------------------------------------
 .gitignore | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------



[37/50] [abbrv] incubator-toree git commit: Fixed hardcoded 1.5.1 version on dependencies

Posted by lr...@apache.org.
Fixed hardcoded 1.5.1 version on dependencies


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/f207e2fe
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/f207e2fe
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/f207e2fe

Branch: refs/heads/master
Commit: f207e2fec62e0490fa94c009ba183e8ca9b81701
Parents: a4f4bea
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Thu Dec 3 13:23:49 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Thu Dec 3 13:23:49 2015 -0600

----------------------------------------------------------------------
 project/Common.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/f207e2fe/project/Common.scala
----------------------------------------------------------------------
diff --git a/project/Common.scala b/project/Common.scala
index cef0d45..0c5f6e5 100644
--- a/project/Common.scala
+++ b/project/Common.scala
@@ -141,7 +141,7 @@ object Common {
   ) ++ rebuildIvyXmlSettings // Include our rebuild ivy xml settings
 
 
-  buildLibraryDependencies ++= Seq( "org.apache.spark" %% "spark-core" % "1.5.1"  % "provided" excludeAll( // Apache v2
+  buildLibraryDependencies ++= Seq( "org.apache.spark" %% "spark-core" % sparkVersion  % "provided" excludeAll( // Apache v2
 
     // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
     ExclusionRule(

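A hedged sketch of how a settingKey can stand in for the hardcoded literal in
an sbt build. The env-var wiring below is an illustration only (it mirrors the
APACHE_SPARK_VERSION variable the Makefile already exports); the project's
actual default and plumbing may differ.

    // build.sbt-style sketch; sparkVersion mirrors the key declared in
    // project/Common.scala
    lazy val sparkVersion = settingKey[String]("The Apache Spark version to use")

    // Assumed wiring: default to 1.5.1, let APACHE_SPARK_VERSION override it
    sparkVersion := sys.env.getOrElse("APACHE_SPARK_VERSION", "1.5.1")

    libraryDependencies +=
      "org.apache.spark" %% "spark-core" % sparkVersion.value % "provided"

With something like this in place, Make targets that export
APACHE_SPARK_VERSION and a plain sbt invocation resolve the same Spark
artifacts.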

[24/50] [abbrv] incubator-toree git commit: `make dist` creates a version file and targz package

Posted by lr...@apache.org.
`make dist` creates a version file and targz package


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/5ccdfeb8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/5ccdfeb8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/5ccdfeb8

Branch: refs/heads/master
Commit: 5ccdfeb85abe3328fcfd11fab93e1bfc9e204a74
Parents: 9314c48
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Nov 18 11:58:23 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Nov 24 08:49:50 2015 -0600

----------------------------------------------------------------------
 Makefile | 24 ++++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/5ccdfeb8/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index 1a6282b..e4f17f7 100644
--- a/Makefile
+++ b/Makefile
@@ -14,14 +14,17 @@
 # limitations under the License.
 #
 
-.PHONY: clean build init dev test
+.PHONY: clean build init dev test test-travis
 
 VERSION?=0.1.5
 IS_SNAPSHOT?=true
 APACHE_SPARK_VERSION?=1.5.1
 APACHE_HADOOP_VERSION?=2.3.0
 
-ENV_OPTS=APACHE_SPARK_VERSION=$(APACHE_SPARK_VERSION) APACHE_HADOOP_VERSION=$(APACHE_HADOOP_VERSION) VERSION=$(VERSION)
+ENV_OPTS=APACHE_SPARK_VERSION=$(APACHE_SPARK_VERSION) APACHE_HADOOP_VERSION=$(APACHE_HADOOP_VERSION) VERSION=$(VERSION) IS_SNAPSHOT=$(IS_SNAPSHOT)
+
+FULL_VERSION=$(shell echo $(VERSION)`[ "$(IS_SNAPSHOT)" == "true" ] && (echo '-SNAPSHOT')` )
+ASSEMBLY_JAR=$(shell echo kernel-assembly-$(FULL_VERSION).jar )
 
 clean:
 	vagrant ssh -c "cd /src/spark-kernel/ && sbt clean"
@@ -30,12 +33,12 @@ clean:
 init:
 	vagrant up
 
-kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar: ${shell find ./*/src/main/**/*}
-kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar: ${shell find ./*/build.sbt}
-kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar: project/build.properties project/Build.scala project/Common.scala project/plugins.sbt
+kernel/target/scala-2.10/$(ASSEMBLY_JAR): ${shell find ./*/src/main/**/*}
+kernel/target/scala-2.10/$(ASSEMBLY_JAR): ${shell find ./*/build.sbt}
+kernel/target/scala-2.10/$(ASSEMBLY_JAR): project/build.properties project/Build.scala project/Common.scala project/plugins.sbt
 	vagrant ssh -c "cd /src/spark-kernel/ && $(ENV_OPTS) sbt kernel/assembly"
 
-build: kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar
+build: kernel/target/scala-2.10/$(ASSEMBLY_JAR)
 
 dev: dist
 	vagrant ssh -c "cd ~ && ipython notebook --ip=* --no-browser"
@@ -43,7 +46,12 @@ dev: dist
 test:
 	vagrant ssh -c "cd /src/spark-kernel/ && $(ENV_OPTS) sbt compile test"
 
-dist: build
+dist: COMMIT=$(shell git rev-parse --short=12 --verify HEAD)
+dist: VERSION_FILE=dist/spark-kernel/VERSION
+dist: kernel/target/scala-2.10/$(ASSEMBLY_JAR)
 	@mkdir -p dist/spark-kernel/bin dist/spark-kernel/lib
 	@cp -r etc/bin/* dist/spark-kernel/bin/.
-	@cp kernel/target/scala-2.10/kernel-assembly-*.jar dist/spark-kernel/lib/.
\ No newline at end of file
+	@cp kernel/target/scala-2.10/$(ASSEMBLY_JAR) dist/spark-kernel/lib/.
+	@echo "VERSION: $(FULL_VERSION)" > $(VERSION_FILE)
+	@echo "COMMIT: $(COMMIT)" >> $(VERSION_FILE)
+	@cd dist; tar -cvzf spark-kernel-$(FULL_VERSION).tar.gz spark-kernel
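
The new FULL_VERSION and ASSEMBLY_JAR variables derive every artifact name
from VERSION and IS_SNAPSHOT in one place. The same naming logic as a small
runnable Scala example (object and function names here are ours, purely for
illustration):

    object VersionNaming extends App {
      def fullVersion(version: String, isSnapshot: Boolean): String =
        if (isSnapshot) version + "-SNAPSHOT" else version

      val v = fullVersion("0.1.5", isSnapshot = true)
      println(s"kernel-assembly-$v.jar")   // kernel-assembly-0.1.5-SNAPSHOT.jar
      println(s"spark-kernel-$v.tar.gz")   // spark-kernel-0.1.5-SNAPSHOT.tar.gz
    }

One portability note: "==" inside [ ... ] is a bash extension, so the
FULL_VERSION test only works where $(shell) runs bash; a strict POSIX sh
expects the comparison to be spelled with a single "=".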


[14/50] [abbrv] incubator-toree git commit: Ignore ensime files.

Posted by lr...@apache.org.
Ignore ensime files.


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/b87812c8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/b87812c8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/b87812c8

Branch: refs/heads/master
Commit: b87812c89c5ebb2e7b0cdf4593a6d65fac99a7b7
Parents: 9db161f
Author: Jakob Odersky <jo...@gmail.com>
Authored: Tue Nov 17 14:59:05 2015 -0800
Committer: Jakob Odersky <jo...@gmail.com>
Committed: Tue Nov 17 17:12:36 2015 -0800

----------------------------------------------------------------------
 .gitignore | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/b87812c8/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index f7667bc..310328c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,5 @@ scratch/
 test-output/
 out/
 
+.ensime
+.ensime_cache/


[18/50] [abbrv] incubator-toree git commit: Switching to use spark-submit and assembly to run kernel

Posted by lr...@apache.org.
Switching to use spark-submit and assembly to run kernel


Project: http://git-wip-us.apache.org/repos/asf/incubator-toree/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-toree/commit/ef77e3f2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-toree/tree/ef77e3f2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-toree/diff/ef77e3f2

Branch: refs/heads/master
Commit: ef77e3f2eb6102df29f4333ee937ef6aebffe6c0
Parents: 6f46c20
Author: Gino Bustelo <pa...@us.ibm.com>
Authored: Wed Nov 11 17:28:08 2015 -0600
Committer: Gino Bustelo <pa...@us.ibm.com>
Committed: Tue Nov 24 08:49:50 2015 -0600

----------------------------------------------------------------------
 .gitignore           |  1 +
 Makefile             | 22 ++++++++++++++--------
 Vagrantfile          |  5 +++--
 etc/bin/spark-kernel | 36 ++++++++++++++++++++++++++++++++++++
 kernel-api/build.sbt | 42 +++++++++++++++++++++---------------------
 project/Build.scala  |  4 +++-
 project/Common.scala | 29 ++++++++++++++++++++++++++---
 project/plugins.sbt  |  3 +++
 protocol/build.sbt   |  4 +++-
 9 files changed, 110 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 310328c..12db42c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,7 @@ scratch/
 **/*ivy.xml
 test-output/
 out/
+dist/
 
 .ensime
 .ensime_cache/

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index e41dbcd..e48d473 100644
--- a/Makefile
+++ b/Makefile
@@ -24,13 +24,11 @@ IOPUB_PORT?=48002
 CONTROL_PORT?=48003
 HB_PORT?=48004
 IP?=0.0.0.0
+VERSION?=0.1.5-SNAPSHOT
 
 clean:
 	vagrant ssh -c "cd /src/spark-kernel/ && sbt clean"
-
-kernel/target/pack/bin/sparkkernel: vagrantup ${shell find ./*/src/main/**/*}
-	vagrant ssh -c "cd /src/spark-kernel/ && sbt compile && sbt pack"
-	vagrant ssh -c "cd /src/spark-kernel/kernel/target/pack && make install"
+	@-rm -r dist
 
 build-image: IMAGE_NAME?=cloudet/spark-kernel
 build-image: CACHE?=""
@@ -58,10 +56,18 @@ run-image: build-image
 vagrantup:
 	vagrant up
 
-build: kernel/target/pack/bin/sparkkernel
+kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar: ${shell find ./*/src/main/**/*}
+	vagrant ssh -c "cd /src/spark-kernel/ && sbt kernel/assembly"
+
+build: kernel/target/scala-2.10/kernel-assembly-$(VERSION).jar
 
-dev: build
+dev: dist
 	vagrant ssh -c "cd ~ && ipython notebook --ip=* --no-browser"
 
-test: build
-	vagrant ssh -c "cd /src/spark-kernel/ && sbt test"
\ No newline at end of file
+test:
+	vagrant ssh -c "cd /src/spark-kernel/ && sbt compile test"
+
+dist: build
+	@mkdir -p dist/spark-kernel/bin dist/spark-kernel/lib
+	@cp -r etc/bin/* dist/spark-kernel/bin/.
+	@cp kernel/target/scala-2.10/kernel-assembly-*.jar dist/spark-kernel/lib/.
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/Vagrantfile
----------------------------------------------------------------------
diff --git a/Vagrantfile b/Vagrantfile
index 9ff2207..110befe 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -111,6 +111,7 @@ if ! flag_is_set SPARK; then
   wget http://apache.arvixe.com/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.3.tgz && \
   tar xvzf spark-${SPARK_VERSION}-bin-hadoop2.3.tgz && \
   ln -s spark-${SPARK_VERSION}-bin-hadoop2.3 spark && \
+  export SPARK_HOME=/opt/spark && \
   set_flag SPARK
 fi
 
@@ -122,13 +123,13 @@ cat << EOF > /home/vagrant/.ipython/kernels/spark/kernel.json
     "display_name": "Spark 1.5.1 (Scala 2.10.4)",
     "language_info": { "name": "scala" },
     "argv": [
-        "/home/vagrant/local/bin/sparkkernel",
+        "/src/spark-kernel/dist/spark-kernel/bin/spark-kernel",
         "--profile",
         "{connection_file}"
     ],
     "codemirror_mode": "scala",
     "env": {
-        "JVM_OPT": "-Xms1024M -Xmx4096M -Dlog4j.logLevel=trace",
+        "SPARK_OPTS": "--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=trace"
         "MAX_INTERPRETER_THREADS": "16",
         "SPARK_CONFIGURATION": "spark.cores.max=4",
         "CAPTURE_STANDARD_OUT": "true",

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/etc/bin/spark-kernel
----------------------------------------------------------------------
diff --git a/etc/bin/spark-kernel b/etc/bin/spark-kernel
new file mode 100755
index 0000000..18cce1d
--- /dev/null
+++ b/etc/bin/spark-kernel
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+#
+# Copyright 2015 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PROG_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+
+if [ -z "$SPARK_HOME" ]; then
+  echo "SPARK_HOME must be set to the location of a Spark distribution!"
+  exit 1
+fi
+
+echo "Starting Spark Kernel with SPARK_HOME=$SPARK_HOME"
+
+KERNEL_ASSEMBLY=`(cd ${PROG_HOME}/lib; ls -1 kernel-assembly-*.jar;)`
+
+# disable randomized hash for string in Python 3.3+
+export PYTHONHASHSEED=0
+
+exec "$SPARK_HOME"/bin/spark-submit \
+  ${SPARK_OPTS} \
+  --driver-class-path $PROG_HOME/lib/${KERNEL_ASSEMBLY} \
+  --class com.ibm.spark.SparkKernel $PROG_HOME/lib/${KERNEL_ASSEMBLY} "$@"

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/kernel-api/build.sbt
----------------------------------------------------------------------
diff --git a/kernel-api/build.sbt b/kernel-api/build.sbt
index ca2ba01..9995e85 100644
--- a/kernel-api/build.sbt
+++ b/kernel-api/build.sbt
@@ -43,31 +43,31 @@ libraryDependencies ++= Seq(
 
 // TODO: Mark these as provided and bring them in via the kernel project
 //       so users wanting to implement a magic do not bring in Spark itself
-libraryDependencies ++= Seq(
-  "org.apache.spark" %% "spark-core" % sparkVersion.value excludeAll( // Apache v2
-    ExclusionRule(organization = "org.apache.hadoop"),
-
-    // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
-    ExclusionRule(
-      organization = "org.jboss.netty",
-      name = "netty"
-    )
-  ),
-  "org.apache.spark" %% "spark-streaming" % sparkVersion.value,      // Apache v2
-  "org.apache.spark" %% "spark-sql" % sparkVersion.value,            // Apache v2
-  "org.apache.spark" %% "spark-mllib" % sparkVersion.value,          // Apache v2
-  "org.apache.spark" %% "spark-graphx" % sparkVersion.value,         // Apache v2
-  "org.apache.spark" %% "spark-repl" % sparkVersion.value excludeAll // Apache v2
-    ExclusionRule(organization = "org.apache.hadoop")
-)
+//libraryDependencies ++= Seq(
+//  "org.apache.spark" %% "spark-core" % sparkVersion.value excludeAll( // Apache v2
+//    ExclusionRule(organization = "org.apache.hadoop"),
+//
+//    // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
+//    ExclusionRule(
+//      organization = "org.jboss.netty",
+//      name = "netty"
+//    )
+//  ),
+//  "org.apache.spark" %% "spark-streaming" % sparkVersion.value,      // Apache v2
+//  "org.apache.spark" %% "spark-sql" % sparkVersion.value,            // Apache v2
+//  "org.apache.spark" %% "spark-mllib" % sparkVersion.value,          // Apache v2
+//  "org.apache.spark" %% "spark-graphx" % sparkVersion.value,         // Apache v2
+//  "org.apache.spark" %% "spark-repl" % sparkVersion.value excludeAll // Apache v2
+//    ExclusionRule(organization = "org.apache.hadoop")
+//)
 
 //
 // HADOOP DEPENDENCIES
 //
-libraryDependencies ++= Seq(
-  "org.apache.hadoop" % "hadoop-client" % hadoopVersion.value excludeAll
-    ExclusionRule(organization = "javax.servlet")
-)
+//libraryDependencies ++= Seq(
+//  "org.apache.hadoop" % "hadoop-client" % hadoopVersion.value excludeAll
+//    ExclusionRule(organization = "javax.servlet")
+//)
 
 //
 // EXECUTION DEPENDENCIES

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/project/Build.scala
----------------------------------------------------------------------
diff --git a/project/Build.scala b/project/Build.scala
index fc8dc19..b0d867e 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -27,6 +27,7 @@ import sbtunidoc.Plugin._
 import scoverage.ScoverageSbtPlugin
 import xerial.sbt.Pack._
 import com.typesafe.sbt.SbtGit.{GitKeys => git}
+import sbtassembly.AssemblyKeys._
 
 object Build extends Build with Settings with SubProjects with TestTasks {
   /**
@@ -98,7 +99,8 @@ trait SubProjects extends Settings with TestTasks {
     base = file("kernel"),
     settings = fullSettings ++
       packSettings ++ Seq(
-        packMain := Map("sparkkernel" -> "com.ibm.spark.SparkKernel")
+        packMain := Map("sparkkernel" -> "com.ibm.spark.SparkKernel"),
+        test in assembly := {}
       )
   )) dependsOn(
     macros % "test->test;compile->compile",

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/project/Common.scala
----------------------------------------------------------------------
diff --git a/project/Common.scala b/project/Common.scala
index 73e5395..f170dea 100644
--- a/project/Common.scala
+++ b/project/Common.scala
@@ -38,8 +38,11 @@ object Common {
   private val buildScalaVersion = "2.10.4"
   private val buildSbtVersion   = "0.13.7"
 
+  lazy val sparkVersion = settingKey[String]("The Apache Spark version to use")
+
   // Global dependencies provided to all projects
-  private val buildLibraryDependencies = Seq(
+  private var buildLibraryDependencies = Seq(
+
     // Needed to force consistent typesafe config with play json and spark
     "com.typesafe" % "config" % "1.2.1",
     "org.slf4j" % "slf4j-log4j12" % "1.7.5" % "test",
@@ -49,8 +52,6 @@ object Common {
     "org.mockito" % "mockito-all" % "1.9.5" % "test"   // MIT
   )
 
-  lazy val sparkVersion = settingKey[String]("The Apache Spark version to use")
-
   lazy val hadoopVersion = settingKey[String]("The Apache Hadoop version to use")
 
   // The prefix used for our custom artifact names
@@ -108,6 +109,9 @@ object Common {
       }
     },
 
+
+
+
     scalacOptions in (Compile, doc) ++= Seq(
       // Ignore packages (for Scaladoc) not from our project
       "-skip-packages", Seq(
@@ -162,6 +166,25 @@ object Common {
     compile <<= (compile in Compile) dependsOn (rebuildIvyXml dependsOn deliverLocal)
   ) ++ rebuildIvyXmlSettings // Include our rebuild ivy xml settings
 
+
+  buildLibraryDependencies ++= Seq( "org.apache.spark" %% "spark-core" % "1.5.1"  % "provided" excludeAll( // Apache v2
+    ExclusionRule(organization = "org.apache.hadoop"),
+
+    // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
+    ExclusionRule(
+      organization = "org.jboss.netty",
+      name = "netty"
+    )
+    ),
+    "org.apache.spark" %% "spark-streaming" % "1.5.1" % "provided",      // Apache v2
+    "org.apache.spark" %% "spark-sql" % "1.5.1" % "provided",            // Apache v2
+    "org.apache.spark" %% "spark-mllib" % "1.5.1" % "provided",          // Apache v2
+    "org.apache.spark" %% "spark-graphx" % "1.5.1" % "provided",         // Apache v2
+    "org.apache.spark" %% "spark-repl" % "1.5.1"  % "provided" excludeAll // Apache v2
+      ExclusionRule(organization = "org.apache.hadoop"),
+    "org.apache.hadoop" % "hadoop-client" % "2.3.0" % "provided" excludeAll
+      ExclusionRule(organization = "javax.servlet"))
+
   // ==========================================================================
   // = REBUILD IVY XML SETTINGS BELOW
   // ==========================================================================
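
Taken together with the new etc/bin/spark-kernel launcher, the packaging
pattern this commit adopts is: mark the Spark and Hadoop dependencies as
"provided", build an uber-jar with sbt-assembly, and let spark-submit supply
the real Spark classpath at run time. A minimal build.sbt sketch of that
pattern (version numbers as in the diff; everything else is illustrative, not
the project's exact build):

    // "provided": compile against Spark, but keep it out of the assembly
    libraryDependencies +=
      "org.apache.spark" %% "spark-core" % "1.5.1" % "provided"

    // sbt-assembly: skip running tests while building the uber-jar
    test in assembly := {}

At run time the launcher locates kernel-assembly-*.jar under lib/ and execs
spark-submit with it, so the Spark distribution pointed to by SPARK_HOME
supplies everything marked "provided" here.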

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/project/plugins.sbt
----------------------------------------------------------------------
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 4efa32c..491f0ca 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -33,6 +33,9 @@ addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
 // using `sbt pack` or `sbt pack-archive` to generate a *.tar.gz file
 addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.6.1")
 
+// Provides ability to create an uber-jar
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.0")
+
 //  Provides the ability to package our project as a docker image
 addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "0.5.2")
 

http://git-wip-us.apache.org/repos/asf/incubator-toree/blob/ef77e3f2/protocol/build.sbt
----------------------------------------------------------------------
diff --git a/protocol/build.sbt b/protocol/build.sbt
index 36f655a..b8b0d22 100644
--- a/protocol/build.sbt
+++ b/protocol/build.sbt
@@ -22,7 +22,9 @@ resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/release
 // JSON DEPENDENCIES
 //
 libraryDependencies ++= Seq(
-  "com.typesafe.play" %% "play-json" % "2.3.6", // Apache v2
+  "com.typesafe.play" %% "play-json" % "2.3.6" excludeAll( // Apache v2
+      ExclusionRule(organization = "com.fasterxml.jackson.core")
+    ),
   "org.slf4j" % "slf4j-api" % "1.7.5" // MIT
 )