Posted to commits@predictionio.apache.org by do...@apache.org on 2017/02/26 22:33:55 UTC

incubator-predictionio git commit: Submit JDBC driver JARs using --jars

Repository: incubator-predictionio
Updated Branches:
  refs/heads/ds/docker b6214b46e -> b9a076b4b


Submit JDBC driver JARs using --jars


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/b9a076b4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/b9a076b4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/b9a076b4

Branch: refs/heads/ds/docker
Commit: b9a076b4b7129ac37e5cbda0f1a6580f68f1c7fb
Parents: b6214b4
Author: Donald Szeto <do...@apache.org>
Authored: Sun Feb 26 14:33:22 2017 -0800
Committer: Donald Szeto <do...@apache.org>
Committed: Sun Feb 26 14:33:22 2017 -0800

----------------------------------------------------------------------
 .../predictionio/workflow/WorkflowUtils.scala   | 46 +++++++++++---------
 tests/Dockerfile                                |  2 +-
 .../org/apache/predictionio/tools/Runner.scala  | 14 +++---
 3 files changed, 33 insertions(+), 29 deletions(-)
----------------------------------------------------------------------
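
In short, this change reads the JDBC driver locations from the environment and hands them to spark-submit via --jars, instead of relying only on extra classpath entries. A simplified sketch of the intended effect (the names driverJars and jarsArgs are illustrative, the file-existence check from the real code is omitted, and the driver path comes from the test image further below):

  // With e.g. POSTGRES_JDBC_DRIVER=/drivers/postgresql-9.4-1204.jdbc41.jar set,
  // spark-submit ends up invoked with an argument roughly like
  //   --jars file:/drivers/postgresql-9.4-1204.jdbc41.jar,<deployed engine JARs>
  val driverJars: Seq[java.net.URI] =
    Seq("POSTGRES_JDBC_DRIVER", "MYSQL_JDBC_DRIVER")
      .flatMap(sys.env.get)
      .map(p => new java.io.File(p).toURI)

  val jarsArgs: Seq[String] =
    if (driverJars.nonEmpty) Seq("--jars", driverJars.map(_.toString).mkString(","))
    else Nil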


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/b9a076b4/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
index 93a676a..0e578be 100644
--- a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
+++ b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
@@ -19,28 +19,17 @@
 package org.apache.predictionio.workflow
 
 import java.io.File
-import java.io.FileNotFoundException
-
-import org.apache.predictionio.controller.EmptyParams
-import org.apache.predictionio.controller.EngineFactory
-import org.apache.predictionio.controller.EngineParamsGenerator
-import org.apache.predictionio.controller.Evaluation
-import org.apache.predictionio.controller.Params
-import org.apache.predictionio.controller.PersistentModelLoader
-import org.apache.predictionio.controller.Utils
-import org.apache.predictionio.core.BuildInfo
-
-import com.google.gson.Gson
-import com.google.gson.JsonSyntaxException
+import java.net.URI
+
+import com.google.gson.{Gson, JsonSyntaxException}
 import grizzled.slf4j.Logging
+import org.apache.log4j.{Level, LogManager}
+import org.apache.predictionio.controller._
 import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
-import org.apache.log4j.Level
-import org.apache.log4j.LogManager
 import org.apache.spark.SparkContext
 import org.apache.spark.api.java.JavaRDDLike
 import org.apache.spark.rdd.RDD
 import org.json4s.JsonAST.JValue
-import org.json4s.MappingException
 import org.json4s._
 import org.json4s.native.JsonMethods._
 
@@ -244,12 +233,12 @@ object WorkflowUtils extends Logging {
       "HADOOP_CONF_DIR" -> "core-site.xml",
       "HBASE_CONF_DIR" -> "hbase-site.xml")
 
-    thirdPartyFiles.keys.toSeq.map { k: String =>
+    thirdPartyFiles.keys.toSeq.flatMap { k: String =>
       sys.env.get(k) map { x =>
         val p = Seq(x, thirdPartyFiles(k)).mkString(File.separator)
         if (new File(p).exists) Seq(p) else Seq[String]()
       } getOrElse Seq[String]()
-    }.flatten
+    }
   }
 
   def thirdPartyClasspaths: Seq[String] = {
@@ -260,9 +249,26 @@ object WorkflowUtils extends Logging {
       "MYSQL_JDBC_DRIVER",
       "HADOOP_CONF_DIR",
       "HBASE_CONF_DIR")
-    thirdPartyPaths.map(p =>
+    thirdPartyPaths.flatMap(p =>
       sys.env.get(p).map(Seq(_)).getOrElse(Seq[String]())
-    ).flatten
+    )
+  }
+
+  def thirdPartyJars: Seq[URI] = {
+    val thirdPartyPaths = Seq(
+      "POSTGRES_JDBC_DRIVER",
+      "MYSQL_JDBC_DRIVER")
+    thirdPartyPaths.flatMap(p =>
+      sys.env.get(p) map { f =>
+        val file = new File(f)
+        if (file.exists()) {
+          Seq(file.toURI)
+        } else {
+          warn(s"Environment variable $p is pointing to a nonexistent file $f. Ignoring.")
+          Seq.empty[URI]
+        }
+      } getOrElse Seq.empty[URI]
+    )
   }
 
   def modifyLogging(verbose: Boolean): Unit = {
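
The new thirdPartyJars helper only picks up the JDBC driver variables and skips anything that does not point at an existing file. A rough usage sketch, with an illustrative export line and path:

  // Assuming e.g. export POSTGRES_JDBC_DRIVER=/drivers/postgresql-9.4-1204.jdbc41.jar
  // and the file exists, this yields Seq(file:/drivers/postgresql-9.4-1204.jdbc41.jar).
  // An unset variable contributes nothing; a missing file is skipped with a warning.
  val jars: Seq[java.net.URI] =
    org.apache.predictionio.workflow.WorkflowUtils.thirdPartyJars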

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/b9a076b4/tests/Dockerfile
----------------------------------------------------------------------
diff --git a/tests/Dockerfile b/tests/Dockerfile
index aef2229..dff4fa4 100644
--- a/tests/Dockerfile
+++ b/tests/Dockerfile
@@ -24,7 +24,7 @@ ENV HBASE_VERSION 1.0.0
 ADD docker-files/spark-${SPARK_VERSION}-bin-hadoop2.6.tgz /vendors
 ENV SPARK_HOME /vendors/spark-${SPARK_VERSION}-bin-hadoop2.6
 
-COPY docker-files/postgresql-9.4-1204.jdbc41.jar /drivers
+COPY docker-files/postgresql-9.4-1204.jdbc41.jar /drivers/postgresql-9.4-1204.jdbc41.jar
 COPY docker-files/init.sh init.sh
 COPY docker-files/env-conf/hbase-site.xml ${PIO_HOME}/conf/hbase-site.xml
 COPY docker-files/env-conf/pio-env.sh ${PIO_HOME}/conf/pio-env.sh

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/b9a076b4/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
index d9752df..f98dda1 100644
--- a/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
+++ b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
@@ -15,18 +15,15 @@
  * limitations under the License.
  */
 
-
 package org.apache.predictionio.tools
 
 import java.io.File
 import java.net.URI
 
-import org.apache.predictionio.tools.console.ConsoleArgs
-import org.apache.predictionio.workflow.WorkflowUtils
-import org.apache.predictionio.tools.ReturnTypes._
 import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
+import org.apache.hadoop.fs.{FileSystem, Path}
+import org.apache.predictionio.tools.ReturnTypes._
+import org.apache.predictionio.workflow.WorkflowUtils
 
 import scala.sys.process._
 
@@ -163,8 +160,9 @@ object Runner extends EitherLogging {
     val sparkSubmitCommand =
       Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator))
 
-    val sparkSubmitJars = if (extraJars.nonEmpty) {
-      Seq("--jars", deployedJars.map(_.toString).mkString(","))
+    val sparkSubmitJarsList = WorkflowUtils.thirdPartyJars ++ deployedJars
+    val sparkSubmitJars = if (sparkSubmitJarsList.nonEmpty) {
+      Seq("--jars", sparkSubmitJarsList.map(_.toString).mkString(","))
     } else {
       Nil
     }
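
With that, third-party JDBC drivers are merged into the same --jars list as the deployed engine JARs. A rough sketch of the resulting argument, using hypothetical inputs (the HDFS path is made up for illustration; the driver path matches the test image):

  // Hypothetical inputs, only to show the shape of the final --jars argument.
  val deployedJars = Seq(new java.net.URI("hdfs:///pio/engine-assembly.jar"))
  val thirdParty   = Seq(new java.io.File("/drivers/postgresql-9.4-1204.jdbc41.jar").toURI)

  val sparkSubmitJarsList = thirdParty ++ deployedJars
  val sparkSubmitJars =
    if (sparkSubmitJarsList.nonEmpty)
      Seq("--jars", sparkSubmitJarsList.map(_.toString).mkString(","))
    else Nil
  // sparkSubmitJars == Seq("--jars",
  //   "file:/drivers/postgresql-9.4-1204.jdbc41.jar,hdfs:///pio/engine-assembly.jar")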