You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2016/01/19 23:28:04 UTC
spark git commit: [BUILD] Runner for spark packages
Repository: spark
Updated Branches:
refs/heads/master c6f971b4a -> efd7eed32
[BUILD] Runner for spark packages
This is a convenience method added to the SBT build for developers, though if people think it's useful we could consider adding an official script that runs using the assembly instead of compiling on demand. It simply compiles spark (without requiring an assembly), and invokes Spark Submit to download / run the package.
Example Usage:
```
$ build/sbt
> sparkPackage com.databricks:spark-sql-perf_2.10:0.2.4 com.databricks.spark.sql.perf.RunBenchmark --help
```
Author: Michael Armbrust <mi...@databricks.com>
Closes #10834 from marmbrus/sparkPackageRunner.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/efd7eed3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/efd7eed3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/efd7eed3
Branch: refs/heads/master
Commit: efd7eed3222799d66d4fcb68785142dc570c8150
Parents: c6f971b
Author: Michael Armbrust <mi...@databricks.com>
Authored: Tue Jan 19 14:28:00 2016 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Tue Jan 19 14:28:00 2016 -0800
----------------------------------------------------------------------
project/SparkBuild.scala | 15 +++++++++++++++
1 file changed, 15 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/efd7eed3/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 4c34c88..06e561a 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -274,6 +274,11 @@ object SparkBuild extends PomBuild {
* Usage: `build/sbt sparkShell`
*/
val sparkShell = taskKey[Unit]("start a spark-shell.")
+ val sparkPackage = inputKey[Unit](
+ s"""
+ |Download and run a spark package.
+ |Usage `builds/sbt "sparkPackage <group:artifact:version> <MainClass> [args]
+ """.stripMargin)
val sparkSql = taskKey[Unit]("starts the spark sql CLI.")
enable(Seq(
@@ -287,6 +292,16 @@ object SparkBuild extends PomBuild {
(runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
},
+ sparkPackage := {
+ import complete.DefaultParsers._
+ val packages :: className :: otherArgs = spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList
+ val scalaRun = (runner in run).value
+ val classpath = (fullClasspath in Runtime).value
+ val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in "core").value.getCanonicalPath) ++ otherArgs
+ println(args)
+ scalaRun.run("org.apache.spark.deploy.SparkSubmit", classpath.map(_.data), args, streams.value.log)
+ },
+
javaOptions in Compile += "-Dspark.master=local",
sparkSql := {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org