You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by la...@apache.org on 2019/02/15 01:30:48 UTC
[incubator-mxnet] branch master updated: Fix jar path and add
missing ones for spark jobs (#14020)
This is an automated email from the ASF dual-hosted git repository.
lanking pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git
The following commit(s) were added to refs/heads/master by this push:
new 8416c56 Fix jar path and add missing ones for spark jobs (#14020)
8416c56 is described below
commit 8416c563f2bc673f15bf890ea13be2a32afdf3ca
Author: Ashutosh Dwivedi <aa...@gmail.com>
AuthorDate: Fri Feb 15 07:00:31 2019 +0530
Fix jar path and add missing ones for spark jobs (#14020)
* Fix jar path and add missing ones for spark jobs
Fix path of jars / add missing jars in spark job
remove print, reduce clutter
* fixes scalastyle violations
* exclude all of javadoc, sources, bundle, and src while searching for jars
* simplified the exclude expression
---
.../apache/mxnet/spark/SharedSparkContext.scala | 42 +++++++++++-----------
1 file changed, 22 insertions(+), 20 deletions(-)
diff --git a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala
index 2efd181..6d36ca5 100644
--- a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala
+++ b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala
@@ -80,30 +80,27 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd
System.getProperty("user.dir")
}
- private def getJarFilePath(root: String): String = {
- for (platform <- List("linux-x86_64-cpu", "linux-x86_64-gpu", "osx-x86_64-cpu")) {
- val jarFiles = new File(s"$root/$platform/target/").listFiles(new FileFilter {
- override def accept(pathname: File) = {
- pathname.getAbsolutePath.endsWith(".jar") &&
- !pathname.getAbsolutePath.contains("javadoc") &&
- !pathname.getAbsolutePath.contains("sources")
- }
- })
- if (jarFiles != null && jarFiles.nonEmpty) {
- return jarFiles.head.getAbsolutePath
+ private def findJars(root: String): Array[File] = {
+ val excludedSuffixes = List("bundle", "src", "javadoc", "sources")
+ new File(root).listFiles(new FileFilter {
+ override def accept(pathname: File) = {
+ pathname.getAbsolutePath.endsWith(".jar") &&
+ excludedSuffixes.forall(!pathname.getAbsolutePath.contains(_))
}
+ })
+ }
+
+ private def getJarFilePath(root: String): String = {
+ val jarFiles = findJars(s"$root/target/")
+ if (jarFiles != null && jarFiles.nonEmpty) {
+ jarFiles.head.getAbsolutePath
+ } else {
+ null
}
- null
}
private def getSparkJar: String = {
- val jarFiles = new File(s"$composeWorkingDirPath/target/").listFiles(new FileFilter {
- override def accept(pathname: File) = {
- pathname.getAbsolutePath.endsWith(".jar") &&
- !pathname.getAbsolutePath.contains("javadoc") &&
- !pathname.getAbsolutePath.contains("sources")
- }
- })
+ val jarFiles = findJars(s"$composeWorkingDirPath/target/")
if (jarFiles != null && jarFiles.nonEmpty) {
jarFiles.head.getAbsolutePath
} else {
@@ -111,6 +108,9 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd
}
}
+ private def getNativeJars(root: String): String =
+ new File(root).listFiles().map(_.toPath).mkString(",")
+
protected def buildLeNet(): MXNet = {
val workingDir = composeWorkingDirPath
val assemblyRoot = s"$workingDir/../assembly"
@@ -130,6 +130,8 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd
protected def buildMlp(): MXNet = {
val workingDir = composeWorkingDirPath
val assemblyRoot = s"$workingDir/../assembly"
+ val nativeRoot = s"$workingDir/../native/target/lib"
+
new MXNet()
.setBatchSize(128)
.setLabelName("softmax_label")
@@ -139,7 +141,7 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd
.setNumEpoch(10)
.setNumServer(1)
.setNumWorker(numWorkers)
- .setExecutorJars(s"${getJarFilePath(assemblyRoot)},$getSparkJar")
+ .setExecutorJars(s"${getJarFilePath(assemblyRoot)},$getSparkJar,${getNativeJars(nativeRoot)}")
.setJava("java")
.setTimeout(0)
}