You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2019/05/21 18:56:51 UTC

[GitHub] [spark] mengxr commented on a change in pull request #24615: [SPARK-27488][CORE] Driver interface to support GPU resources

mengxr commented on a change in pull request #24615: [SPARK-27488][CORE] Driver interface to support GPU resources
URL: https://github.com/apache/spark/pull/24615#discussion_r286167488
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/ResourceDiscoverer.scala
 ##########
 @@ -73,21 +85,45 @@ private[spark] object ResourceDiscoverer extends Logging {
           val output = executeAndGetOutput(Seq(script.get), new File("."))
           val parsedJson = parse(output)
           val name = (parsedJson \ "name").extract[String]
-          val addresses = (parsedJson \ "addresses").extract[Array[String]].toArray
+          val addresses = (parsedJson \ "addresses").extract[Array[String]]
+          if (name != resourceName) {
+            throw new SparkException(s"Discovery script: ${script.get} specified via " +
+              s"$discoveryConf returned a resource name: $name that doesn't match the " +
+              s"config name: $resourceName")
+          }
           new ResourceInformation(name, addresses)
         } catch {
           case e @ (_: SparkException | _: MappingException | _: JsonParseException) =>
             throw new SparkException(s"Error running the resource discovery script: $scriptFile" +
-              s" for $resourceType", e)
+              s" for $resourceName", e)
         }
       } else {
-        throw new SparkException(s"Resource script: $scriptFile to discover $resourceType" +
+        throw new SparkException(s"Resource script: $scriptFile to discover $resourceName" +
           s" doesn't exist!")
       }
     } else {
-      throw new SparkException(s"User is expecting to use $resourceType resources but " +
+      throw new SparkException(s"User is expecting to use $resourceName resources but " +
         s"didn't specify a script via conf: $discoveryConf, to find them!")
     }
     result
   }
+
+  // Make sure the actual resources we have on startup is at least the number the user
+  // requested. Note that there is other code in SparkConf that makes sure we have executor configs
+  // for each task resource requirement and that they are large enough. This function
+  // is used by both driver and executors.
+  def checkActualResourcesMeetRequirements(
+      requiredResources: Map[String, String],
 
 Review comment:
   not in this PR, but it would be nice if we used a class to encapsulate the resource request

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org