Posted to reviews@spark.apache.org by srowen <gi...@git.apache.org> on 2018/10/12 18:22:25 UTC

[GitHub] spark pull request #21322: [SPARK-24225][CORE] Support closing AutoClosable ...

GitHub user srowen commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21322#discussion_r224874828
  
    --- Diff: core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala ---
    @@ -384,15 +385,30 @@ private[spark] class MemoryStore(
         }
       }
     
    +  private def maybeReleaseResources(resource: (BlockId, MemoryEntry[_])): Unit = {
    +    maybeReleaseResources(resource._1, resource._2)
    +  }
    +
    +  private def maybeReleaseResources(blockId: BlockId, entry: MemoryEntry[_]): Unit = {
    +    entry match {
    +      case SerializedMemoryEntry(buffer, _, _) => buffer.dispose()
    +      case DeserializedMemoryEntry(values: Array[Any], _, _) => maybeCloseValues(values, blockId)
    +      case _ =>
    +    }
    +  }
    +
    +  private def maybeCloseValues(values: Array[Any], blockId: BlockId): Unit = {
    +    if (blockId.isBroadcast) {
    +      values.foreach(value => Utils.tryClose(value))
    --- End diff --
    
    Just a style thing, but could be `values.foreach(Utils.tryClose)`
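
    For illustration, a minimal self-contained sketch of the suggestion (the tryClose body below is a hypothetical stand-in, not the Utils.tryClose added by this PR): the explicit lambda and the bare method value are interchangeable when passed to foreach, because the compiler eta-expands the method into a function.

        object TryCloseExample {
          // Hypothetical stand-in for Utils.tryClose: close the value if it
          // is AutoCloseable, logging rather than propagating any failure.
          def tryClose(value: Any): Unit = value match {
            case c: AutoCloseable =>
              try c.close() catch {
                case e: Exception => println(s"Failed to close $c: $e")
              }
            case _ => // not closeable, nothing to release
          }

          def main(args: Array[String]): Unit = {
            val values: Array[Any] =
              Array(new java.io.ByteArrayInputStream(Array[Byte](1, 2, 3)), "plain string")
            values.foreach(value => tryClose(value)) // explicit lambda, as in the diff
            values.foreach(tryClose)                 // equivalent: method value via eta-expansion
          }
        }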


---
