Posted to commits@uniffle.apache.org by zu...@apache.org on 2023/01/06 10:35:52 UTC

[incubator-uniffle] branch master updated: [ISSUE-455] Lazily create uncompressedData (#457)

This is an automated email from the ASF dual-hosted git repository.

zuston pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-uniffle.git


The following commit(s) were added to refs/heads/master by this push:
     new 2b756c34 [ISSUE-455] Lazily create uncompressedData (#457)
2b756c34 is described below

commit 2b756c3477d5adbbd60c3929ac7878afa4afcb97
Author: xianjingfeng <58...@qq.com>
AuthorDate: Fri Jan 6 18:35:46 2023 +0800

    [ISSUE-455] Lazily create uncompressedData (#457)
    
    
    ### What changes were proposed in this pull request?
    
    Lazily create `uncompressedData`.
    
    ### Why are the changes needed?
    
    Saves memory: the buffer is now allocated only when a block is actually read, sized to that block's uncompressed length, instead of eagerly at construction time based on the writer buffer size. #455
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    The existing UTs are sufficient.
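
    A minimal, standalone sketch of the pattern applied here (illustrative only; the class and method names are hypothetical, not the Uniffle API, and the actual change is in the diff below):

    ```java
    import java.nio.ByteBuffer;

    // Illustrative sketch: the buffer is no longer sized eagerly from a
    // writer-side config at construction; it is allocated on first use and
    // regrown only when a block's uncompressed length exceeds its capacity.
    public class LazyBufferSketch {

      private ByteBuffer uncompressedData; // stays null until the first block arrives

      byte[] handleBlock(int uncompressedLen) {
        if (uncompressedData == null || uncompressedData.capacity() < uncompressedLen) {
          // todo (carried over from the patch): support off-heap bytebuffer
          uncompressedData = ByteBuffer.allocate(uncompressedLen);
        }
        uncompressedData.clear(); // reuse the backing array across blocks
        // ... decompress the block into uncompressedData here ...
        return uncompressedData.array();
      }

      public static void main(String[] args) {
        LazyBufferSketch sketch = new LazyBufferSketch();
        sketch.handleBlock(64);   // first block: allocates a 64-byte buffer
        sketch.handleBlock(32);   // smaller block: reuses the existing buffer
        sketch.handleBlock(128);  // larger block: regrows to 128 bytes
      }
    }
    ```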
---
 .../apache/spark/shuffle/reader/RssShuffleDataIterator.java    | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/client-spark/common/src/main/java/org/apache/spark/shuffle/reader/RssShuffleDataIterator.java b/client-spark/common/src/main/java/org/apache/spark/shuffle/reader/RssShuffleDataIterator.java
index dd01d517..af2be209 100644
--- a/client-spark/common/src/main/java/org/apache/spark/shuffle/reader/RssShuffleDataIterator.java
+++ b/client-spark/common/src/main/java/org/apache/spark/shuffle/reader/RssShuffleDataIterator.java
@@ -27,7 +27,6 @@ import org.apache.spark.executor.ShuffleReadMetrics;
 import org.apache.spark.serializer.DeserializationStream;
 import org.apache.spark.serializer.Serializer;
 import org.apache.spark.serializer.SerializerInstance;
-import org.apache.spark.shuffle.RssSparkConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import scala.Product2;
@@ -38,7 +37,6 @@ import scala.runtime.BoxedUnit;
 
 import org.apache.uniffle.client.api.ShuffleReadClient;
 import org.apache.uniffle.client.response.CompressedShuffleBlock;
-import org.apache.uniffle.client.util.RssClientConfig;
 import org.apache.uniffle.common.compression.Codec;
 import org.apache.uniffle.common.config.RssConf;
 
@@ -69,13 +67,6 @@ public class RssShuffleDataIterator<K, C> extends AbstractIterator<Product2<K, C
     this.shuffleReadClient = shuffleReadClient;
     this.shuffleReadMetrics = shuffleReadMetrics;
     this.codec = Codec.newInstance(rssConf);
-    // todo: support off-heap bytebuffer
-    this.uncompressedData = ByteBuffer.allocate(
-        (int) rssConf.getSizeAsBytes(
-            RssClientConfig.RSS_WRITER_BUFFER_SIZE,
-            RssSparkConfig.RSS_WRITER_BUFFER_SIZE.defaultValueString()
-        )
-    );
   }
 
   public Iterator<Tuple2<Object, Object>> createKVIterator(ByteBuffer data, int size) {
@@ -121,6 +112,7 @@ public class RssShuffleDataIterator<K, C> extends AbstractIterator<Product2<K, C
 
         int uncompressedLen = compressedBlock.getUncompressLength();
         if (uncompressedData == null || uncompressedData.capacity() < uncompressedLen) {
+          // todo: support off-heap bytebuffer
           uncompressedData = ByteBuffer.allocate(uncompressedLen);
         }
         uncompressedData.clear();
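
A note on the reuse in the new code path above: ByteBuffer.clear() only resets position and limit; it does not reallocate or zero the backing array, so keeping the buffer around and growing it only when a larger uncompressed block arrives makes per-block reuse cheap. A standalone snippet (plain JDK, not Uniffle code) showing that behavior:

```java
import java.nio.ByteBuffer;

public class ClearSemantics {
  public static void main(String[] args) {
    ByteBuffer buf = ByteBuffer.allocate(64);
    buf.put(new byte[48]);   // position advances to 48
    buf.clear();             // position = 0, limit = capacity; contents untouched
    System.out.println(buf.position() + " / " + buf.capacity()); // prints "0 / 64"
  }
}
```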