You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by srowen <gi...@git.apache.org> on 2017/10/26 17:35:32 UTC

[GitHub] spark pull request #18805: [SPARK-19112][CORE] Support for ZStandard codec

Github user srowen commented on a diff in the pull request:

    https://github.com/apache/spark/pull/18805#discussion_r147213697
  
    --- Diff: core/src/main/scala/org/apache/spark/io/CompressionCodec.scala ---
    @@ -216,3 +218,33 @@ private final class SnappyOutputStreamWrapper(os: SnappyOutputStream) extends Ou
         }
       }
     }
    +
    +/**
    + * :: DeveloperApi ::
    + * ZStandard implementation of [[org.apache.spark.io.CompressionCodec]]. For more
    + * details, see http://facebook.github.io/zstd/
    + *
    + * @note The wire protocol for this codec is not guaranteed to be compatible across versions
    + * of Spark. This is intended for use as an internal compression utility within a single Spark
    + * application.
    + */
    +@DeveloperApi
    +class ZStdCompressionCodec(conf: SparkConf) extends CompressionCodec {
    +
    +  override def compressedOutputStream(s: OutputStream): OutputStream = {
    +    // Default the zstd compression level to 1 because it is the fastest
    +    // of the available levels while still giving a reasonably high compression ratio.
    +    val level = conf.getSizeAsBytes("spark.io.compression.zstd.level", "1").toInt
    +    val bufferSize = conf.getSizeAsBytes("spark.io.compression.zstd.bufferSize", "32k").toInt
    --- End diff --
    
    @sitalkedia how about comments like this?


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org