You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by zs...@apache.org on 2016/10/19 02:43:12 UTC
spark git commit: [SPARK-17711][TEST-HADOOP2.2] Fix hadoop2.2 compilation error
Repository: spark
Updated Branches:
refs/heads/master 5f20ae039 -> 2629cd746
[SPARK-17711][TEST-HADOOP2.2] Fix hadoop2.2 compilation error
## What changes were proposed in this pull request?
Fix hadoop2.2 compilation error.
## How was this patch tested?
Existing tests.
cc tdas zsxwing
Author: Yu Peng <lo...@gmail.com>
Closes #15537 from loneknightpy/fix-17711.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2629cd74
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2629cd74
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2629cd74
Branch: refs/heads/master
Commit: 2629cd74602cfe77188b76428fed62a7a7149315
Parents: 5f20ae0
Author: Yu Peng <lo...@gmail.com>
Authored: Tue Oct 18 19:43:08 2016 -0700
Committer: Shixiong Zhu <sh...@databricks.com>
Committed: Tue Oct 18 19:43:08 2016 -0700
----------------------------------------------------------------------
core/src/main/scala/org/apache/spark/util/Utils.scala | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/2629cd74/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index a4da138..7fba901 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -42,7 +42,6 @@ import scala.util.control.{ControlThrowable, NonFatal}
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
import com.google.common.io.{ByteStreams, Files => GFiles}
import com.google.common.net.InetAddresses
-import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.SystemUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
@@ -1486,10 +1485,10 @@ private[spark] object Utils extends Logging {
val gzInputStream = new GZIPInputStream(new FileInputStream(file))
val bufSize = 1024
val buf = new Array[Byte](bufSize)
- var numBytes = IOUtils.read(gzInputStream, buf)
+ var numBytes = ByteStreams.read(gzInputStream, buf, 0, bufSize)
while (numBytes > 0) {
fileSize += numBytes
- numBytes = IOUtils.read(gzInputStream, buf)
+ numBytes = ByteStreams.read(gzInputStream, buf, 0, bufSize)
}
fileSize
} catch {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org