You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2018/02/12 14:49:48 UTC
spark git commit: [SPARK-23391][CORE] It may lead to overflow for
some integer multiplication
Repository: spark
Updated Branches:
refs/heads/master 0e2c266de -> 4a4dd4f36
[SPARK-23391][CORE] It may lead to overflow for some integer multiplication
## What changes were proposed in this pull request?
In `getBlockData`, `blockId.reduceId` is of type `Int`; when it is greater than 2^28, `blockId.reduceId * 8` will overflow
In `decompress0`, `len` and `unitSize` are of type `Int`, so `len * unitSize` may overflow
## How was this patch tested?
N/A
Author: liuxian <li...@zte.com.cn>
Closes #20581 from 10110346/overflow2.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4a4dd4f3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4a4dd4f3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4a4dd4f3
Branch: refs/heads/master
Commit: 4a4dd4f36f65410ef5c87f7b61a960373f044e61
Parents: 0e2c266
Author: liuxian <li...@zte.com.cn>
Authored: Mon Feb 12 08:49:45 2018 -0600
Committer: Sean Owen <so...@cloudera.com>
Committed: Mon Feb 12 08:49:45 2018 -0600
----------------------------------------------------------------------
.../org/apache/spark/shuffle/IndexShuffleBlockResolver.scala | 4 ++--
.../sql/execution/columnar/compression/compressionSchemes.scala | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/4a4dd4f3/core/src/main/scala/org/apache/spark/shuffle/IndexShuffleBlockResolver.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/shuffle/IndexShuffleBlockResolver.scala b/core/src/main/scala/org/apache/spark/shuffle/IndexShuffleBlockResolver.scala
index d88b25c..d3f1c7e 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/IndexShuffleBlockResolver.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/IndexShuffleBlockResolver.scala
@@ -202,13 +202,13 @@ private[spark] class IndexShuffleBlockResolver(
// class of issue from re-occurring in the future which is why they are left here even though
// SPARK-22982 is fixed.
val channel = Files.newByteChannel(indexFile.toPath)
- channel.position(blockId.reduceId * 8)
+ channel.position(blockId.reduceId * 8L)
val in = new DataInputStream(Channels.newInputStream(channel))
try {
val offset = in.readLong()
val nextOffset = in.readLong()
val actualPosition = channel.position()
- val expectedPosition = blockId.reduceId * 8 + 16
+ val expectedPosition = blockId.reduceId * 8L + 16
if (actualPosition != expectedPosition) {
throw new Exception(s"SPARK-22982: Incorrect channel position after index file reads: " +
s"expected $expectedPosition but actual position was $actualPosition.")
http://git-wip-us.apache.org/repos/asf/spark/blob/4a4dd4f3/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
index 79dcf3a..00a1d54 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
@@ -116,7 +116,7 @@ private[columnar] case object PassThrough extends CompressionScheme {
while (pos < capacity) {
if (pos != nextNullIndex) {
val len = nextNullIndex - pos
- assert(len * unitSize < Int.MaxValue)
+ assert(len * unitSize.toLong < Int.MaxValue)
putFunction(columnVector, pos, bufferPos, len)
bufferPos += len * unitSize
pos += len
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org