You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2016/11/03 06:50:53 UTC
spark git commit: [SPARK-18200][GRAPHX] Support zero as an initial
capacity in OpenHashSet
Repository: spark
Updated Branches:
refs/heads/master 9ddec8636 -> d24e73647
[SPARK-18200][GRAPHX] Support zero as an initial capacity in OpenHashSet
## What changes were proposed in this pull request?
[SPARK-18200](https://issues.apache.org/jira/browse/SPARK-18200) reports Apache Spark 2.x raises `java.lang.IllegalArgumentException: requirement failed: Invalid initial capacity` while running `triangleCount`. The root cause is that `VertexSet`, a type alias of `OpenHashSet`, does not allow zero as an initial size. This PR loosens the restriction to allow zero.
## How was this patch tested?
Pass the Jenkins test with a new test case in `OpenHashSetSuite`.
Author: Dongjoon Hyun <do...@apache.org>
Closes #15741 from dongjoon-hyun/SPARK-18200.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d24e7364
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d24e7364
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d24e7364
Branch: refs/heads/master
Commit: d24e736471f34ef8f2c12766393379c4213fe96e
Parents: 9ddec86
Author: Dongjoon Hyun <do...@apache.org>
Authored: Wed Nov 2 23:50:50 2016 -0700
Committer: Reynold Xin <rx...@databricks.com>
Committed: Wed Nov 2 23:50:50 2016 -0700
----------------------------------------------------------------------
.../org/apache/spark/util/collection/OpenHashSet.scala | 10 +++++++---
.../apache/spark/util/collection/OpenHashMapSuite.scala | 3 ---
.../apache/spark/util/collection/OpenHashSetSuite.scala | 5 +++++
.../util/collection/PrimitiveKeyOpenHashMapSuite.scala | 3 ---
4 files changed, 12 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/d24e7364/core/src/main/scala/org/apache/spark/util/collection/OpenHashSet.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/collection/OpenHashSet.scala b/core/src/main/scala/org/apache/spark/util/collection/OpenHashSet.scala
index 0f6a425..7a1be85 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/OpenHashSet.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/OpenHashSet.scala
@@ -48,7 +48,7 @@ class OpenHashSet[@specialized(Long, Int) T: ClassTag](
require(initialCapacity <= OpenHashSet.MAX_CAPACITY,
s"Can't make capacity bigger than ${OpenHashSet.MAX_CAPACITY} elements")
- require(initialCapacity >= 1, "Invalid initial capacity")
+ require(initialCapacity >= 0, "Invalid initial capacity")
require(loadFactor < 1.0, "Load factor must be less than 1.0")
require(loadFactor > 0.0, "Load factor must be greater than 0.0")
@@ -271,8 +271,12 @@ class OpenHashSet[@specialized(Long, Int) T: ClassTag](
private def hashcode(h: Int): Int = Hashing.murmur3_32().hashInt(h).asInt()
private def nextPowerOf2(n: Int): Int = {
- val highBit = Integer.highestOneBit(n)
- if (highBit == n) n else highBit << 1
+ if (n == 0) {
+ 2
+ } else {
+ val highBit = Integer.highestOneBit(n)
+ if (highBit == n) n else highBit << 1
+ }
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/d24e7364/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
index 3066e99..335ecb9 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
@@ -49,9 +49,6 @@ class OpenHashMapSuite extends SparkFunSuite with Matchers {
intercept[IllegalArgumentException] {
new OpenHashMap[String, Int](-1)
}
- intercept[IllegalArgumentException] {
- new OpenHashMap[String, String](0)
- }
}
test("primitive value") {
http://git-wip-us.apache.org/repos/asf/spark/blob/d24e7364/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
index 2607a54..210bc5c 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
@@ -176,4 +176,9 @@ class OpenHashSetSuite extends SparkFunSuite with Matchers {
assert(set.size === 1000)
assert(set.capacity > 1000)
}
+
+ test("SPARK-18200 Support zero as an initial set size") {
+ val set = new OpenHashSet[Long](0)
+ assert(set.size === 0)
+ }
}
http://git-wip-us.apache.org/repos/asf/spark/blob/d24e7364/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
index 508e737..f5ee428 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
@@ -49,9 +49,6 @@ class PrimitiveKeyOpenHashMapSuite extends SparkFunSuite with Matchers {
intercept[IllegalArgumentException] {
new PrimitiveKeyOpenHashMap[Int, Int](-1)
}
- intercept[IllegalArgumentException] {
- new PrimitiveKeyOpenHashMap[Int, Int](0)
- }
}
test("basic operations") {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org