You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2015/07/30 10:21:42 UTC
spark git commit: Fix flaky HashedRelationSuite
Repository: spark
Updated Branches:
refs/heads/master 4a8bb9d00 -> 5ba2d4406
Fix flaky HashedRelationSuite
SparkEnv might not have been set in local unit tests, so fall back to a default SparkConf when reading the page size instead of dereferencing SparkEnv.get unconditionally.
Author: Reynold Xin <rx...@databricks.com>
Closes #7784 from rxin/HashedRelationSuite and squashes the following commits:
435d64b [Reynold Xin] Fix flaky HashedRelationSuite
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5ba2d440
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5ba2d440
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5ba2d440
Branch: refs/heads/master
Commit: 5ba2d44068b89fd8e81cfd24f49bf20d373f81b9
Parents: 4a8bb9d
Author: Reynold Xin <rx...@databricks.com>
Authored: Thu Jul 30 01:21:39 2015 -0700
Committer: Reynold Xin <rx...@databricks.com>
Committed: Thu Jul 30 01:21:39 2015 -0700
----------------------------------------------------------------------
.../org/apache/spark/sql/execution/joins/HashedRelation.scala | 7 +++++--
.../spark/sql/execution/joins/HashedRelationSuite.scala | 6 +++---
2 files changed, 8 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/5ba2d440/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
index 7a50739..26dbc91 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
@@ -21,7 +21,7 @@ import java.io.{Externalizable, ObjectInput, ObjectOutput}
import java.nio.ByteOrder
import java.util.{HashMap => JavaHashMap}
-import org.apache.spark.{SparkEnv, TaskContext}
+import org.apache.spark.{SparkConf, SparkEnv, TaskContext}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution.SparkSqlSerializer
@@ -260,7 +260,10 @@ private[joins] final class UnsafeHashedRelation(
val nKeys = in.readInt()
// This is used in Broadcast, shared by multiple tasks, so we use on-heap memory
val memoryManager = new TaskMemoryManager(new ExecutorMemoryManager(MemoryAllocator.HEAP))
- val pageSizeBytes = SparkEnv.get.conf.getSizeAsBytes("spark.buffer.pageSize", "64m")
+
+ val pageSizeBytes = Option(SparkEnv.get).map(_.conf).getOrElse(new SparkConf())
+ .getSizeAsBytes("spark.buffer.pageSize", "64m")
+
binaryMap = new BytesToBytesMap(
memoryManager,
nKeys * 2, // reduce hash collision
http://git-wip-us.apache.org/repos/asf/spark/blob/5ba2d440/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
index 941f6d4..8b1a9b2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
@@ -33,7 +33,7 @@ class HashedRelationSuite extends SparkFunSuite {
override def apply(row: InternalRow): InternalRow = row
}
- ignore("GeneralHashedRelation") {
+ test("GeneralHashedRelation") {
val data = Array(InternalRow(0), InternalRow(1), InternalRow(2), InternalRow(2))
val hashed = HashedRelation(data.iterator, keyProjection)
assert(hashed.isInstanceOf[GeneralHashedRelation])
@@ -47,7 +47,7 @@ class HashedRelationSuite extends SparkFunSuite {
assert(hashed.get(data(2)) === data2)
}
- ignore("UniqueKeyHashedRelation") {
+ test("UniqueKeyHashedRelation") {
val data = Array(InternalRow(0), InternalRow(1), InternalRow(2))
val hashed = HashedRelation(data.iterator, keyProjection)
assert(hashed.isInstanceOf[UniqueKeyHashedRelation])
@@ -64,7 +64,7 @@ class HashedRelationSuite extends SparkFunSuite {
assert(uniqHashed.getValue(InternalRow(10)) === null)
}
- ignore("UnsafeHashedRelation") {
+ test("UnsafeHashedRelation") {
val schema = StructType(StructField("a", IntegerType, true) :: Nil)
val data = Array(InternalRow(0), InternalRow(1), InternalRow(2), InternalRow(2))
val toUnsafe = UnsafeProjection.create(schema)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org