You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/10/02 00:57:27 UTC
git commit: [SPARK-3593][SQL] Add support for sorting BinaryType
Repository: spark
Updated Branches:
refs/heads/master f315fb7ef -> f84b228c4
[SPARK-3593][SQL] Add support for sorting BinaryType
BinaryType is now derived from NativeType, and Ordering support has been added for it.
Author: Venkata Ramana G <ramana.gollamudi@huawei.com>
Author: Venkata Ramana Gollamudi <ra...@huawei.com>
Closes #2617 from gvramana/binarytype_sort and squashes the following commits:
1cf26f3 [Venkata Ramana Gollamudi] Supported Sorting of BinaryType
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f84b228c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f84b228c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f84b228c
Branch: refs/heads/master
Commit: f84b228c4002073ee4ff53be50462a63f48bd508
Parents: f315fb7
Author: Venkata Ramana Gollamudi <ra...@huawei.com>
Authored: Wed Oct 1 15:57:06 2014 -0700
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Wed Oct 1 15:57:09 2014 -0700
----------------------------------------------------------------------
.../org/apache/spark/sql/catalyst/types/dataTypes.scala | 12 +++++++++++-
.../test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 8 ++++++++
.../src/test/scala/org/apache/spark/sql/TestData.scala | 10 ++++++++++
3 files changed, 29 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/f84b228c/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
index c7d73d3..ac043d4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
@@ -157,8 +157,18 @@ case object StringType extends NativeType with PrimitiveType {
def simpleString: String = "string"
}
case object BinaryType extends NativeType with PrimitiveType {
  private[sql] type JvmType = Array[Byte]

  // TypeTag creation goes through runtime reflection, which is not thread-safe;
  // serialize it under the shared lock, as the other NativeType instances do.
  @transient private[sql] lazy val tag = ScalaReflectionLock.synchronized { typeTag[JvmType] }

  /**
   * Lexicographic ordering over byte arrays: compare element-wise using signed
   * byte comparison; if one array is a strict prefix of the other, the shorter
   * array sorts first.
   *
   * NOTE: a plain `while` loop is used instead of a for-comprehension. A
   * `return` inside a for-comprehension body is a nonlocal return (the body is
   * a lambda), which is implemented by throwing NonLocalReturnControl — costly
   * and non-idiomatic in a comparator that runs once per sort comparison.
   */
  private[sql] val ordering = new Ordering[JvmType] {
    def compare(x: Array[Byte], y: Array[Byte]): Int = {
      val commonLength = math.min(x.length, y.length)
      var i = 0
      var result = 0
      while (i < commonLength && result == 0) {
        // Signed byte comparison, matching Byte#compareTo semantics.
        result = x(i).compareTo(y(i))
        i += 1
      }
      // All shared positions equal: the shorter array is the smaller value.
      if (result != 0) result else x.length - y.length
    }
  }

  def simpleString: String = "binary"
}
http://git-wip-us.apache.org/repos/asf/spark/blob/f84b228c/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 08376eb..fdf3a22 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -191,6 +191,14 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
Seq((3,1), (3,2), (2,1), (2,2), (1,1), (1,2)))
checkAnswer(
+ sql("SELECT b FROM binaryData ORDER BY a ASC"),
+ (1 to 5).map(Row(_)).toSeq)
+
+ checkAnswer(
+ sql("SELECT b FROM binaryData ORDER BY a DESC"),
+ (1 to 5).map(Row(_)).toSeq.reverse)
+
+ checkAnswer(
sql("SELECT * FROM arrayData ORDER BY data[0] ASC"),
arrayData.collect().sortBy(_.data(0)).toSeq)
http://git-wip-us.apache.org/repos/asf/spark/blob/f84b228c/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index eb33a61..10b7979 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -54,6 +54,16 @@ object TestData {
TestData2(3, 2) :: Nil)
testData2.registerTempTable("testData2")
+ case class BinaryData(a: Array[Byte], b: Int)
+ val binaryData: SchemaRDD =
+ TestSQLContext.sparkContext.parallelize(
+ BinaryData("12".getBytes(), 1) ::
+ BinaryData("22".getBytes(), 5) ::
+ BinaryData("122".getBytes(), 3) ::
+ BinaryData("121".getBytes(), 2) ::
+ BinaryData("123".getBytes(), 4) :: Nil)
+ binaryData.registerTempTable("binaryData")
+
// TODO: There is no way to express null primitives as case classes currently...
val testData3 =
logical.LocalRelation('a.int, 'b.int).loadData(
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org