Posted to commits@spark.apache.org by ma...@apache.org on 2015/01/21 00:16:26 UTC
[2/3] spark git commit: [SPARK-5323][SQL] Remove Row's Seq inheritance.
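In short: Row no longer extends Seq[Any], so the expected answers in these test suites can no longer be plain Seqs or tuples and are instead built explicitly as Rows. The sketch below shows the construction idioms the diff relies on (Row(...), Row.fromSeq, Row.fromTuple); it assumes the 1.3-era import path org.apache.spark.sql.Row and needs no SparkContext, since Rows are plain values.

    import org.apache.spark.sql.Row

    object RowConstructionSketch extends App {
      // Rows are now constructed explicitly rather than treated as Seqs.
      val r1 = Row(1, "a")               // positional varargs constructor
      val r2 = Row.fromSeq(Seq(1, "a"))  // from a Seq of values
      val r3 = Row.fromTuple((1, "a"))   // from any Product, e.g. a tuple

      assert(r1 == r2 && r1 == r3)       // Rows compare field by field
      assert(r1(0) == 1)                 // index with apply(i); Seq ops such as head are gone
    }
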
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 54fabc5..03b44ca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -46,7 +46,7 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("SPARK-4625 support SORT BY in SimpleSQLParser & DSL") {
checkAnswer(
sql("SELECT a FROM testData2 SORT BY a"),
- Seq(1, 1, 2 ,2 ,3 ,3).map(Seq(_))
+ Seq(1, 1, 2 ,2 ,3 ,3).map(Row(_))
)
}
@@ -70,13 +70,13 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("SPARK-3176 Added Parser of SQL ABS()") {
checkAnswer(
sql("SELECT ABS(-1.3)"),
- 1.3)
+ Row(1.3))
checkAnswer(
sql("SELECT ABS(0.0)"),
- 0.0)
+ Row(0.0))
checkAnswer(
sql("SELECT ABS(2.5)"),
- 2.5)
+ Row(2.5))
}
test("aggregation with codegen") {
@@ -89,13 +89,13 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("SPARK-3176 Added Parser of SQL LAST()") {
checkAnswer(
sql("SELECT LAST(n) FROM lowerCaseData"),
- 4)
+ Row(4))
}
test("SPARK-2041 column name equals tablename") {
checkAnswer(
sql("SELECT tableName FROM tableName"),
- "test")
+ Row("test"))
}
test("SQRT") {
@@ -115,40 +115,40 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("SPARK-2407 Added Parser of SQL SUBSTR()") {
checkAnswer(
sql("SELECT substr(tableName, 1, 2) FROM tableName"),
- "te")
+ Row("te"))
checkAnswer(
sql("SELECT substr(tableName, 3) FROM tableName"),
- "st")
+ Row("st"))
checkAnswer(
sql("SELECT substring(tableName, 1, 2) FROM tableName"),
- "te")
+ Row("te"))
checkAnswer(
sql("SELECT substring(tableName, 3) FROM tableName"),
- "st")
+ Row("st"))
}
test("SPARK-3173 Timestamp support in the parser") {
checkAnswer(sql(
"SELECT time FROM timestamps WHERE time=CAST('1970-01-01 00:00:00.001' AS TIMESTAMP)"),
- Seq(Seq(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.001"))))
+ Row(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.001")))
checkAnswer(sql(
"SELECT time FROM timestamps WHERE time='1970-01-01 00:00:00.001'"),
- Seq(Seq(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.001"))))
+ Row(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.001")))
checkAnswer(sql(
"SELECT time FROM timestamps WHERE '1970-01-01 00:00:00.001'=time"),
- Seq(Seq(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.001"))))
+ Row(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.001")))
checkAnswer(sql(
"""SELECT time FROM timestamps WHERE time<'1970-01-01 00:00:00.003'
AND time>'1970-01-01 00:00:00.001'"""),
- Seq(Seq(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.002"))))
+ Row(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.002")))
checkAnswer(sql(
"SELECT time FROM timestamps WHERE time IN ('1970-01-01 00:00:00.001','1970-01-01 00:00:00.002')"),
- Seq(Seq(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.001")),
- Seq(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.002"))))
+ Seq(Row(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.001")),
+ Row(java.sql.Timestamp.valueOf("1970-01-01 00:00:00.002"))))
checkAnswer(sql(
"SELECT time FROM timestamps WHERE time='123'"),
@@ -158,13 +158,13 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("index into array") {
checkAnswer(
sql("SELECT data, data[0], data[0] + data[1], data[0 + 1] FROM arrayData"),
- arrayData.map(d => (d.data, d.data(0), d.data(0) + d.data(1), d.data(1))).collect().toSeq)
+ arrayData.map(d => Row(d.data, d.data(0), d.data(0) + d.data(1), d.data(1))).collect())
}
test("left semi greater than predicate") {
checkAnswer(
sql("SELECT * FROM testData2 x LEFT SEMI JOIN testData2 y ON x.a >= y.a + 2"),
- Seq((3,1), (3,2))
+ Seq(Row(3,1), Row(3,2))
)
}
@@ -173,7 +173,7 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
sql(
"SELECT nestedData, nestedData[0][0], nestedData[0][0] + nestedData[0][1] FROM arrayData"),
arrayData.map(d =>
- (d.nestedData,
+ Row(d.nestedData,
d.nestedData(0)(0),
d.nestedData(0)(0) + d.nestedData(0)(1))).collect().toSeq)
}
@@ -181,13 +181,13 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("agg") {
checkAnswer(
sql("SELECT a, SUM(b) FROM testData2 GROUP BY a"),
- Seq((1,3),(2,3),(3,3)))
+ Seq(Row(1,3), Row(2,3), Row(3,3)))
}
test("aggregates with nulls") {
checkAnswer(
sql("SELECT MIN(a), MAX(a), AVG(a), SUM(a), COUNT(a) FROM nullInts"),
- (1, 3, 2, 6, 3) :: Nil
+ Row(1, 3, 2, 6, 3)
)
}
@@ -200,29 +200,29 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("simple select") {
checkAnswer(
sql("SELECT value FROM testData WHERE key = 1"),
- Seq(Seq("1")))
+ Row("1"))
}
def sortTest() = {
checkAnswer(
sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC"),
- Seq((1,1), (1,2), (2,1), (2,2), (3,1), (3,2)))
+ Seq(Row(1,1), Row(1,2), Row(2,1), Row(2,2), Row(3,1), Row(3,2)))
checkAnswer(
sql("SELECT * FROM testData2 ORDER BY a ASC, b DESC"),
- Seq((1,2), (1,1), (2,2), (2,1), (3,2), (3,1)))
+ Seq(Row(1,2), Row(1,1), Row(2,2), Row(2,1), Row(3,2), Row(3,1)))
checkAnswer(
sql("SELECT * FROM testData2 ORDER BY a DESC, b DESC"),
- Seq((3,2), (3,1), (2,2), (2,1), (1,2), (1,1)))
+ Seq(Row(3,2), Row(3,1), Row(2,2), Row(2,1), Row(1,2), Row(1,1)))
checkAnswer(
sql("SELECT * FROM testData2 ORDER BY a DESC, b ASC"),
- Seq((3,1), (3,2), (2,1), (2,2), (1,1), (1,2)))
+ Seq(Row(3,1), Row(3,2), Row(2,1), Row(2,2), Row(1,1), Row(1,2)))
checkAnswer(
sql("SELECT b FROM binaryData ORDER BY a ASC"),
- (1 to 5).map(Row(_)).toSeq)
+ (1 to 5).map(Row(_)))
checkAnswer(
sql("SELECT b FROM binaryData ORDER BY a DESC"),
@@ -230,19 +230,19 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
checkAnswer(
sql("SELECT * FROM arrayData ORDER BY data[0] ASC"),
- arrayData.collect().sortBy(_.data(0)).toSeq)
+ arrayData.collect().sortBy(_.data(0)).map(Row.fromTuple).toSeq)
checkAnswer(
sql("SELECT * FROM arrayData ORDER BY data[0] DESC"),
- arrayData.collect().sortBy(_.data(0)).reverse.toSeq)
+ arrayData.collect().sortBy(_.data(0)).reverse.map(Row.fromTuple).toSeq)
checkAnswer(
sql("SELECT * FROM mapData ORDER BY data[1] ASC"),
- mapData.collect().sortBy(_.data(1)).toSeq)
+ mapData.collect().sortBy(_.data(1)).map(Row.fromTuple).toSeq)
checkAnswer(
sql("SELECT * FROM mapData ORDER BY data[1] DESC"),
- mapData.collect().sortBy(_.data(1)).reverse.toSeq)
+ mapData.collect().sortBy(_.data(1)).reverse.map(Row.fromTuple).toSeq)
}
test("sorting") {
@@ -266,94 +266,94 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
checkAnswer(
sql("SELECT * FROM arrayData LIMIT 1"),
- arrayData.collect().take(1).toSeq)
+ arrayData.collect().take(1).map(Row.fromTuple).toSeq)
checkAnswer(
sql("SELECT * FROM mapData LIMIT 1"),
- mapData.collect().take(1).toSeq)
+ mapData.collect().take(1).map(Row.fromTuple).toSeq)
}
test("from follow multiple brackets") {
checkAnswer(sql(
"select key from ((select * from testData limit 1) union all (select * from testData limit 1)) x limit 1"),
- 1
+ Row(1)
)
checkAnswer(sql(
"select key from (select * from testData) x limit 1"),
- 1
+ Row(1)
)
checkAnswer(sql(
"select key from (select * from testData limit 1 union all select * from testData limit 1) x limit 1"),
- 1
+ Row(1)
)
}
test("average") {
checkAnswer(
sql("SELECT AVG(a) FROM testData2"),
- 2.0)
+ Row(2.0))
}
test("average overflow") {
checkAnswer(
sql("SELECT AVG(a),b FROM largeAndSmallInts group by b"),
- Seq((2147483645.0,1),(2.0,2)))
+ Seq(Row(2147483645.0,1), Row(2.0,2)))
}
test("count") {
checkAnswer(
sql("SELECT COUNT(*) FROM testData2"),
- testData2.count())
+ Row(testData2.count()))
}
test("count distinct") {
checkAnswer(
sql("SELECT COUNT(DISTINCT b) FROM testData2"),
- 2)
+ Row(2))
}
test("approximate count distinct") {
checkAnswer(
sql("SELECT APPROXIMATE COUNT(DISTINCT a) FROM testData2"),
- 3)
+ Row(3))
}
test("approximate count distinct with user provided standard deviation") {
checkAnswer(
sql("SELECT APPROXIMATE(0.04) COUNT(DISTINCT a) FROM testData2"),
- 3)
+ Row(3))
}
test("null count") {
checkAnswer(
sql("SELECT a, COUNT(b) FROM testData3 GROUP BY a"),
- Seq((1, 0), (2, 1)))
+ Seq(Row(1, 0), Row(2, 1)))
checkAnswer(
sql("SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3"),
- (2, 1, 2, 2, 1) :: Nil)
+ Row(2, 1, 2, 2, 1))
}
test("inner join where, one match per row") {
checkAnswer(
sql("SELECT * FROM upperCaseData JOIN lowerCaseData WHERE n = N"),
Seq(
- (1, "A", 1, "a"),
- (2, "B", 2, "b"),
- (3, "C", 3, "c"),
- (4, "D", 4, "d")))
+ Row(1, "A", 1, "a"),
+ Row(2, "B", 2, "b"),
+ Row(3, "C", 3, "c"),
+ Row(4, "D", 4, "d")))
}
test("inner join ON, one match per row") {
checkAnswer(
sql("SELECT * FROM upperCaseData JOIN lowerCaseData ON n = N"),
Seq(
- (1, "A", 1, "a"),
- (2, "B", 2, "b"),
- (3, "C", 3, "c"),
- (4, "D", 4, "d")))
+ Row(1, "A", 1, "a"),
+ Row(2, "B", 2, "b"),
+ Row(3, "C", 3, "c"),
+ Row(4, "D", 4, "d")))
}
test("inner join, where, multiple matches") {
@@ -363,10 +363,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
| (SELECT * FROM testData2 WHERE a = 1) x JOIN
| (SELECT * FROM testData2 WHERE a = 1) y
|WHERE x.a = y.a""".stripMargin),
- (1,1,1,1) ::
- (1,1,1,2) ::
- (1,2,1,1) ::
- (1,2,1,2) :: Nil)
+ Row(1,1,1,1) ::
+ Row(1,1,1,2) ::
+ Row(1,2,1,1) ::
+ Row(1,2,1,2) :: Nil)
}
test("inner join, no matches") {
@@ -397,38 +397,38 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
| SELECT * FROM testData) y
|WHERE x.key = y.key""".stripMargin),
testData.flatMap(
- row => Seq.fill(16)((row ++ row).toSeq)).collect().toSeq)
+ row => Seq.fill(16)(Row.merge(row, row))).collect().toSeq)
}
ignore("cartesian product join") {
checkAnswer(
testData3.join(testData3),
- (1, null, 1, null) ::
- (1, null, 2, 2) ::
- (2, 2, 1, null) ::
- (2, 2, 2, 2) :: Nil)
+ Row(1, null, 1, null) ::
+ Row(1, null, 2, 2) ::
+ Row(2, 2, 1, null) ::
+ Row(2, 2, 2, 2) :: Nil)
}
test("left outer join") {
checkAnswer(
sql("SELECT * FROM upperCaseData LEFT OUTER JOIN lowerCaseData ON n = N"),
- (1, "A", 1, "a") ::
- (2, "B", 2, "b") ::
- (3, "C", 3, "c") ::
- (4, "D", 4, "d") ::
- (5, "E", null, null) ::
- (6, "F", null, null) :: Nil)
+ Row(1, "A", 1, "a") ::
+ Row(2, "B", 2, "b") ::
+ Row(3, "C", 3, "c") ::
+ Row(4, "D", 4, "d") ::
+ Row(5, "E", null, null) ::
+ Row(6, "F", null, null) :: Nil)
}
test("right outer join") {
checkAnswer(
sql("SELECT * FROM lowerCaseData RIGHT OUTER JOIN upperCaseData ON n = N"),
- (1, "a", 1, "A") ::
- (2, "b", 2, "B") ::
- (3, "c", 3, "C") ::
- (4, "d", 4, "D") ::
- (null, null, 5, "E") ::
- (null, null, 6, "F") :: Nil)
+ Row(1, "a", 1, "A") ::
+ Row(2, "b", 2, "B") ::
+ Row(3, "c", 3, "C") ::
+ Row(4, "d", 4, "D") ::
+ Row(null, null, 5, "E") ::
+ Row(null, null, 6, "F") :: Nil)
}
test("full outer join") {
@@ -440,12 +440,12 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
| (SELECT * FROM upperCaseData WHERE N >= 3) rightTable
| ON leftTable.N = rightTable.N
""".stripMargin),
- (1, "A", null, null) ::
- (2, "B", null, null) ::
- (3, "C", 3, "C") ::
- (4, "D", 4, "D") ::
- (null, null, 5, "E") ::
- (null, null, 6, "F") :: Nil)
+ Row(1, "A", null, null) ::
+ Row(2, "B", null, null) ::
+ Row(3, "C", 3, "C") ::
+ Row(4, "D", 4, "D") ::
+ Row(null, null, 5, "E") ::
+ Row(null, null, 6, "F") :: Nil)
}
test("SPARK-3349 partitioning after limit") {
@@ -457,12 +457,12 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
.registerTempTable("subset2")
checkAnswer(
sql("SELECT * FROM lowerCaseData INNER JOIN subset1 ON subset1.n = lowerCaseData.n"),
- (3, "c", 3) ::
- (4, "d", 4) :: Nil)
+ Row(3, "c", 3) ::
+ Row(4, "d", 4) :: Nil)
checkAnswer(
sql("SELECT * FROM lowerCaseData INNER JOIN subset2 ON subset2.n = lowerCaseData.n"),
- (1, "a", 1) ::
- (2, "b", 2) :: Nil)
+ Row(1, "a", 1) ::
+ Row(2, "b", 2) :: Nil)
}
test("mixed-case keywords") {
@@ -474,28 +474,28 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
| (sElEcT * FROM upperCaseData whERe N >= 3) rightTable
| oN leftTable.N = rightTable.N
""".stripMargin),
- (1, "A", null, null) ::
- (2, "B", null, null) ::
- (3, "C", 3, "C") ::
- (4, "D", 4, "D") ::
- (null, null, 5, "E") ::
- (null, null, 6, "F") :: Nil)
+ Row(1, "A", null, null) ::
+ Row(2, "B", null, null) ::
+ Row(3, "C", 3, "C") ::
+ Row(4, "D", 4, "D") ::
+ Row(null, null, 5, "E") ::
+ Row(null, null, 6, "F") :: Nil)
}
test("select with table name as qualifier") {
checkAnswer(
sql("SELECT testData.value FROM testData WHERE testData.key = 1"),
- Seq(Seq("1")))
+ Row("1"))
}
test("inner join ON with table name as qualifier") {
checkAnswer(
sql("SELECT * FROM upperCaseData JOIN lowerCaseData ON lowerCaseData.n = upperCaseData.N"),
Seq(
- (1, "A", 1, "a"),
- (2, "B", 2, "b"),
- (3, "C", 3, "c"),
- (4, "D", 4, "d")))
+ Row(1, "A", 1, "a"),
+ Row(2, "B", 2, "b"),
+ Row(3, "C", 3, "c"),
+ Row(4, "D", 4, "d")))
}
test("qualified select with inner join ON with table name as qualifier") {
@@ -503,72 +503,72 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
sql("SELECT upperCaseData.N, upperCaseData.L FROM upperCaseData JOIN lowerCaseData " +
"ON lowerCaseData.n = upperCaseData.N"),
Seq(
- (1, "A"),
- (2, "B"),
- (3, "C"),
- (4, "D")))
+ Row(1, "A"),
+ Row(2, "B"),
+ Row(3, "C"),
+ Row(4, "D")))
}
test("system function upper()") {
checkAnswer(
sql("SELECT n,UPPER(l) FROM lowerCaseData"),
Seq(
- (1, "A"),
- (2, "B"),
- (3, "C"),
- (4, "D")))
+ Row(1, "A"),
+ Row(2, "B"),
+ Row(3, "C"),
+ Row(4, "D")))
checkAnswer(
sql("SELECT n, UPPER(s) FROM nullStrings"),
Seq(
- (1, "ABC"),
- (2, "ABC"),
- (3, null)))
+ Row(1, "ABC"),
+ Row(2, "ABC"),
+ Row(3, null)))
}
test("system function lower()") {
checkAnswer(
sql("SELECT N,LOWER(L) FROM upperCaseData"),
Seq(
- (1, "a"),
- (2, "b"),
- (3, "c"),
- (4, "d"),
- (5, "e"),
- (6, "f")))
+ Row(1, "a"),
+ Row(2, "b"),
+ Row(3, "c"),
+ Row(4, "d"),
+ Row(5, "e"),
+ Row(6, "f")))
checkAnswer(
sql("SELECT n, LOWER(s) FROM nullStrings"),
Seq(
- (1, "abc"),
- (2, "abc"),
- (3, null)))
+ Row(1, "abc"),
+ Row(2, "abc"),
+ Row(3, null)))
}
test("UNION") {
checkAnswer(
sql("SELECT * FROM lowerCaseData UNION SELECT * FROM upperCaseData"),
- (1, "A") :: (1, "a") :: (2, "B") :: (2, "b") :: (3, "C") :: (3, "c") ::
- (4, "D") :: (4, "d") :: (5, "E") :: (6, "F") :: Nil)
+ Row(1, "A") :: Row(1, "a") :: Row(2, "B") :: Row(2, "b") :: Row(3, "C") :: Row(3, "c") ::
+ Row(4, "D") :: Row(4, "d") :: Row(5, "E") :: Row(6, "F") :: Nil)
checkAnswer(
sql("SELECT * FROM lowerCaseData UNION SELECT * FROM lowerCaseData"),
- (1, "a") :: (2, "b") :: (3, "c") :: (4, "d") :: Nil)
+ Row(1, "a") :: Row(2, "b") :: Row(3, "c") :: Row(4, "d") :: Nil)
checkAnswer(
sql("SELECT * FROM lowerCaseData UNION ALL SELECT * FROM lowerCaseData"),
- (1, "a") :: (1, "a") :: (2, "b") :: (2, "b") :: (3, "c") :: (3, "c") ::
- (4, "d") :: (4, "d") :: Nil)
+ Row(1, "a") :: Row(1, "a") :: Row(2, "b") :: Row(2, "b") :: Row(3, "c") :: Row(3, "c") ::
+ Row(4, "d") :: Row(4, "d") :: Nil)
}
test("UNION with column mismatches") {
// Column name mismatches are allowed.
checkAnswer(
sql("SELECT n,l FROM lowerCaseData UNION SELECT N as x1, L as x2 FROM upperCaseData"),
- (1, "A") :: (1, "a") :: (2, "B") :: (2, "b") :: (3, "C") :: (3, "c") ::
- (4, "D") :: (4, "d") :: (5, "E") :: (6, "F") :: Nil)
+ Row(1, "A") :: Row(1, "a") :: Row(2, "B") :: Row(2, "b") :: Row(3, "C") :: Row(3, "c") ::
+ Row(4, "D") :: Row(4, "d") :: Row(5, "E") :: Row(6, "F") :: Nil)
// Column type mismatches are not allowed, forcing a type coercion.
checkAnswer(
sql("SELECT n FROM lowerCaseData UNION SELECT L FROM upperCaseData"),
- ("1" :: "2" :: "3" :: "4" :: "A" :: "B" :: "C" :: "D" :: "E" :: "F" :: Nil).map(Tuple1(_)))
+ ("1" :: "2" :: "3" :: "4" :: "A" :: "B" :: "C" :: "D" :: "E" :: "F" :: Nil).map(Row(_)))
// Column type mismatches where a coercion is not possible, in this case between integer
// and array types, trigger a TreeNodeException.
intercept[TreeNodeException[_]] {
@@ -579,10 +579,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("EXCEPT") {
checkAnswer(
sql("SELECT * FROM lowerCaseData EXCEPT SELECT * FROM upperCaseData"),
- (1, "a") ::
- (2, "b") ::
- (3, "c") ::
- (4, "d") :: Nil)
+ Row(1, "a") ::
+ Row(2, "b") ::
+ Row(3, "c") ::
+ Row(4, "d") :: Nil)
checkAnswer(
sql("SELECT * FROM lowerCaseData EXCEPT SELECT * FROM lowerCaseData"), Nil)
checkAnswer(
@@ -592,10 +592,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("INTERSECT") {
checkAnswer(
sql("SELECT * FROM lowerCaseData INTERSECT SELECT * FROM lowerCaseData"),
- (1, "a") ::
- (2, "b") ::
- (3, "c") ::
- (4, "d") :: Nil)
+ Row(1, "a") ::
+ Row(2, "b") ::
+ Row(3, "c") ::
+ Row(4, "d") :: Nil)
checkAnswer(
sql("SELECT * FROM lowerCaseData INTERSECT SELECT * FROM upperCaseData"), Nil)
}
@@ -613,25 +613,25 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
sql(s"SET $testKey=$testVal")
checkAnswer(
sql("SET"),
- Seq(Seq(s"$testKey=$testVal"))
+ Row(s"$testKey=$testVal")
)
sql(s"SET ${testKey + testKey}=${testVal + testVal}")
checkAnswer(
sql("set"),
Seq(
- Seq(s"$testKey=$testVal"),
- Seq(s"${testKey + testKey}=${testVal + testVal}"))
+ Row(s"$testKey=$testVal"),
+ Row(s"${testKey + testKey}=${testVal + testVal}"))
)
// "set key"
checkAnswer(
sql(s"SET $testKey"),
- Seq(Seq(s"$testKey=$testVal"))
+ Row(s"$testKey=$testVal")
)
checkAnswer(
sql(s"SET $nonexistentKey"),
- Seq(Seq(s"$nonexistentKey=<undefined>"))
+ Row(s"$nonexistentKey=<undefined>")
)
conf.clear()
}
@@ -655,17 +655,17 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
schemaRDD1.registerTempTable("applySchema1")
checkAnswer(
sql("SELECT * FROM applySchema1"),
- (1, "A1", true, null) ::
- (2, "B2", false, null) ::
- (3, "C3", true, null) ::
- (4, "D4", true, 2147483644) :: Nil)
+ Row(1, "A1", true, null) ::
+ Row(2, "B2", false, null) ::
+ Row(3, "C3", true, null) ::
+ Row(4, "D4", true, 2147483644) :: Nil)
checkAnswer(
sql("SELECT f1, f4 FROM applySchema1"),
- (1, null) ::
- (2, null) ::
- (3, null) ::
- (4, 2147483644) :: Nil)
+ Row(1, null) ::
+ Row(2, null) ::
+ Row(3, null) ::
+ Row(4, 2147483644) :: Nil)
val schema2 = StructType(
StructField("f1", StructType(
@@ -685,17 +685,17 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
schemaRDD2.registerTempTable("applySchema2")
checkAnswer(
sql("SELECT * FROM applySchema2"),
- (Seq(1, true), Map("A1" -> null)) ::
- (Seq(2, false), Map("B2" -> null)) ::
- (Seq(3, true), Map("C3" -> null)) ::
- (Seq(4, true), Map("D4" -> 2147483644)) :: Nil)
+ Row(Row(1, true), Map("A1" -> null)) ::
+ Row(Row(2, false), Map("B2" -> null)) ::
+ Row(Row(3, true), Map("C3" -> null)) ::
+ Row(Row(4, true), Map("D4" -> 2147483644)) :: Nil)
checkAnswer(
sql("SELECT f1.f11, f2['D4'] FROM applySchema2"),
- (1, null) ::
- (2, null) ::
- (3, null) ::
- (4, 2147483644) :: Nil)
+ Row(1, null) ::
+ Row(2, null) ::
+ Row(3, null) ::
+ Row(4, 2147483644) :: Nil)
// The value of a MapType column can be a mutable map.
val rowRDD3 = unparsedStrings.map { r =>
@@ -711,26 +711,26 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
checkAnswer(
sql("SELECT f1.f11, f2['D4'] FROM applySchema3"),
- (1, null) ::
- (2, null) ::
- (3, null) ::
- (4, 2147483644) :: Nil)
+ Row(1, null) ::
+ Row(2, null) ::
+ Row(3, null) ::
+ Row(4, 2147483644) :: Nil)
}
test("SPARK-3423 BETWEEN") {
checkAnswer(
sql("SELECT key, value FROM testData WHERE key BETWEEN 5 and 7"),
- Seq((5, "5"), (6, "6"), (7, "7"))
+ Seq(Row(5, "5"), Row(6, "6"), Row(7, "7"))
)
checkAnswer(
sql("SELECT key, value FROM testData WHERE key BETWEEN 7 and 7"),
- Seq((7, "7"))
+ Row(7, "7")
)
checkAnswer(
sql("SELECT key, value FROM testData WHERE key BETWEEN 9 and 7"),
- Seq()
+ Nil
)
}
@@ -738,7 +738,7 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
// TODO Ensure true/false string letter casing is consistent with Hive in all cases.
checkAnswer(
sql("SELECT CAST(TRUE AS STRING), CAST(FALSE AS STRING) FROM testData LIMIT 1"),
- ("true", "false") :: Nil)
+ Row("true", "false"))
}
test("metadata is propagated correctly") {
@@ -768,17 +768,20 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("SPARK-3371 Renaming a function expression with group by gives error") {
udf.register("len", (s: String) => s.length)
checkAnswer(
- sql("SELECT len(value) as temp FROM testData WHERE key = 1 group by len(value)"), 1)
+ sql("SELECT len(value) as temp FROM testData WHERE key = 1 group by len(value)"),
+ Row(1))
}
test("SPARK-3813 CASE a WHEN b THEN c [WHEN d THEN e]* [ELSE f] END") {
checkAnswer(
- sql("SELECT CASE key WHEN 1 THEN 1 ELSE 0 END FROM testData WHERE key = 1 group by key"), 1)
+ sql("SELECT CASE key WHEN 1 THEN 1 ELSE 0 END FROM testData WHERE key = 1 group by key"),
+ Row(1))
}
test("SPARK-3813 CASE WHEN a THEN b [WHEN c THEN d]* [ELSE e] END") {
checkAnswer(
- sql("SELECT CASE WHEN key = 1 THEN 1 ELSE 2 END FROM testData WHERE key = 1 group by key"), 1)
+ sql("SELECT CASE WHEN key = 1 THEN 1 ELSE 2 END FROM testData WHERE key = 1 group by key"),
+ Row(1))
}
test("throw errors for non-aggregate attributes with aggregation") {
@@ -808,130 +811,131 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("Test to check we can use Long.MinValue") {
checkAnswer(
- sql(s"SELECT ${Long.MinValue} FROM testData ORDER BY key LIMIT 1"), Long.MinValue
+ sql(s"SELECT ${Long.MinValue} FROM testData ORDER BY key LIMIT 1"), Row(Long.MinValue)
)
checkAnswer(
- sql(s"SELECT key FROM testData WHERE key > ${Long.MinValue}"), (1 to 100).map(Row(_)).toSeq
+ sql(s"SELECT key FROM testData WHERE key > ${Long.MinValue}"),
+ (1 to 100).map(Row(_)).toSeq
)
}
test("Floating point number format") {
checkAnswer(
- sql("SELECT 0.3"), 0.3
+ sql("SELECT 0.3"), Row(0.3)
)
checkAnswer(
- sql("SELECT -0.8"), -0.8
+ sql("SELECT -0.8"), Row(-0.8)
)
checkAnswer(
- sql("SELECT .5"), 0.5
+ sql("SELECT .5"), Row(0.5)
)
checkAnswer(
- sql("SELECT -.18"), -0.18
+ sql("SELECT -.18"), Row(-0.18)
)
}
test("Auto cast integer type") {
checkAnswer(
- sql(s"SELECT ${Int.MaxValue + 1L}"), Int.MaxValue + 1L
+ sql(s"SELECT ${Int.MaxValue + 1L}"), Row(Int.MaxValue + 1L)
)
checkAnswer(
- sql(s"SELECT ${Int.MinValue - 1L}"), Int.MinValue - 1L
+ sql(s"SELECT ${Int.MinValue - 1L}"), Row(Int.MinValue - 1L)
)
checkAnswer(
- sql("SELECT 9223372036854775808"), new java.math.BigDecimal("9223372036854775808")
+ sql("SELECT 9223372036854775808"), Row(new java.math.BigDecimal("9223372036854775808"))
)
checkAnswer(
- sql("SELECT -9223372036854775809"), new java.math.BigDecimal("-9223372036854775809")
+ sql("SELECT -9223372036854775809"), Row(new java.math.BigDecimal("-9223372036854775809"))
)
}
test("Test to check we can apply sign to expression") {
checkAnswer(
- sql("SELECT -100"), -100
+ sql("SELECT -100"), Row(-100)
)
checkAnswer(
- sql("SELECT +230"), 230
+ sql("SELECT +230"), Row(230)
)
checkAnswer(
- sql("SELECT -5.2"), -5.2
+ sql("SELECT -5.2"), Row(-5.2)
)
checkAnswer(
- sql("SELECT +6.8"), 6.8
+ sql("SELECT +6.8"), Row(6.8)
)
checkAnswer(
- sql("SELECT -key FROM testData WHERE key = 2"), -2
+ sql("SELECT -key FROM testData WHERE key = 2"), Row(-2)
)
checkAnswer(
- sql("SELECT +key FROM testData WHERE key = 3"), 3
+ sql("SELECT +key FROM testData WHERE key = 3"), Row(3)
)
checkAnswer(
- sql("SELECT -(key + 1) FROM testData WHERE key = 1"), -2
+ sql("SELECT -(key + 1) FROM testData WHERE key = 1"), Row(-2)
)
checkAnswer(
- sql("SELECT - key + 1 FROM testData WHERE key = 10"), -9
+ sql("SELECT - key + 1 FROM testData WHERE key = 10"), Row(-9)
)
checkAnswer(
- sql("SELECT +(key + 5) FROM testData WHERE key = 5"), 10
+ sql("SELECT +(key + 5) FROM testData WHERE key = 5"), Row(10)
)
checkAnswer(
- sql("SELECT -MAX(key) FROM testData"), -100
+ sql("SELECT -MAX(key) FROM testData"), Row(-100)
)
checkAnswer(
- sql("SELECT +MAX(key) FROM testData"), 100
+ sql("SELECT +MAX(key) FROM testData"), Row(100)
)
checkAnswer(
- sql("SELECT - (-10)"), 10
+ sql("SELECT - (-10)"), Row(10)
)
checkAnswer(
- sql("SELECT + (-key) FROM testData WHERE key = 32"), -32
+ sql("SELECT + (-key) FROM testData WHERE key = 32"), Row(-32)
)
checkAnswer(
- sql("SELECT - (+Max(key)) FROM testData"), -100
+ sql("SELECT - (+Max(key)) FROM testData"), Row(-100)
)
checkAnswer(
- sql("SELECT - - 3"), 3
+ sql("SELECT - - 3"), Row(3)
)
checkAnswer(
- sql("SELECT - + 20"), -20
+ sql("SELECT - + 20"), Row(-20)
)
checkAnswer(
- sql("SELEcT - + 45"), -45
+ sql("SELEcT - + 45"), Row(-45)
)
checkAnswer(
- sql("SELECT + + 100"), 100
+ sql("SELECT + + 100"), Row(100)
)
checkAnswer(
- sql("SELECT - - Max(key) FROM testData"), 100
+ sql("SELECT - - Max(key) FROM testData"), Row(100)
)
checkAnswer(
- sql("SELECT + - key FROM testData WHERE key = 33"), -33
+ sql("SELECT + - key FROM testData WHERE key = 33"), Row(-33)
)
}
@@ -943,7 +947,7 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
|JOIN testData b ON a.key = b.key
|JOIN testData c ON a.key = c.key
""".stripMargin),
- (1 to 100).map(i => Seq(i, i, i)))
+ (1 to 100).map(i => Row(i, i, i)))
}
test("SPARK-3483 Special chars in column names") {
@@ -953,19 +957,19 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
}
test("SPARK-3814 Support Bitwise & operator") {
- checkAnswer(sql("SELECT key&1 FROM testData WHERE key = 1 "), 1)
+ checkAnswer(sql("SELECT key&1 FROM testData WHERE key = 1 "), Row(1))
}
test("SPARK-3814 Support Bitwise | operator") {
- checkAnswer(sql("SELECT key|0 FROM testData WHERE key = 1 "), 1)
+ checkAnswer(sql("SELECT key|0 FROM testData WHERE key = 1 "), Row(1))
}
test("SPARK-3814 Support Bitwise ^ operator") {
- checkAnswer(sql("SELECT key^0 FROM testData WHERE key = 1 "), 1)
+ checkAnswer(sql("SELECT key^0 FROM testData WHERE key = 1 "), Row(1))
}
test("SPARK-3814 Support Bitwise ~ operator") {
- checkAnswer(sql("SELECT ~key FROM testData WHERE key = 1 "), -2)
+ checkAnswer(sql("SELECT ~key FROM testData WHERE key = 1 "), Row(-2))
}
test("SPARK-4120 Join of multiple tables does not work in SparkSQL") {
@@ -975,40 +979,40 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
|FROM testData a,testData b,testData c
|where a.key = b.key and a.key = c.key
""".stripMargin),
- (1 to 100).map(i => Seq(i, i, i)))
+ (1 to 100).map(i => Row(i, i, i)))
}
test("SPARK-4154 Query does not work if it has 'not between' in Spark SQL and HQL") {
checkAnswer(sql("SELECT key FROM testData WHERE key not between 0 and 10 order by key"),
- (11 to 100).map(i => Seq(i)))
+ (11 to 100).map(i => Row(i)))
}
test("SPARK-4207 Query which has syntax like 'not like' is not working in Spark SQL") {
checkAnswer(sql("SELECT key FROM testData WHERE value not like '100%' order by key"),
- (1 to 99).map(i => Seq(i)))
+ (1 to 99).map(i => Row(i)))
}
test("SPARK-4322 Grouping field with struct field as sub expression") {
jsonRDD(sparkContext.makeRDD("""{"a": {"b": [{"c": 1}]}}""" :: Nil)).registerTempTable("data")
- checkAnswer(sql("SELECT a.b[0].c FROM data GROUP BY a.b[0].c"), 1)
+ checkAnswer(sql("SELECT a.b[0].c FROM data GROUP BY a.b[0].c"), Row(1))
dropTempTable("data")
jsonRDD(sparkContext.makeRDD("""{"a": {"b": 1}}""" :: Nil)).registerTempTable("data")
- checkAnswer(sql("SELECT a.b + 1 FROM data GROUP BY a.b + 1"), 2)
+ checkAnswer(sql("SELECT a.b + 1 FROM data GROUP BY a.b + 1"), Row(2))
dropTempTable("data")
}
test("SPARK-4432 Fix attribute reference resolution error when using ORDER BY") {
checkAnswer(
sql("SELECT a + b FROM testData2 ORDER BY a"),
- Seq(2, 3, 3 ,4 ,4 ,5).map(Seq(_))
+ Seq(2, 3, 3 ,4 ,4 ,5).map(Row(_))
)
}
test("oder by asc by default when not specify ascending and descending") {
checkAnswer(
sql("SELECT a, b FROM testData2 ORDER BY a desc, b"),
- Seq((3, 1), (3, 2), (2, 1), (2,2), (1, 1), (1, 2))
+ Seq(Row(3, 1), Row(3, 2), Row(2, 1), Row(2,2), Row(1, 1), Row(1, 2))
)
}
@@ -1021,13 +1025,13 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
rdd2.registerTempTable("nulldata2")
checkAnswer(sql("SELECT nulldata1.key FROM nulldata1 join " +
"nulldata2 on nulldata1.value <=> nulldata2.value"),
- (1 to 2).map(i => Seq(i)))
+ (1 to 2).map(i => Row(i)))
}
test("Multi-column COUNT(DISTINCT ...)") {
val data = TestData(1,"val_1") :: TestData(2,"val_2") :: Nil
val rdd = sparkContext.parallelize((0 to 1).map(i => data(i)))
rdd.registerTempTable("distinctData")
- checkAnswer(sql("SELECT COUNT(DISTINCT key,value) FROM distinctData"), 2)
+ checkAnswer(sql("SELECT COUNT(DISTINCT key,value) FROM distinctData"), Row(2))
}
}
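
The self-join test earlier in this suite replaces the old Seq concatenation, (row ++ row).toSeq, with Row.merge, which concatenates the fields of its argument Rows into a single Row. A minimal sketch of that idiom, under the same import assumption as above:

    import org.apache.spark.sql.Row

    object RowMergeSketch extends App {
      val row = Row(1, "one")
      // Row.merge concatenates fields, standing in for the Seq-based
      // (row ++ row).toSeq now that Row is no longer a Seq.
      val doubled = Row.merge(row, row)
      assert(doubled == Row(1, "one", 1, "one"))
    }
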
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/ScalaReflectionRelationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ScalaReflectionRelationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ScalaReflectionRelationSuite.scala
index ee381da..a015884 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ScalaReflectionRelationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ScalaReflectionRelationSuite.scala
@@ -82,7 +82,7 @@ class ScalaReflectionRelationSuite extends FunSuite {
rdd.registerTempTable("reflectData")
assert(sql("SELECT * FROM reflectData").collect().head ===
- Seq("a", 1, 1L, 1.toFloat, 1.toDouble, 1.toShort, 1.toByte, true,
+ Row("a", 1, 1L, 1.toFloat, 1.toDouble, 1.toShort, 1.toByte, true,
new java.math.BigDecimal(1), new Date(12345), new Timestamp(12345), Seq(1,2,3)))
}
@@ -91,7 +91,7 @@ class ScalaReflectionRelationSuite extends FunSuite {
val rdd = sparkContext.parallelize(data :: Nil)
rdd.registerTempTable("reflectNullData")
- assert(sql("SELECT * FROM reflectNullData").collect().head === Seq.fill(7)(null))
+ assert(sql("SELECT * FROM reflectNullData").collect().head === Row.fromSeq(Seq.fill(7)(null)))
}
test("query case class RDD with Nones") {
@@ -99,7 +99,7 @@ class ScalaReflectionRelationSuite extends FunSuite {
val rdd = sparkContext.parallelize(data :: Nil)
rdd.registerTempTable("reflectOptionalData")
- assert(sql("SELECT * FROM reflectOptionalData").collect().head === Seq.fill(7)(null))
+ assert(sql("SELECT * FROM reflectOptionalData").collect().head === Row.fromSeq(Seq.fill(7)(null)))
}
// Equality is broken for Arrays, so we test that separately.
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
index 9be0b38..be2b34d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
@@ -42,8 +42,8 @@ class ColumnStatsSuite extends FunSuite {
test(s"$columnStatsName: empty") {
val columnStats = columnStatsClass.newInstance()
- columnStats.collectedStatistics.zip(initialStatistics).foreach { case (actual, expected) =>
- assert(actual === expected)
+ columnStats.collectedStatistics.toSeq.zip(initialStatistics.toSeq).foreach {
+ case (actual, expected) => assert(actual === expected)
}
}
@@ -54,7 +54,7 @@ class ColumnStatsSuite extends FunSuite {
val rows = Seq.fill(10)(makeRandomRow(columnType)) ++ Seq.fill(10)(makeNullRow(1))
rows.foreach(columnStats.gatherStats(_, 0))
- val values = rows.take(10).map(_.head.asInstanceOf[T#JvmType])
+ val values = rows.take(10).map(_(0).asInstanceOf[T#JvmType])
val ordering = columnType.dataType.ordering.asInstanceOf[Ordering[T#JvmType]]
val stats = columnStats.collectedStatistics
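
With Seq inheritance gone, collection methods such as head no longer exist on Row, so this suite (and the partition-pruning and compression suites below) switch from _.head to positional _(0). A one-line sketch of the replacement:

    import org.apache.spark.sql.Row

    object RowIndexingSketch extends App {
      val row = Row(42, "x")
      // row.head was a Seq method and no longer compiles; apply(i) replaces it.
      assert(row(0) == 42)
    }
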
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
index d94729b..e61f3c3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
@@ -49,7 +49,7 @@ class InMemoryColumnarQuerySuite extends QueryTest {
checkAnswer(scan, testData.collect().map {
case Row(key: Int, value: String) => value -> key
- }.toSeq)
+ }.map(Row.fromTuple))
}
test("SPARK-1436 regression: in-memory columns must be able to be accessed multiple times") {
@@ -63,49 +63,49 @@ class InMemoryColumnarQuerySuite extends QueryTest {
test("SPARK-1678 regression: compression must not lose repeated values") {
checkAnswer(
sql("SELECT * FROM repeatedData"),
- repeatedData.collect().toSeq)
+ repeatedData.collect().toSeq.map(Row.fromTuple))
cacheTable("repeatedData")
checkAnswer(
sql("SELECT * FROM repeatedData"),
- repeatedData.collect().toSeq)
+ repeatedData.collect().toSeq.map(Row.fromTuple))
}
test("with null values") {
checkAnswer(
sql("SELECT * FROM nullableRepeatedData"),
- nullableRepeatedData.collect().toSeq)
+ nullableRepeatedData.collect().toSeq.map(Row.fromTuple))
cacheTable("nullableRepeatedData")
checkAnswer(
sql("SELECT * FROM nullableRepeatedData"),
- nullableRepeatedData.collect().toSeq)
+ nullableRepeatedData.collect().toSeq.map(Row.fromTuple))
}
test("SPARK-2729 regression: timestamp data type") {
checkAnswer(
sql("SELECT time FROM timestamps"),
- timestamps.collect().toSeq)
+ timestamps.collect().toSeq.map(Row.fromTuple))
cacheTable("timestamps")
checkAnswer(
sql("SELECT time FROM timestamps"),
- timestamps.collect().toSeq)
+ timestamps.collect().toSeq.map(Row.fromTuple))
}
test("SPARK-3320 regression: batched column buffer building should work with empty partitions") {
checkAnswer(
sql("SELECT * FROM withEmptyParts"),
- withEmptyParts.collect().toSeq)
+ withEmptyParts.collect().toSeq.map(Row.fromTuple))
cacheTable("withEmptyParts")
checkAnswer(
sql("SELECT * FROM withEmptyParts"),
- withEmptyParts.collect().toSeq)
+ withEmptyParts.collect().toSeq.map(Row.fromTuple))
}
test("SPARK-4182 Caching complex types") {
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala
index 592cafb..c3a3f8d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala
@@ -108,7 +108,7 @@ class PartitionBatchPruningSuite extends FunSuite with BeforeAndAfterAll with Be
val queryExecution = schemaRdd.queryExecution
assertResult(expectedQueryResult.toArray, s"Wrong query result: $queryExecution") {
- schemaRdd.collect().map(_.head).toArray
+ schemaRdd.collect().map(_(0)).toArray
}
val (readPartitions, readBatches) = schemaRdd.queryExecution.executedPlan.collect {
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
index d9e488e..8b518f0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
@@ -34,7 +34,7 @@ class BooleanBitSetSuite extends FunSuite {
val builder = TestCompressibleColumnBuilder(new NoopColumnStats, BOOLEAN, BooleanBitSet)
val rows = Seq.fill[Row](count)(makeRandomRow(BOOLEAN))
- val values = rows.map(_.head)
+ val values = rows.map(_(0))
rows.foreach(builder.appendFrom(_, 0))
val buffer = builder.build()
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/execution/TgfSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/TgfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/TgfSuite.scala
index 2cab5e0..272c0d4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/TgfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/TgfSuite.scala
@@ -59,7 +59,7 @@ class TgfSuite extends QueryTest {
checkAnswer(
inputData.generate(ExampleTGF()),
Seq(
- "michael is 29 years old" :: Nil,
- "Next year, michael will be 30 years old" :: Nil))
+ Row("michael is 29 years old"),
+ Row("Next year, michael will be 30 years old")))
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
index 2bc9aed..94d14ac 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
@@ -229,13 +229,13 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select * from jsonTable"),
- (new java.math.BigDecimal("92233720368547758070"),
- true,
- 1.7976931348623157E308,
- 10,
- 21474836470L,
- null,
- "this is a simple string.") :: Nil
+ Row(new java.math.BigDecimal("92233720368547758070"),
+ true,
+ 1.7976931348623157E308,
+ 10,
+ 21474836470L,
+ null,
+ "this is a simple string.")
)
}
@@ -271,48 +271,49 @@ class JsonSuite extends QueryTest {
// Access elements of a primitive array.
checkAnswer(
sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from jsonTable"),
- ("str1", "str2", null) :: Nil
+ Row("str1", "str2", null)
)
// Access an array of null values.
checkAnswer(
sql("select arrayOfNull from jsonTable"),
- Seq(Seq(null, null, null, null)) :: Nil
+ Row(Seq(null, null, null, null))
)
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] from jsonTable"),
- (new java.math.BigDecimal("922337203685477580700"),
- new java.math.BigDecimal("-922337203685477580800"), null) :: Nil
+ Row(new java.math.BigDecimal("922337203685477580700"),
+ new java.math.BigDecimal("-922337203685477580800"), null)
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray1[0], arrayOfArray1[1] from jsonTable"),
- (Seq("1", "2", "3"), Seq("str1", "str2")) :: Nil
+ Row(Seq("1", "2", "3"), Seq("str1", "str2"))
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray2[0], arrayOfArray2[1] from jsonTable"),
- (Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1)) :: Nil
+ Row(Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1))
)
// Access elements of an array inside a field with the type of ArrayType(ArrayType).
checkAnswer(
sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from jsonTable"),
- ("str2", 2.1) :: Nil
+ Row("str2", 2.1)
)
// Access elements of an array of structs.
checkAnswer(
sql("select arrayOfStruct[0], arrayOfStruct[1], arrayOfStruct[2], arrayOfStruct[3] " +
"from jsonTable"),
- (true :: "str1" :: null :: Nil,
- false :: null :: null :: Nil,
- null :: null :: null :: Nil,
- null) :: Nil
+ Row(
+ Row(true, "str1", null),
+ Row(false, null, null),
+ Row(null, null, null),
+ null)
)
// Access a struct and fields inside of it.
@@ -327,13 +328,13 @@ class JsonSuite extends QueryTest {
// Access an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1, structWithArrayFields.field2 from jsonTable"),
- (Seq(4, 5, 6), Seq("str1", "str2")) :: Nil
+ Row(Seq(4, 5, 6), Seq("str1", "str2"))
)
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] from jsonTable"),
- (5, null) :: Nil
+ Row(5, null)
)
}
@@ -344,14 +345,14 @@ class JsonSuite extends QueryTest {
// Right now, "field1" and "field2" are treated as aliases. We should fix it.
checkAnswer(
sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from jsonTable"),
- (true, "str1") :: Nil
+ Row(true, "str1")
)
// Right now, the analyzer cannot resolve arrayOfStruct.field1 and arrayOfStruct.field2.
// Getting all values of a specific field from an array of structs.
checkAnswer(
sql("select arrayOfStruct.field1, arrayOfStruct.field2 from jsonTable"),
- (Seq(true, false), Seq("str1", null)) :: Nil
+ Row(Seq(true, false), Seq("str1", null))
)
}
@@ -372,57 +373,57 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select * from jsonTable"),
- ("true", 11L, null, 1.1, "13.1", "str1") ::
- ("12", null, new java.math.BigDecimal("21474836470.9"), null, null, "true") ::
- ("false", 21474836470L, new java.math.BigDecimal("92233720368547758070"), 100, "str1", "false") ::
- (null, 21474836570L, new java.math.BigDecimal("1.1"), 21474836470L, "92233720368547758070", null) :: Nil
+ Row("true", 11L, null, 1.1, "13.1", "str1") ::
+ Row("12", null, new java.math.BigDecimal("21474836470.9"), null, null, "true") ::
+ Row("false", 21474836470L, new java.math.BigDecimal("92233720368547758070"), 100, "str1", "false") ::
+ Row(null, 21474836570L, new java.math.BigDecimal("1.1"), 21474836470L, "92233720368547758070", null) :: Nil
)
// Number and Boolean conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_bool - 10 from jsonTable where num_bool > 11"),
- 2
+ Row(2)
)
// Widening to LongType
checkAnswer(
sql("select num_num_1 - 100 from jsonTable where num_num_1 > 11"),
- Seq(21474836370L) :: Seq(21474836470L) :: Nil
+ Row(21474836370L) :: Row(21474836470L) :: Nil
)
checkAnswer(
sql("select num_num_1 - 100 from jsonTable where num_num_1 > 10"),
- Seq(-89) :: Seq(21474836370L) :: Seq(21474836470L) :: Nil
+ Row(-89) :: Row(21474836370L) :: Row(21474836470L) :: Nil
)
// Widening to DecimalType
checkAnswer(
sql("select num_num_2 + 1.2 from jsonTable where num_num_2 > 1.1"),
- Seq(new java.math.BigDecimal("21474836472.1")) :: Seq(new java.math.BigDecimal("92233720368547758071.2")) :: Nil
+ Row(new java.math.BigDecimal("21474836472.1")) :: Row(new java.math.BigDecimal("92233720368547758071.2")) :: Nil
)
// Widening to DoubleType
checkAnswer(
sql("select num_num_3 + 1.2 from jsonTable where num_num_3 > 1.1"),
- Seq(101.2) :: Seq(21474836471.2) :: Nil
+ Row(101.2) :: Row(21474836471.2) :: Nil
)
// Number and String conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_str + 1.2 from jsonTable where num_str > 14"),
- 92233720368547758071.2
+ Row(92233720368547758071.2)
)
// Number and String conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_str + 1.2 from jsonTable where num_str > 92233720368547758060"),
- new java.math.BigDecimal("92233720368547758061.2").doubleValue
+ Row(new java.math.BigDecimal("92233720368547758061.2").doubleValue)
)
// String and Boolean conflict: resolve the type as string.
checkAnswer(
sql("select * from jsonTable where str_bool = 'str1'"),
- ("true", 11L, null, 1.1, "13.1", "str1") :: Nil
+ Row("true", 11L, null, 1.1, "13.1", "str1")
)
}
@@ -434,24 +435,24 @@ class JsonSuite extends QueryTest {
// Number and Boolean conflict: resolve the type as boolean in this query.
checkAnswer(
sql("select num_bool from jsonTable where NOT num_bool"),
- false
+ Row(false)
)
checkAnswer(
sql("select str_bool from jsonTable where NOT str_bool"),
- false
+ Row(false)
)
// Right now, the analyzer does not know that num_bool should be treated as a boolean.
// Number and Boolean conflict: resolve the type as boolean in this query.
checkAnswer(
sql("select num_bool from jsonTable where num_bool"),
- true
+ Row(true)
)
checkAnswer(
sql("select str_bool from jsonTable where str_bool"),
- false
+ Row(false)
)
// The plan of the following DSL is
@@ -464,7 +465,7 @@ class JsonSuite extends QueryTest {
jsonSchemaRDD.
where('num_str > BigDecimal("92233720368547758060")).
select('num_str + 1.2 as Symbol("num")),
- new java.math.BigDecimal("92233720368547758061.2")
+ Row(new java.math.BigDecimal("92233720368547758061.2"))
)
// The following test will fail. The type of num_str is StringType.
@@ -475,7 +476,7 @@ class JsonSuite extends QueryTest {
// Number and String conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_str + 1.2 from jsonTable where num_str > 13"),
- Seq(14.3) :: Seq(92233720368547758071.2) :: Nil
+ Row(14.3) :: Row(92233720368547758071.2) :: Nil
)
}
@@ -496,10 +497,10 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select * from jsonTable"),
- (Seq(), "11", "[1,2,3]", Seq(null), "[]") ::
- (null, """{"field":false}""", null, null, "{}") ::
- (Seq(4, 5, 6), null, "str", Seq(null), "[7,8,9]") ::
- (Seq(7), "{}","[str1,str2,33]", Seq("str"), """{"field":true}""") :: Nil
+ Row(Seq(), "11", "[1,2,3]", Row(null), "[]") ::
+ Row(null, """{"field":false}""", null, null, "{}") ::
+ Row(Seq(4, 5, 6), null, "str", Row(null), "[7,8,9]") ::
+ Row(Seq(7), "{}","[str1,str2,33]", Row("str"), """{"field":true}""") :: Nil
)
}
@@ -518,16 +519,16 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select * from jsonTable"),
- Seq(Seq("1", "1.1", "true", null, "[]", "{}", "[2,3,4]",
- """{"field":"str"}"""), Seq(Seq(214748364700L), Seq(1)), null) ::
- Seq(null, null, Seq("""{"field":"str"}""", """{"field":1}""")) ::
- Seq(null, null, Seq("1", "2", "3")) :: Nil
+ Row(Seq("1", "1.1", "true", null, "[]", "{}", "[2,3,4]",
+ """{"field":"str"}"""), Seq(Row(214748364700L), Row(1)), null) ::
+ Row(null, null, Seq("""{"field":"str"}""", """{"field":1}""")) ::
+ Row(null, null, Seq("1", "2", "3")) :: Nil
)
// Treat an element as a number.
checkAnswer(
sql("select array1[0] + 1 from jsonTable where array1 is not null"),
- 2
+ Row(2)
)
}
@@ -568,13 +569,13 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select * from jsonTable"),
- (new java.math.BigDecimal("92233720368547758070"),
+ Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
- "this is a simple string.") :: Nil
+ "this is a simple string.")
)
}
@@ -594,13 +595,13 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select * from jsonTableSQL"),
- (new java.math.BigDecimal("92233720368547758070"),
+ Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
- "this is a simple string.") :: Nil
+ "this is a simple string.")
)
}
@@ -626,13 +627,13 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select * from jsonTable1"),
- (new java.math.BigDecimal("92233720368547758070"),
+ Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
- "this is a simple string.") :: Nil
+ "this is a simple string.")
)
val jsonSchemaRDD2 = jsonRDD(primitiveFieldAndType, schema)
@@ -643,13 +644,13 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select * from jsonTable2"),
- (new java.math.BigDecimal("92233720368547758070"),
+ Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
- "this is a simple string.") :: Nil
+ "this is a simple string.")
)
}
@@ -659,7 +660,7 @@ class JsonSuite extends QueryTest {
checkAnswer(
sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from jsonTable"),
- (true, "str1") :: Nil
+ Row(true, "str1")
)
checkAnswer(
sql(
@@ -667,7 +668,7 @@ class JsonSuite extends QueryTest {
|select complexArrayOfStruct[0].field1[1].inner2[0], complexArrayOfStruct[1].field2[0][1]
|from jsonTable
""".stripMargin),
- ("str2", 6) :: Nil
+ Row("str2", 6)
)
}
@@ -681,7 +682,7 @@ class JsonSuite extends QueryTest {
|select arrayOfArray1[0][0][0], arrayOfArray1[1][0][1], arrayOfArray1[1][1][0]
|from jsonTable
""".stripMargin),
- (5, 7, 8) :: Nil
+ Row(5, 7, 8)
)
checkAnswer(
sql(
@@ -690,7 +691,7 @@ class JsonSuite extends QueryTest {
|arrayOfArray2[1][1][1].inner2[0], arrayOfArray2[2][0][0].inner3[0][0].inner4
|from jsonTable
""".stripMargin),
- ("str1", Nil, "str4", 2) :: Nil
+ Row("str1", Nil, "str4", 2)
)
}
@@ -704,10 +705,10 @@ class JsonSuite extends QueryTest {
|select a, b, c
|from jsonTable
""".stripMargin),
- ("str_a_1", null, null) ::
- ("str_a_2", null, null) ::
- (null, "str_b_3", null) ::
- ("str_a_4", "str_b_4", "str_c_4") :: Nil
+ Row("str_a_1", null, null) ::
+ Row("str_a_2", null, null) ::
+ Row(null, "str_b_3", null) ::
+ Row("str_a_4", "str_b_4", "str_c_4") :: Nil
)
}
@@ -734,12 +735,12 @@ class JsonSuite extends QueryTest {
|SELECT a, b, c, _unparsed
|FROM jsonTable
""".stripMargin),
- (null, null, null, "{") ::
- (null, null, null, "") ::
- (null, null, null, """{"a":1, b:2}""") ::
- (null, null, null, """{"a":{, b:3}""") ::
- ("str_a_4", "str_b_4", "str_c_4", null) ::
- (null, null, null, "]") :: Nil
+ Row(null, null, null, "{") ::
+ Row(null, null, null, "") ::
+ Row(null, null, null, """{"a":1, b:2}""") ::
+ Row(null, null, null, """{"a":{, b:3}""") ::
+ Row("str_a_4", "str_b_4", "str_c_4", null) ::
+ Row(null, null, null, "]") :: Nil
)
checkAnswer(
@@ -749,7 +750,7 @@ class JsonSuite extends QueryTest {
|FROM jsonTable
|WHERE _unparsed IS NULL
""".stripMargin),
- ("str_a_4", "str_b_4", "str_c_4") :: Nil
+ Row("str_a_4", "str_b_4", "str_c_4")
)
checkAnswer(
@@ -759,11 +760,11 @@ class JsonSuite extends QueryTest {
|FROM jsonTable
|WHERE _unparsed IS NOT NULL
""".stripMargin),
- Seq("{") ::
- Seq("") ::
- Seq("""{"a":1, b:2}""") ::
- Seq("""{"a":{, b:3}""") ::
- Seq("]") :: Nil
+ Row("{") ::
+ Row("") ::
+ Row("""{"a":1, b:2}""") ::
+ Row("""{"a":{, b:3}""") ::
+ Row("]") :: Nil
)
TestSQLContext.setConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD, oldColumnNameOfCorruptRecord)
@@ -793,10 +794,10 @@ class JsonSuite extends QueryTest {
|SELECT field1, field2, field3, field4
|FROM jsonTable
""".stripMargin),
- Seq(Seq(Seq(null), Seq(Seq(Seq("Test")))), null, null, null) ::
- Seq(null, Seq(null, Seq(Seq(1))), null, null) ::
- Seq(null, null, Seq(Seq(null), Seq(Seq("2"))), null) ::
- Seq(null, null, null, Seq(Seq(null, Seq(1, 2, 3)))) :: Nil
+ Row(Seq(Seq(null), Seq(Seq(Seq("Test")))), null, null, null) ::
+ Row(null, Seq(null, Seq(Row(1))), null, null) ::
+ Row(null, null, Seq(Seq(null), Seq(Row("2"))), null) ::
+ Row(null, null, null, Seq(Seq(null, Seq(1, 2, 3)))) :: Nil
)
}
@@ -851,12 +852,12 @@ class JsonSuite extends QueryTest {
primTable.registerTempTable("primativeTable")
checkAnswer(
sql("select * from primativeTable"),
- (new java.math.BigDecimal("92233720368547758070"),
+ Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
- "this is a simple string.") :: Nil
+ "this is a simple string.")
)
val complexJsonSchemaRDD = jsonRDD(complexFieldAndType1)
@@ -865,38 +866,38 @@ class JsonSuite extends QueryTest {
// Access elements of a primitive array.
checkAnswer(
sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from complexTable"),
- ("str1", "str2", null) :: Nil
+ Row("str1", "str2", null)
)
// Access an array of null values.
checkAnswer(
sql("select arrayOfNull from complexTable"),
- Seq(Seq(null, null, null, null)) :: Nil
+ Row(Seq(null, null, null, null))
)
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] from complexTable"),
- (new java.math.BigDecimal("922337203685477580700"),
- new java.math.BigDecimal("-922337203685477580800"), null) :: Nil
+ Row(new java.math.BigDecimal("922337203685477580700"),
+ new java.math.BigDecimal("-922337203685477580800"), null)
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray1[0], arrayOfArray1[1] from complexTable"),
- (Seq("1", "2", "3"), Seq("str1", "str2")) :: Nil
+ Row(Seq("1", "2", "3"), Seq("str1", "str2"))
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray2[0], arrayOfArray2[1] from complexTable"),
- (Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1)) :: Nil
+ Row(Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1))
)
// Access elements of an array inside a field with the type of ArrayType(ArrayType).
checkAnswer(
sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from complexTable"),
- ("str2", 2.1) :: Nil
+ Row("str2", 2.1)
)
// Access a struct and fields inside of it.
@@ -911,13 +912,13 @@ class JsonSuite extends QueryTest {
// Access an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1, structWithArrayFields.field2 from complexTable"),
- (Seq(4, 5, 6), Seq("str1", "str2")) :: Nil
+ Row(Seq(4, 5, 6), Seq("str1", "str2"))
)
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] from complexTable"),
- (5, null) :: Nil
+ Row(5, null)
)
}
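
A detail worth noting in the JsonSuite hunks above: struct-typed values now nest as Rows inside the expected answer, while JSON arrays remain Scala Seqs, so an array of structs becomes a Seq of Rows (see the arrayOfStruct hunks). A small sketch of the nesting, under the same assumptions as earlier:

    import org.apache.spark.sql.Row

    object NestedRowSketch extends App {
      // An array column holding structs: a Seq of Rows inside the outer Row.
      val expected = Row(Seq(Row(214748364700L), Row(1)), "str")
      val structs  = expected(0).asInstanceOf[Seq[Row]]
      assert(structs.head == Row(214748364700L))
    }
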
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala
index 4c3a045..4ad8c47 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala
@@ -46,7 +46,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
predicate: Predicate,
filterClass: Class[_ <: FilterPredicate],
checker: (SchemaRDD, Any) => Unit,
- expectedResult: => Any): Unit = {
+ expectedResult: Any): Unit = {
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED -> "true") {
val query = rdd.select(output.map(_.attr): _*).where(predicate)
@@ -65,11 +65,20 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
}
}
- private def checkFilterPushdown
+ private def checkFilterPushdown1
(rdd: SchemaRDD, output: Symbol*)
(predicate: Predicate, filterClass: Class[_ <: FilterPredicate])
- (expectedResult: => Any): Unit = {
- checkFilterPushdown(rdd, output, predicate, filterClass, checkAnswer _, expectedResult)
+ (expectedResult: => Seq[Row]): Unit = {
+ checkFilterPushdown(rdd, output, predicate, filterClass,
+ (query, expected) => checkAnswer(query, expected.asInstanceOf[Seq[Row]]), expectedResult)
+ }
+
+ private def checkFilterPushdown
+ (rdd: SchemaRDD, output: Symbol*)
+ (predicate: Predicate, filterClass: Class[_ <: FilterPredicate])
+ (expectedResult: Int): Unit = {
+ checkFilterPushdown(rdd, output, predicate, filterClass,
+ (query, expected) => checkAnswer(query, expected.asInstanceOf[Seq[Row]]), Seq(Row(expectedResult)))
}
def checkBinaryFilterPushdown
@@ -89,27 +98,25 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
test("filter pushdown - boolean") {
withParquetRDD((true :: false :: Nil).map(b => Tuple1.apply(Option(b)))) { rdd =>
- checkFilterPushdown(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.Boolean]])(Seq.empty[Row])
- checkFilterPushdown(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.Boolean]]) {
+ checkFilterPushdown1(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.Boolean]])(Seq.empty[Row])
+ checkFilterPushdown1(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.Boolean]]) {
Seq(Row(true), Row(false))
}
- checkFilterPushdown(rdd, '_1)('_1 === true, classOf[Eq[java.lang.Boolean]])(true)
- checkFilterPushdown(rdd, '_1)('_1 !== true, classOf[Operators.NotEq[java.lang.Boolean]]) {
- false
- }
+ checkFilterPushdown1(rdd, '_1)('_1 === true, classOf[Eq[java.lang.Boolean]])(Seq(Row(true)))
+ checkFilterPushdown1(rdd, '_1)('_1 !== true, classOf[Operators.NotEq[java.lang.Boolean]])(Seq(Row(false)))
}
}
test("filter pushdown - integer") {
withParquetRDD((1 to 4).map(i => Tuple1(Option(i)))) { rdd =>
- checkFilterPushdown(rdd, '_1)('_1.isNull, classOf[Eq[Integer]])(Seq.empty[Row])
- checkFilterPushdown(rdd, '_1)('_1.isNotNull, classOf[NotEq[Integer]]) {
+ checkFilterPushdown1(rdd, '_1)('_1.isNull, classOf[Eq[Integer]])(Seq.empty[Row])
+ checkFilterPushdown1(rdd, '_1)('_1.isNotNull, classOf[NotEq[Integer]]) {
(1 to 4).map(Row.apply(_))
}
checkFilterPushdown(rdd, '_1)('_1 === 1, classOf[Eq[Integer]])(1)
- checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[Integer]]) {
+ checkFilterPushdown1(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[Integer]]) {
(2 to 4).map(Row.apply(_))
}
@@ -126,7 +133,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
checkFilterPushdown(rdd, '_1)(!('_1 < 4), classOf[Operators.GtEq[Integer]])(4)
checkFilterPushdown(rdd, '_1)('_1 > 2 && '_1 < 4, classOf[Operators.And])(3)
- checkFilterPushdown(rdd, '_1)('_1 < 2 || '_1 > 3, classOf[Operators.Or]) {
+ checkFilterPushdown1(rdd, '_1)('_1 < 2 || '_1 > 3, classOf[Operators.Or]) {
Seq(Row(1), Row(4))
}
}
@@ -134,13 +141,13 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
test("filter pushdown - long") {
withParquetRDD((1 to 4).map(i => Tuple1(Option(i.toLong)))) { rdd =>
- checkFilterPushdown(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.Long]])(Seq.empty[Row])
- checkFilterPushdown(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.Long]]) {
+ checkFilterPushdown1(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.Long]])(Seq.empty[Row])
+ checkFilterPushdown1(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.Long]]) {
(1 to 4).map(Row.apply(_))
}
checkFilterPushdown(rdd, '_1)('_1 === 1, classOf[Eq[java.lang.Long]])(1)
- checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Long]]) {
+ checkFilterPushdown1(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Long]]) {
(2 to 4).map(Row.apply(_))
}
@@ -157,7 +164,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
checkFilterPushdown(rdd, '_1)(!('_1 < 4), classOf[Operators.GtEq[java.lang.Long]])(4)
checkFilterPushdown(rdd, '_1)('_1 > 2 && '_1 < 4, classOf[Operators.And])(3)
- checkFilterPushdown(rdd, '_1)('_1 < 2 || '_1 > 3, classOf[Operators.Or]) {
+ checkFilterPushdown1(rdd, '_1)('_1 < 2 || '_1 > 3, classOf[Operators.Or]) {
Seq(Row(1), Row(4))
}
}
@@ -165,13 +172,13 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
test("filter pushdown - float") {
withParquetRDD((1 to 4).map(i => Tuple1(Option(i.toFloat)))) { rdd =>
- checkFilterPushdown(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.Float]])(Seq.empty[Row])
- checkFilterPushdown(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.Float]]) {
+ checkFilterPushdown1(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.Float]])(Seq.empty[Row])
+ checkFilterPushdown1(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.Float]]) {
(1 to 4).map(Row.apply(_))
}
checkFilterPushdown(rdd, '_1)('_1 === 1, classOf[Eq[java.lang.Float]])(1)
- checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Float]]) {
+ checkFilterPushdown1(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Float]]) {
(2 to 4).map(Row.apply(_))
}
@@ -188,7 +195,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
checkFilterPushdown(rdd, '_1)(!('_1 < 4), classOf[Operators.GtEq[java.lang.Float]])(4)
checkFilterPushdown(rdd, '_1)('_1 > 2 && '_1 < 4, classOf[Operators.And])(3)
- checkFilterPushdown(rdd, '_1)('_1 < 2 || '_1 > 3, classOf[Operators.Or]) {
+ checkFilterPushdown1(rdd, '_1)('_1 < 2 || '_1 > 3, classOf[Operators.Or]) {
Seq(Row(1), Row(4))
}
}
@@ -196,13 +203,13 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
test("filter pushdown - double") {
withParquetRDD((1 to 4).map(i => Tuple1(Option(i.toDouble)))) { rdd =>
- checkFilterPushdown(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.Double]])(Seq.empty[Row])
- checkFilterPushdown(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.Double]]) {
+ checkFilterPushdown1(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.Double]])(Seq.empty[Row])
+ checkFilterPushdown1(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.Double]]) {
(1 to 4).map(Row.apply(_))
}
checkFilterPushdown(rdd, '_1)('_1 === 1, classOf[Eq[java.lang.Double]])(1)
- checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Double]]) {
+ checkFilterPushdown1(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Double]]) {
(2 to 4).map(Row.apply(_))
}
@@ -219,7 +226,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
checkFilterPushdown(rdd, '_1)(!('_1 < 4), classOf[Operators.GtEq[java.lang.Double]])(4)
checkFilterPushdown(rdd, '_1)('_1 > 2 && '_1 < 4, classOf[Operators.And])(3)
- checkFilterPushdown(rdd, '_1)('_1 < 2 || '_1 > 3, classOf[Operators.Or]) {
+ checkFilterPushdown1(rdd, '_1)('_1 < 2 || '_1 > 3, classOf[Operators.Or]) {
Seq(Row(1), Row(4))
}
}
@@ -227,30 +234,30 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
test("filter pushdown - string") {
withParquetRDD((1 to 4).map(i => Tuple1(i.toString))) { rdd =>
- checkFilterPushdown(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.String]])(Seq.empty[Row])
- checkFilterPushdown(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.String]]) {
+ checkFilterPushdown1(rdd, '_1)('_1.isNull, classOf[Eq[java.lang.String]])(Seq.empty[Row])
+ checkFilterPushdown1(rdd, '_1)('_1.isNotNull, classOf[NotEq[java.lang.String]]) {
(1 to 4).map(i => Row.apply(i.toString))
}
- checkFilterPushdown(rdd, '_1)('_1 === "1", classOf[Eq[String]])("1")
- checkFilterPushdown(rdd, '_1)('_1 !== "1", classOf[Operators.NotEq[String]]) {
+ checkFilterPushdown1(rdd, '_1)('_1 === "1", classOf[Eq[String]])(Seq(Row("1")))
+ checkFilterPushdown1(rdd, '_1)('_1 !== "1", classOf[Operators.NotEq[String]]) {
(2 to 4).map(i => Row.apply(i.toString))
}
- checkFilterPushdown(rdd, '_1)('_1 < "2", classOf[Lt [java.lang.String]])("1")
- checkFilterPushdown(rdd, '_1)('_1 > "3", classOf[Gt [java.lang.String]])("4")
- checkFilterPushdown(rdd, '_1)('_1 <= "1", classOf[LtEq[java.lang.String]])("1")
- checkFilterPushdown(rdd, '_1)('_1 >= "4", classOf[GtEq[java.lang.String]])("4")
+ checkFilterPushdown1(rdd, '_1)('_1 < "2", classOf[Lt [java.lang.String]])(Seq(Row("1")))
+ checkFilterPushdown1(rdd, '_1)('_1 > "3", classOf[Gt [java.lang.String]])(Seq(Row("4")))
+ checkFilterPushdown1(rdd, '_1)('_1 <= "1", classOf[LtEq[java.lang.String]])(Seq(Row("1")))
+ checkFilterPushdown1(rdd, '_1)('_1 >= "4", classOf[GtEq[java.lang.String]])(Seq(Row("4")))
- checkFilterPushdown(rdd, '_1)(Literal("1") === '_1, classOf[Eq [java.lang.String]])("1")
- checkFilterPushdown(rdd, '_1)(Literal("2") > '_1, classOf[Lt [java.lang.String]])("1")
- checkFilterPushdown(rdd, '_1)(Literal("3") < '_1, classOf[Gt [java.lang.String]])("4")
- checkFilterPushdown(rdd, '_1)(Literal("1") >= '_1, classOf[LtEq[java.lang.String]])("1")
- checkFilterPushdown(rdd, '_1)(Literal("4") <= '_1, classOf[GtEq[java.lang.String]])("4")
+ checkFilterPushdown1(rdd, '_1)(Literal("1") === '_1, classOf[Eq [java.lang.String]])(Seq(Row("1")))
+ checkFilterPushdown1(rdd, '_1)(Literal("2") > '_1, classOf[Lt [java.lang.String]])(Seq(Row("1")))
+ checkFilterPushdown1(rdd, '_1)(Literal("3") < '_1, classOf[Gt [java.lang.String]])(Seq(Row("4")))
+ checkFilterPushdown1(rdd, '_1)(Literal("1") >= '_1, classOf[LtEq[java.lang.String]])(Seq(Row("1")))
+ checkFilterPushdown1(rdd, '_1)(Literal("4") <= '_1, classOf[GtEq[java.lang.String]])(Seq(Row("4")))
- checkFilterPushdown(rdd, '_1)(!('_1 < "4"), classOf[Operators.GtEq[java.lang.String]])("4")
- checkFilterPushdown(rdd, '_1)('_1 > "2" && '_1 < "4", classOf[Operators.And])("3")
- checkFilterPushdown(rdd, '_1)('_1 < "2" || '_1 > "3", classOf[Operators.Or]) {
+ checkFilterPushdown1(rdd, '_1)(!('_1 < "4"), classOf[Operators.GtEq[java.lang.String]])(Seq(Row("4")))
+ checkFilterPushdown1(rdd, '_1)('_1 > "2" && '_1 < "4", classOf[Operators.And])(Seq(Row("3")))
+ checkFilterPushdown1(rdd, '_1)('_1 < "2" || '_1 > "3", classOf[Operators.Or]) {
Seq(Row("1"), Row("4"))
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
index 973819a..a57e4e8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
@@ -68,8 +68,8 @@ class ParquetIOSuite extends QueryTest with ParquetTest {
/**
* Writes `data` to a Parquet file, reads it back, and checks the file contents.
*/
- protected def checkParquetFile[T <: Product: ClassTag: TypeTag](data: Seq[T]): Unit = {
- withParquetRDD(data)(checkAnswer(_, data))
+ protected def checkParquetFile[T <: Product : ClassTag: TypeTag](data: Seq[T]): Unit = {
+ withParquetRDD(data)(r => checkAnswer(r, data.map(Row.fromTuple)))
}
test("basic data types (without binary)") {
@@ -143,7 +143,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest {
withParquetRDD(data) { rdd =>
// Structs are converted to `Row`s
checkAnswer(rdd, data.map { case Tuple1(struct) =>
- Tuple1(Row(struct.productIterator.toSeq: _*))
+ Row(Row(struct.productIterator.toSeq: _*))
})
}
}
@@ -153,7 +153,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest {
withParquetRDD(data) { rdd =>
// Structs are converted to `Row`s
checkAnswer(rdd, data.map { case Tuple1(struct) =>
- Tuple1(Row(struct.productIterator.toSeq: _*))
+ Row(Row(struct.productIterator.toSeq: _*))
})
}
}
@@ -162,7 +162,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest {
val data = (1 to 4).map(i => Tuple1(Map(i -> (i, s"val_$i"))))
withParquetRDD(data) { rdd =>
checkAnswer(rdd, data.map { case Tuple1(m) =>
- Tuple1(m.mapValues(struct => Row(struct.productIterator.toSeq: _*)))
+ Row(m.mapValues(struct => Row(struct.productIterator.toSeq: _*)))
})
}
}
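
[Editor's note] The three hunks above share one idea: Catalyst materializes a struct column as a Row, so the expected value wraps an inner Row (built from the case class's fields) in the outer table Row. A minimal sketch with an illustrative struct type:

    import org.apache.spark.sql.Row

    object NestedRowDemo extends App {
      case class Struct(x: Int, y: String)    // stands in for the test's struct

      val data = (1 to 2).map(i => Tuple1(Struct(i, i.toString)))

      // Outer Row = table row, inner Row = the struct value it contains.
      val expected = data.map { case Tuple1(s) =>
        Row(Row(s.productIterator.toSeq: _*))
      }
      expected.foreach(println)               // [[1,1]] then [[2,2]]
    }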
@@ -261,7 +261,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest {
val path = new Path(dir.toURI.toString, "part-r-0.parquet")
makeRawParquetFile(path)
checkAnswer(parquetFile(path.toString), (0 until 10).map { i =>
- (i % 2 == 0, i, i.toLong, i.toFloat, i.toDouble)
+ Row(i % 2 == 0, i, i.toLong, i.toFloat, i.toDouble)
})
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
index 3a073a6..2c5345b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
@@ -28,7 +28,7 @@ import parquet.hadoop.util.ContextUtil
import parquet.io.api.Binary
import org.apache.spark.sql._
-import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.expressions.{Row => _, _}
import org.apache.spark.sql.catalyst.util.getTempFilePath
import org.apache.spark.sql.test.TestSQLContext
import org.apache.spark.sql.test.TestSQLContext._
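
[Editor's note] The import tweak is a compact Scala idiom worth spelling out: renaming a member to `_` in an import selector excludes it from the wildcard, so the unqualified Row in this file keeps resolving to org.apache.spark.sql.Row rather than the catalyst name it would otherwise collide with. A self-contained sketch of the mechanism (the demo objects are hypothetical):

    object Catalyst { val Row = "catalyst Row"; val Literal = "Literal" }
    object Public  { val Row = "public Row" }

    object ImportExclusionDemo extends App {
      import Catalyst.{Row => _, _}   // wildcard import, minus Row
      import Public._

      println(Row)      // "public Row" -- no ambiguity
      println(Literal)  // still visible from Catalyst
    }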
@@ -191,8 +191,8 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
parquetFile(path).registerTempTable("tmp")
checkAnswer(
sql("SELECT key, value FROM tmp WHERE value = 'val_5' OR value = 'val_7'"),
- (5, "val_5") ::
- (7, "val_7") :: Nil)
+ Row(5, "val_5") ::
+ Row(7, "val_7") :: Nil)
Utils.deleteRecursively(file)
@@ -207,8 +207,8 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
parquetFile(path).registerTempTable("tmp")
checkAnswer(
sql("SELECT key, value FROM tmp WHERE value = 'val_5' OR value = 'val_7'"),
- (5, "val_5") ::
- (7, "val_7") :: Nil)
+ Row(5, "val_5") ::
+ Row(7, "val_7") :: Nil)
Utils.deleteRecursively(file)
@@ -223,8 +223,8 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
parquetFile(path).registerTempTable("tmp")
checkAnswer(
sql("SELECT key, value FROM tmp WHERE value = 'val_5' OR value = 'val_7'"),
- (5, "val_5") ::
- (7, "val_7") :: Nil)
+ Row(5, "val_5") ::
+ Row(7, "val_7") :: Nil)
Utils.deleteRecursively(file)
@@ -239,8 +239,8 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
parquetFile(path).registerTempTable("tmp")
checkAnswer(
sql("SELECT key, value FROM tmp WHERE value = 'val_5' OR value = 'val_7'"),
- (5, "val_5") ::
- (7, "val_7") :: Nil)
+ Row(5, "val_5") ::
+ Row(7, "val_7") :: Nil)
Utils.deleteRecursively(file)
@@ -255,8 +255,8 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
parquetFile(path).registerTempTable("tmp")
checkAnswer(
sql("SELECT key, value FROM tmp WHERE value = 'val_5' OR value = 'val_7'"),
- (5, "val_5") ::
- (7, "val_7") :: Nil)
+ Row(5, "val_5") ::
+ Row(7, "val_7") :: Nil)
Utils.deleteRecursively(file)
@@ -303,7 +303,7 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
assert(result.size === 9, "self-join result has incorrect size")
assert(result(0).size === 12, "result row has incorrect size")
result.zipWithIndex.foreach {
- case (row, index) => row.zipWithIndex.foreach {
+ case (row, index) => row.toSeq.zipWithIndex.foreach {
case (field, column) => assert(field != null, s"self-join contains null value in row $index field $column")
}
}
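
[Editor's note] Seq methods such as zipWithIndex are no longer available on Row itself; the conversion point is an explicit toSeq. A minimal sketch of the null-scan above:

    import org.apache.spark.sql.Row

    object RowScanDemo extends App {
      val result: Array[Row] = Array(Row(1, "a"), Row(2, "b"))

      result.zipWithIndex.foreach { case (row, index) =>
        row.toSeq.zipWithIndex.foreach { case (field, column) =>
          assert(field != null, s"null value in row $index field $column")
        }
      }
      println("no nulls found")
    }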
@@ -423,7 +423,7 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
val readFile = parquetFile(path)
val rdd_saved = readFile.collect()
- assert(rdd_saved(0) === Seq.fill(5)(null))
+ assert(rdd_saved(0) === Row(null, null, null, null, null))
Utils.deleteRecursively(file)
assert(true)
}
@@ -438,7 +438,7 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
val readFile = parquetFile(path)
val rdd_saved = readFile.collect()
- assert(rdd_saved(0) === Seq.fill(5)(null))
+ assert(rdd_saved(0) === Row(null, null, null, null, null))
Utils.deleteRecursively(file)
assert(true)
}
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite2.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite2.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite2.scala
index 4c081fb..7b3f8c2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite2.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite2.scala
@@ -38,7 +38,7 @@ class ParquetQuerySuite2 extends QueryTest with ParquetTest {
val data = (0 until 10).map(i => (i, i.toString))
withParquetTable(data, "t") {
sql("INSERT INTO t SELECT * FROM t")
- checkAnswer(table("t"), data ++ data)
+ checkAnswer(table("t"), (data ++ data).map(Row.fromTuple))
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
index 264f6d9..b1e0919 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
@@ -244,7 +244,7 @@ class TableScanSuite extends DataSourceTest {
sqlTest(
"SELECT count(*) FROM tableWithSchema",
- 10)
+ Seq(Row(10)))
sqlTest(
"SELECT `string$%Field` FROM tableWithSchema",
@@ -260,7 +260,7 @@ class TableScanSuite extends DataSourceTest {
sqlTest(
"SELECT structFieldSimple.key, arrayFieldSimple[1] FROM tableWithSchema a where int_Field=1",
- Seq(Seq(1, 2)))
+ Seq(Row(1, 2)))
sqlTest(
"SELECT structFieldComplex.Value.`value_(2)` FROM tableWithSchema",
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 10833c1..3e26fe3 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -368,10 +368,10 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
.mkString("\t")
}
case command: ExecutedCommand =>
- command.executeCollect().map(_.head.toString)
+ command.executeCollect().map(_(0).toString)
case other =>
- val result: Seq[Seq[Any]] = other.executeCollect().toSeq
+ val result: Seq[Seq[Any]] = other.executeCollect().map(_.toSeq).toSeq
// We need the types so we can output struct field names
val types = analyzed.output.map(_.dataType)
// Reformat to match hive tab delimited output.
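
[Editor's note] executeCollect now yields Rows, so the two fixes here are positional access (`_(0)` in place of the Seq method `.head`) and an explicit per-row toSeq where a Seq[Seq[Any]] is genuinely needed. A minimal sketch:

    import org.apache.spark.sql.Row

    object CollectDemo extends App {
      val collected: Array[Row] = Array(Row("ok"), Row("fine"))

      val firstColumn: Array[String] = collected.map(_(0).toString)
      val asSeqs: Seq[Seq[Any]]      = collected.map(_.toSeq).toSeq

      println(firstColumn.mkString(","))  // ok,fine
      println(asSeqs)                     // one inner Seq per row
    }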
@@ -395,7 +395,7 @@ private object HiveContext {
protected[sql] def toHiveString(a: (Any, DataType)): String = a match {
case (struct: Row, StructType(fields)) =>
- struct.zip(fields).map {
+ struct.toSeq.zip(fields).map {
case (v, t) => s""""${t.name}":${toHiveStructString(v, t.dataType)}"""
}.mkString("{", ",", "}")
case (seq: Seq[_], ArrayType(typ, _)) =>
@@ -418,7 +418,7 @@ private object HiveContext {
/** Hive outputs fields of structs slightly differently than top level attributes. */
protected def toHiveStructString(a: (Any, DataType)): String = a match {
case (struct: Row, StructType(fields)) =>
- struct.zip(fields).map {
+ struct.toSeq.zip(fields).map {
case (v, t) => s""""${t.name}":${toHiveStructString(v, t.dataType)}"""
}.mkString("{", ",", "}")
case (seq: Seq[_], ArrayType(typ, _)) =>
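
[Editor's note] Both toHiveString call sites use the same bridge: the struct Row is converted with toSeq before zipping against the schema's fields. A simplified, self-contained sketch (plain field names stand in for StructField, and values are formatted with toString instead of the recursive toHiveStructString):

    import org.apache.spark.sql.Row

    object StructFormatDemo extends App {
      def formatStruct(struct: Row, fieldNames: Seq[String]): String =
        struct.toSeq.zip(fieldNames).map {
          case (v, name) => s""""$name":$v"""
        }.mkString("{", ",", "}")

      println(formatStruct(Row(1, "a"), Seq("x", "y")))  // {"x":1,"y":a}
    }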
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index eeabfdd..82dba99 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -348,7 +348,7 @@ private[hive] trait HiveInspectors {
(o: Any) => {
if (o != null) {
val struct = soi.create()
- (soi.getAllStructFieldRefs, wrappers, o.asInstanceOf[Row]).zipped.foreach {
+ (soi.getAllStructFieldRefs, wrappers, o.asInstanceOf[Row].toSeq).zipped.foreach {
(field, wrapper, data) => soi.setStructFieldData(struct, field, wrapper(data))
}
struct
@@ -432,7 +432,7 @@ private[hive] trait HiveInspectors {
}
case x: SettableStructObjectInspector =>
val fieldRefs = x.getAllStructFieldRefs
- val row = a.asInstanceOf[Seq[_]]
+ val row = a.asInstanceOf[Row]
// 1. create the pojo (most likely) object
val result = x.create()
var i = 0
@@ -448,7 +448,7 @@ private[hive] trait HiveInspectors {
result
case x: StructObjectInspector =>
val fieldRefs = x.getAllStructFieldRefs
- val row = a.asInstanceOf[Seq[_]]
+ val row = a.asInstanceOf[Row]
val result = new java.util.ArrayList[AnyRef](fieldRefs.length)
var i = 0
while (i < fieldRefs.length) {
@@ -475,7 +475,7 @@ private[hive] trait HiveInspectors {
}
def wrap(
- row: Seq[Any],
+ row: Row,
inspectors: Seq[ObjectInspector],
cache: Array[AnyRef]): Array[AnyRef] = {
var i = 0
@@ -486,6 +486,18 @@ private[hive] trait HiveInspectors {
cache
}
+ def wrap(
+ row: Seq[Any],
+ inspectors: Seq[ObjectInspector],
+ cache: Array[AnyRef]): Array[AnyRef] = {
+ var i = 0
+ while (i < inspectors.length) {
+ cache(i) = wrap(row(i), inspectors(i))
+ i += 1
+ }
+ cache
+ }
+
/**
* @param dataType Catalyst data type
* @return Hive java object inspector (recursively), not the Writable ObjectInspector
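
[Editor's note] The added overload keeps both call shapes compiling now that a Row is no longer a Seq[Any]: projections hand the Hive writers a Row, while other paths still pass a plain Seq. A reduced sketch of the pair (wrapOne is a hypothetical stand-in for the per-value wrap(Any, ObjectInspector)):

    import org.apache.spark.sql.Row

    object WrapOverloadDemo extends App {
      def wrapOne(v: Any): AnyRef = v.asInstanceOf[AnyRef]  // stand-in

      def wrap(row: Row, cache: Array[AnyRef]): Array[AnyRef] = {
        var i = 0
        while (i < cache.length) { cache(i) = wrapOne(row(i)); i += 1 }
        cache
      }

      def wrap(row: Seq[Any], cache: Array[AnyRef]): Array[AnyRef] = {
        var i = 0
        while (i < cache.length) { cache(i) = wrapOne(row(i)); i += 1 }
        cache
      }

      println(wrap(Row(1, "a"), new Array[AnyRef](2)).mkString(","))
      println(wrap(Seq(2, "b"), new Array[AnyRef](2)).mkString(","))
    }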
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
index d898b87..76d2140 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
@@ -360,7 +360,7 @@ private[hive] case class HiveUdafFunction(
protected lazy val cached = new Array[AnyRef](exprs.length)
def update(input: Row): Unit = {
- val inputs = inputProjection(input).asInstanceOf[Seq[AnyRef]].toArray
+ val inputs = inputProjection(input)
function.iterate(buffer, wrap(inputs, inspectors, cached))
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/d181c2a1/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
index cc8bb3e..aae175e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
@@ -209,7 +209,7 @@ private[spark] class SparkHiveDynamicPartitionWriterContainer(
override def getLocalFileWriter(row: Row): FileSinkOperator.RecordWriter = {
val dynamicPartPath = dynamicPartColNames
- .zip(row.takeRight(dynamicPartColNames.length))
+ .zip(row.toSeq.takeRight(dynamicPartColNames.length))
.map { case (col, rawVal) =>
val string = if (rawVal == null) null else String.valueOf(rawVal)
s"/$col=${if (string == null || string.isEmpty) defaultPartName else string}"