You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/09/16 20:40:31 UTC
git commit: [SPARK-3308][SQL] Ability to read JSON Arrays as tables
Repository: spark
Updated Branches:
refs/heads/master 9d5fa763d -> 758369987
[SPARK-3308][SQL] Ability to read JSON Arrays as tables
This PR aims to support reading top level JSON arrays, taking every element in such an array as a row (an empty array will not generate a row).
JIRA: https://issues.apache.org/jira/browse/SPARK-3308
Author: Yin Huai <hu...@cse.ohio-state.edu>
Closes #2400 from yhuai/SPARK-3308 and squashes the following commits:
990077a [Yin Huai] Handle top level JSON arrays.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/75836998
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/75836998
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/75836998
Branch: refs/heads/master
Commit: 7583699873fb4f252c6ce65db1096783ef438731
Parents: 9d5fa76
Author: Yin Huai <hu...@cse.ohio-state.edu>
Authored: Tue Sep 16 11:40:28 2014 -0700
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Tue Sep 16 11:40:28 2014 -0700
----------------------------------------------------------------------
.../scala/org/apache/spark/sql/json/JsonRDD.scala | 10 +++++++---
.../org/apache/spark/sql/json/JsonSuite.scala | 17 +++++++++++++++++
.../org/apache/spark/sql/json/TestJsonData.scala | 7 +++++++
3 files changed, 31 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/75836998/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
index 8732218..0f27fd1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
@@ -287,9 +287,13 @@ private[sql] object JsonRDD extends Logging {
// the ObjectMapper will take the last value associated with this duplicate key.
// For example: for {"key": 1, "key":2}, we will get "key"->2.
val mapper = new ObjectMapper()
- iter.map { record =>
- val parsed = scalafy(mapper.readValue(record, classOf[java.util.Map[String, Any]]))
- parsed.asInstanceOf[Map[String, Any]]
+ iter.flatMap { record =>
+ val parsed = mapper.readValue(record, classOf[Object]) match {
+ case map: java.util.Map[_, _] => scalafy(map).asInstanceOf[Map[String, Any]] :: Nil
+ case list: java.util.List[_] => scalafy(list).asInstanceOf[Seq[Map[String, Any]]]
+ }
+
+ parsed
}
})
}
http://git-wip-us.apache.org/repos/asf/spark/blob/75836998/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
index b50d938..685e788 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
@@ -622,4 +622,21 @@ class JsonSuite extends QueryTest {
("str1", Nil, "str4", 2) :: Nil
)
}
+
+ test("SPARK-3308 Read top level JSON arrays") {
+ val jsonSchemaRDD = jsonRDD(jsonArray)
+ jsonSchemaRDD.registerTempTable("jsonTable")
+
+ checkAnswer(
+ sql(
+ """
+ |select a, b, c
+ |from jsonTable
+ """.stripMargin),
+ ("str_a_1", null, null) ::
+ ("str_a_2", null, null) ::
+ (null, "str_b_3", null) ::
+ ("str_a_4", "str_b_4", "str_c_4") ::Nil
+ )
+ }
}
http://git-wip-us.apache.org/repos/asf/spark/blob/75836998/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala b/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala
index 5f0b395..fc833b8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala
@@ -136,4 +136,11 @@ object TestJsonData {
]
]]
}""" :: Nil)
+
+ val jsonArray =
+ TestSQLContext.sparkContext.parallelize(
+ """[{"a":"str_a_1"}]""" ::
+ """[{"a":"str_a_2"}, {"b":"str_b_3"}]""" ::
+ """{"b":"str_b_4", "a":"str_a_4", "c":"str_c_4"}""" ::
+ """[]""" :: Nil)
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org