You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by fe...@apache.org on 2017/03/27 17:43:03 UTC
spark git commit: [SPARK-20105][TESTS][R] Add tests for checkType and type string in structField in R
Repository: spark
Updated Branches:
refs/heads/master 314cf51de -> 3fada2f50
[SPARK-20105][TESTS][R] Add tests for checkType and type string in structField in R
## What changes were proposed in this pull request?
It seems `checkType` and the type string in `structField` are not being tested closely. This string format currently seems SparkR-specific (see https://github.com/apache/spark/blob/d1f6c64c4b763c05d6d79ae5497f298dc3835f3e/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala#L93-L131) but resembles SQL type definition.
Therefore, it seems nicer if we test positive/negative cases in R side.
## How was this patch tested?
Unit tests in `test_sparkSQL.R`.
Author: hyukjinkwon <gu...@gmail.com>
Closes #17439 from HyukjinKwon/r-typestring-tests.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3fada2f5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3fada2f5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3fada2f5
Branch: refs/heads/master
Commit: 3fada2f502107bd5572fb895471943de7b2c38e4
Parents: 314cf51
Author: hyukjinkwon <gu...@gmail.com>
Authored: Mon Mar 27 10:43:00 2017 -0700
Committer: Felix Cheung <fe...@apache.org>
Committed: Mon Mar 27 10:43:00 2017 -0700
----------------------------------------------------------------------
R/pkg/inst/tests/testthat/test_sparkSQL.R | 53 ++++++++++++++++++++++++++
1 file changed, 53 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/3fada2f5/R/pkg/inst/tests/testthat/test_sparkSQL.R
----------------------------------------------------------------------
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 394d1a0..5acf871 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -140,6 +140,59 @@ test_that("structType and structField", {
expect_equal(testSchema$fields()[[1]]$dataType.toString(), "StringType")
})
+test_that("structField type strings", {
+ # positive cases
+ # List names are SparkR type strings accepted by structField(); list
+ # values are the expected Catalyst DataType toString() representations.
+ primitiveTypes <- list(byte = "ByteType",
+ integer = "IntegerType",
+ float = "FloatType",
+ double = "DoubleType",
+ string = "StringType",
+ binary = "BinaryType",
+ boolean = "BooleanType",
+ timestamp = "TimestampType",
+ date = "DateType")
+
+ # Complex (nested) types; the trailing "true" in the expected strings is
+ # the element/value nullability flag in the Catalyst representation.
+ complexTypes <- list("map<string,integer>" = "MapType(StringType,IntegerType,true)",
+ "array<string>" = "ArrayType(StringType,true)",
+ "struct<a:string>" = "StructType(StructField(a,StringType,true))")
+
+ typeList <- c(primitiveTypes, complexTypes)
+ typeStrings <- names(typeList)
+
+ for (i in seq_along(typeStrings)){
+ typeString <- typeStrings[i]
+ expected <- typeList[[i]]
+ testField <- structField("_col", typeString)
+ expect_is(testField, "structField")
+ # structField was called without a nullable argument, so this asserts
+ # that fields default to nullable = TRUE.
+ expect_true(testField$nullable())
+ expect_equal(testField$dataType.toString(), expected)
+ }
+
+ # negative cases
+ # List names are malformed type strings; list values are the offending
+ # fragment expected in the error message. Type strings are case-sensitive,
+ # and R class names (numeric, character, raw, logical) are not accepted.
+ primitiveErrors <- list(Byte = "Byte",
+ INTEGER = "INTEGER",
+ numeric = "numeric",
+ character = "character",
+ raw = "raw",
+ logical = "logical")
+
+ # Whitespace inside complex type strings is rejected: the parser treats
+ # " integer", "string ", etc. as unknown type names rather than trimming.
+ complexErrors <- list("map<string, integer>" = " integer",
+ "array<String>" = "String",
+ "struct<a:string >" = "string ",
+ "map <string,integer>" = "map <string,integer>",
+ "array< string>" = " string",
+ "struct<a: string>" = " string")
+
+ errorList <- c(primitiveErrors, complexErrors)
+ typeStrings <- names(errorList)
+
+ for (i in seq_along(typeStrings)){
+ typeString <- typeStrings[i]
+ # expect_error matches the message produced by SQLUtils.scala's
+ # checkType for unsupported types.
+ expected <- paste0("Unsupported type for SparkDataframe: ", errorList[[i]])
+ expect_error(structField("_col", typeString), expected)
+ }
+})
+
test_that("create DataFrame from RDD", {
rdd <- lapply(parallelize(sc, 1:10), function(x) { list(x, as.character(x)) })
df <- createDataFrame(rdd, list("a", "b"))
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org