Posted to commits@spark.apache.org by we...@apache.org on 2016/08/19 01:19:58 UTC

spark git commit: [SPARK-16947][SQL] Support type coercion and foldable expression for inline tables

Repository: spark
Updated Branches:
  refs/heads/master b72bb62d4 -> f5472dda5


[SPARK-16947][SQL] Support type coercion and foldable expression for inline tables

## What changes were proposed in this pull request?
This patch improves inline table support with the following:

1. Support type coercion.
2. Support foldable expressions; previously, only literals were supported (see the sketch below).
3. Improve error message handling.
4. Improve test coverage.
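
For example, items 1 and 2 are visible from the SQL end. A minimal sketch (assuming a running SparkSession named spark; the schema comment reflects the coercion and nullability logic added in this patch):

    // Type coercion: int 1 and bigint 2L are widened to bigint.
    spark.sql("SELECT * FROM VALUES (1), (2L) AS data(a)").printSchema()
    // root
    //  |-- a: long (nullable = false)

    // Foldable expression: 1 + 3L is evaluated during analysis, yielding bigint 4.
    spark.sql("SELECT * FROM VALUES ('one', 1 + 3L) AS data(a, b)").show()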

## How was this patch tested?
Added a new unit test suite ResolveInlineTablesSuite and a new file-based end-to-end test inline-table.sql.

Author: petermaxlee <pe...@gmail.com>

Closes #14676 from petermaxlee/SPARK-16947.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f5472dda
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f5472dda
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f5472dda

Branch: refs/heads/master
Commit: f5472dda51b980a726346587257c22873ff708e3
Parents: b72bb62
Author: petermaxlee <pe...@gmail.com>
Authored: Fri Aug 19 09:19:47 2016 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Fri Aug 19 09:19:47 2016 +0800

----------------------------------------------------------------------
 .../spark/sql/catalyst/analysis/Analyzer.scala  |   1 +
 .../catalyst/analysis/ResolveInlineTables.scala | 112 ++++++++++++++
 .../sql/catalyst/analysis/TypeCoercion.scala    |   2 +-
 .../sql/catalyst/analysis/unresolved.scala      |  26 +++-
 .../spark/sql/catalyst/parser/AstBuilder.scala  |  41 ++----
 .../analysis/ResolveInlineTablesSuite.scala     | 101 +++++++++++++
 .../sql/catalyst/parser/PlanParserSuite.scala   |  22 +--
 .../resources/sql-tests/inputs/inline-table.sql |  48 ++++++
 .../sql-tests/results/inline-table.sql.out      | 145 +++++++++++++++++++
 9 files changed, 452 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 333dd4d..41e0e6d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -108,6 +108,7 @@ class Analyzer(
       GlobalAggregates ::
       ResolveAggregateFunctions ::
       TimeWindowing ::
+      ResolveInlineTables ::
       TypeCoercion.typeCoercionRules ++
       extendedResolutionRules : _*),
     Batch("Nondeterministic", Once,

http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
new file mode 100644
index 0000000..7323197
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.analysis
+
+import scala.util.control.NonFatal
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.Cast
+import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan}
+import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.types.{StructField, StructType}
+
+/**
+ * An analyzer rule that replaces [[UnresolvedInlineTable]] with [[LocalRelation]].
+ */
+object ResolveInlineTables extends Rule[LogicalPlan] {
+  override def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
+    case table: UnresolvedInlineTable if table.expressionsResolved =>
+      validateInputDimension(table)
+      validateInputEvaluable(table)
+      convert(table)
+  }
+
+  /**
+   * Validates the input data dimension:
+   * 1. All rows have the same cardinality (i.e. the same number of columns).
+   * 2. The number of column aliases defined is consistent with the number of columns in data.
+   *
+   * This is package visible for unit testing.
+   */
+  private[analysis] def validateInputDimension(table: UnresolvedInlineTable): Unit = {
+    if (table.rows.nonEmpty) {
+      val numCols = table.names.size
+      table.rows.zipWithIndex.foreach { case (row, ri) =>
+        if (row.size != numCols) {
+          table.failAnalysis(s"expected $numCols columns but found ${row.size} columns in row $ri")
+        }
+      }
+    }
+  }
+
+  /**
+   * Validates that all inline table data are valid expressions that can be evaluated
+   * (in this case, they must be foldable).
+   *
+   * This is package visible for unit testing.
+   */
+  private[analysis] def validateInputEvaluable(table: UnresolvedInlineTable): Unit = {
+    table.rows.foreach { row =>
+      row.foreach { e =>
+        // Note that nondeterministic expressions are not supported since they are not foldable.
+        if (!e.resolved || !e.foldable) {
+          e.failAnalysis(s"cannot evaluate expression ${e.sql} in inline table definition")
+        }
+      }
+    }
+  }
+
+  /**
+   * Converts a valid [[UnresolvedInlineTable]] (one with the right shape and foldable inputs)
+   * into a [[LocalRelation]].
+   *
+   * This function attempts to coerce inputs into consistent types.
+   *
+   * This is package visible for unit testing.
+   */
+  private[analysis] def convert(table: UnresolvedInlineTable): LocalRelation = {
+    // For each column, traverse all the values and find a common data type and nullability.
+    val fields = table.rows.transpose.zip(table.names).map { case (column, name) =>
+      val inputTypes = column.map(_.dataType)
+      val tpe = TypeCoercion.findWiderTypeWithoutStringPromotion(inputTypes).getOrElse {
+        table.failAnalysis(s"incompatible types found in column $name for inline table")
+      }
+      StructField(name, tpe, nullable = column.exists(_.nullable))
+    }
+    val attributes = StructType(fields).toAttributes
+    assert(fields.size == table.names.size)
+
+    val newRows: Seq[InternalRow] = table.rows.map { row =>
+      InternalRow.fromSeq(row.zipWithIndex.map { case (e, ci) =>
+        val targetType = fields(ci).dataType
+        try {
+          if (e.dataType.sameType(targetType)) {
+            e.eval()
+          } else {
+            Cast(e, targetType).eval()
+          }
+        } catch {
+          case NonFatal(ex) =>
+            table.failAnalysis(s"failed to evaluate expression ${e.sql}: ${ex.getMessage}")
+        }
+      })
+    }
+
+    LocalRelation(attributes, newRows)
+  }
+}
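
Taken together, apply() runs three steps: validate the shape, validate that every cell is foldable, then convert. A minimal sketch of the conversion, mirroring the new unit tests below (the ConvertSketch object is hypothetical, and must live in the analysis package because convert is private[analysis]):

    package org.apache.spark.sql.catalyst.analysis

    import org.apache.spark.sql.catalyst.expressions.Literal

    // Hypothetical illustration only.
    object ConvertSketch {
      def demo(): Unit = {
        // One column mixing an int and a bigint literal.
        val table = UnresolvedInlineTable(Seq("c1"), Seq(Seq(Literal(1)), Seq(Literal(2L))))
        // convert() finds the wider type (bigint), evaluates each cell,
        // and builds a LocalRelation over InternalRows.
        val relation = ResolveInlineTables.convert(table)
        assert(relation.output.map(_.dataType.simpleString) == Seq("bigint"))
        assert(relation.data.map(_.getLong(0)) == Seq(1L, 2L))
      }
    }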

http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index 21e96aa..193c3ec 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -150,7 +150,7 @@ object TypeCoercion {
    * [[findTightestCommonType]], but can handle decimal types. If the wider decimal type exceeds
   * system limitation, this rule will truncate the decimal type before returning it.
    */
-  private def findWiderTypeWithoutStringPromotion(types: Seq[DataType]): Option[DataType] = {
+  def findWiderTypeWithoutStringPromotion(types: Seq[DataType]): Option[DataType] = {
     types.foldLeft[Option[DataType]](Some(NullType))((r, c) => r match {
       case Some(d) => findTightestCommonTypeOfTwo(d, c).orElse((d, c) match {
         case (t1: DecimalType, t2: DecimalType) =>

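The only change to TypeCoercion is visibility: findWiderTypeWithoutStringPromotion becomes public so that ResolveInlineTables can reuse it. A REPL-style sketch of its behavior (assuming the catalyst classes are on the classpath):

    import org.apache.spark.sql.catalyst.analysis.TypeCoercion
    import org.apache.spark.sql.types._

    // int and bigint widen to bigint.
    TypeCoercion.findWiderTypeWithoutStringPromotion(Seq(IntegerType, LongType))
    // => Some(LongType)

    // No string promotion: int and string share no wider type here, which is
    // what makes the rule report "incompatible types found in column ...".
    TypeCoercion.findWiderTypeWithoutStringPromotion(Seq(IntegerType, StringType))
    // => None
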
http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
index 3735a15..235ae04 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
@@ -50,10 +50,30 @@ case class UnresolvedRelation(
 }
 
 /**
- * Holds a table-valued function call that has yet to be resolved.
+ * An inline table that has not been resolved yet. Once resolved, it is turned by the analyzer into
+ * a [[org.apache.spark.sql.catalyst.plans.logical.LocalRelation]].
+ *
+ * @param names list of column names
+ * @param rows expressions for the data
+ */
+case class UnresolvedInlineTable(
+    names: Seq[String],
+    rows: Seq[Seq[Expression]])
+  extends LeafNode {
+
+  lazy val expressionsResolved: Boolean = rows.forall(_.forall(_.resolved))
+  override lazy val resolved = false
+  override def output: Seq[Attribute] = Nil
+}
+
+/**
+ * A table-valued function, e.g.
+ * {{{
+ *   select * from range(10);
+ * }}}
  */
-case class UnresolvedTableValuedFunction(
-    functionName: String, functionArgs: Seq[Expression]) extends LeafNode {
+case class UnresolvedTableValuedFunction(functionName: String, functionArgs: Seq[Expression])
+  extends LeafNode {
 
   override def output: Seq[Attribute] = Nil
 

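Note how resolution is gated: UnresolvedInlineTable always reports resolved = false, so it can never escape the analyzer, while expressionsResolved tells ResolveInlineTables when it is safe to fire. A REPL-style sketch of that contract, mirroring the "do not fire" unit test below:

    import org.apache.spark.sql.catalyst.analysis._
    import org.apache.spark.sql.catalyst.expressions.Literal

    val pending = UnresolvedInlineTable(Seq("c1"), Seq(Seq(UnresolvedAttribute("A"))))
    // The cell expression is unresolved, so the rule leaves the plan unchanged.
    assert(ResolveInlineTables(pending) == pending)

    val ready = UnresolvedInlineTable(Seq("c1"), Seq(Seq(Literal(1))))
    // All cells are resolved and foldable, so the rule converts this one.
    ResolveInlineTables(ready)  // returns a LocalRelation
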
http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 01322ae..283e4d4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -670,39 +670,24 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
    */
   override def visitInlineTable(ctx: InlineTableContext): LogicalPlan = withOrigin(ctx) {
     // Get the backing expressions.
-    val expressions = ctx.expression.asScala.map { eCtx =>
-      val e = expression(eCtx)
-      validate(e.foldable, "All expressions in an inline table must be constants.", eCtx)
-      e
-    }
-
-    // Validate and evaluate the rows.
-    val (structType, structConstructor) = expressions.head.dataType match {
-      case st: StructType =>
-        (st, (e: Expression) => e)
-      case dt =>
-        val st = CreateStruct(Seq(expressions.head)).dataType
-        (st, (e: Expression) => CreateStruct(Seq(e)))
-    }
-    val rows = expressions.map {
-      case expression =>
-        val safe = Cast(structConstructor(expression), structType)
-        safe.eval().asInstanceOf[InternalRow]
+    val rows = ctx.expression.asScala.map { e =>
+      expression(e) match {
+        // inline table comes in two styles:
+        // style 1: values (1), (2), (3)  -- multiple columns are supported
+        // style 2: values 1, 2, 3  -- only a single column is supported here
+        case CreateStruct(children) => children  // style 1
+        case child => Seq(child)  // style 2
+      }
     }
 
-    // Construct attributes.
-    val baseAttributes = structType.toAttributes.map(_.withNullability(true))
-    val attributes = if (ctx.identifierList != null) {
-      val aliases = visitIdentifierList(ctx.identifierList)
-      validate(aliases.size == baseAttributes.size,
-        "Number of aliases must match the number of fields in an inline table.", ctx)
-      baseAttributes.zip(aliases).map(p => p._1.withName(p._2))
+    val aliases = if (ctx.identifierList != null) {
+      visitIdentifierList(ctx.identifierList)
     } else {
-      baseAttributes
+      Seq.tabulate(rows.head.size)(i => s"col${i + 1}")
     }
 
-    // Create plan and add an alias if a name has been defined.
-    LocalRelation(attributes, rows).optionalMap(ctx.identifier)(aliasPlan)
+    val table = UnresolvedInlineTable(aliases, rows)
+    table.optionalMap(ctx.identifier)(aliasPlan)
   }
 
   /**

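The parser no longer evaluates anything: it collects the raw expressions per row and synthesizes the default names col1 ... colN when no alias list is given, leaving validation, coercion, and evaluation to the analyzer rule above. Both accepted styles, sketched from the SQL side (assuming a SparkSession named spark):

    // Style 1: parenthesized tuples; multiple columns, default names col1, col2.
    spark.sql("SELECT * FROM VALUES (1, 'a'), (2, 'b')").printSchema()

    // Style 2: bare expressions; each value becomes a single-column row (col1).
    spark.sql("SELECT * FROM VALUES 1, 2, 3").printSchema()
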
http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala
new file mode 100644
index 0000000..920c6ea
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.analysis
+
+import org.scalatest.BeforeAndAfter
+
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.expressions.{Literal, Rand}
+import org.apache.spark.sql.catalyst.expressions.aggregate.Count
+import org.apache.spark.sql.catalyst.plans.PlanTest
+import org.apache.spark.sql.types.{LongType, NullType}
+
+/**
+ * Unit tests for [[ResolveInlineTables]]. Note that there are also end-to-end test cases in the
+ * sql/core module (inline-table.sql) that verify the correct error messages are shown in
+ * negative cases.
+ */
+class ResolveInlineTablesSuite extends PlanTest with BeforeAndAfter {
+
+  private def lit(v: Any): Literal = Literal(v)
+
+  test("validate inputs are foldable") {
+    ResolveInlineTables.validateInputEvaluable(
+      UnresolvedInlineTable(Seq("c1", "c2"), Seq(Seq(lit(1)))))
+
+    // nondeterministic (rand) should not work
+    intercept[AnalysisException] {
+      ResolveInlineTables.validateInputEvaluable(
+        UnresolvedInlineTable(Seq("c1"), Seq(Seq(Rand(1)))))
+    }
+
+    // aggregate should not work
+    intercept[AnalysisException] {
+      ResolveInlineTables.validateInputEvaluable(
+        UnresolvedInlineTable(Seq("c1"), Seq(Seq(Count(lit(1))))))
+    }
+
+    // unresolved attribute should not work
+    intercept[AnalysisException] {
+      ResolveInlineTables.validateInputEvaluable(
+        UnresolvedInlineTable(Seq("c1"), Seq(Seq(UnresolvedAttribute("A")))))
+    }
+  }
+
+  test("validate input dimensions") {
+    ResolveInlineTables.validateInputDimension(
+      UnresolvedInlineTable(Seq("c1"), Seq(Seq(lit(1)), Seq(lit(2)))))
+
+    // num alias != data dimension
+    intercept[AnalysisException] {
+      ResolveInlineTables.validateInputDimension(
+        UnresolvedInlineTable(Seq("c1", "c2"), Seq(Seq(lit(1)), Seq(lit(2)))))
+    }
+
+    // num alias == data dimension, but data themselves are inconsistent
+    intercept[AnalysisException] {
+      ResolveInlineTables.validateInputDimension(
+        UnresolvedInlineTable(Seq("c1"), Seq(Seq(lit(1)), Seq(lit(21), lit(22)))))
+    }
+  }
+
+  test("do not fire the rule if not all expressions are resolved") {
+    val table = UnresolvedInlineTable(Seq("c1", "c2"), Seq(Seq(UnresolvedAttribute("A"))))
+    assert(ResolveInlineTables(table) == table)
+  }
+
+  test("convert") {
+    val table = UnresolvedInlineTable(Seq("c1"), Seq(Seq(lit(1)), Seq(lit(2L))))
+    val converted = ResolveInlineTables.convert(table)
+
+    assert(converted.output.map(_.dataType) == Seq(LongType))
+    assert(converted.data.size == 2)
+    assert(converted.data(0).getLong(0) == 1L)
+    assert(converted.data(1).getLong(0) == 2L)
+  }
+
+  test("nullability inference in convert") {
+    val table1 = UnresolvedInlineTable(Seq("c1"), Seq(Seq(lit(1)), Seq(lit(2L))))
+    val converted1 = ResolveInlineTables.convert(table1)
+    assert(!converted1.schema.fields(0).nullable)
+
+    val table2 = UnresolvedInlineTable(Seq("c1"), Seq(Seq(lit(1)), Seq(Literal(null, NullType))))
+    val converted2 = ResolveInlineTables.convert(table2)
+    assert(converted2.schema.fields(0).nullable)
+  }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index cbe4a02..2fcbfc7 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -17,9 +17,8 @@
 
 package org.apache.spark.sql.catalyst.parser
 
-import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.FunctionIdentifier
-import org.apache.spark.sql.catalyst.analysis.{UnresolvedGenerator, UnresolvedTableValuedFunction}
+import org.apache.spark.sql.catalyst.analysis.{UnresolvedGenerator, UnresolvedInlineTable, UnresolvedTableValuedFunction}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
@@ -433,19 +432,14 @@ class PlanParserSuite extends PlanTest {
   }
 
   test("inline table") {
-    assertEqual("values 1, 2, 3, 4", LocalRelation.fromExternalRows(
-      Seq('col1.int),
-      Seq(1, 2, 3, 4).map(x => Row(x))))
+    assertEqual("values 1, 2, 3, 4",
+      UnresolvedInlineTable(Seq("col1"), Seq(1, 2, 3, 4).map(x => Seq(Literal(x)))))
+
     assertEqual(
-      "values (1, 'a'), (2, 'b'), (3, 'c') as tbl(a, b)",
-      LocalRelation.fromExternalRows(
-        Seq('a.int, 'b.string),
-        Seq((1, "a"), (2, "b"), (3, "c")).map(x => Row(x._1, x._2))).as("tbl"))
-    intercept("values (a, 'a'), (b, 'b')",
-      "All expressions in an inline table must be constants.")
-    intercept("values (1, 'a'), (2, 'b') as tbl(a, b, c)",
-      "Number of aliases must match the number of fields in an inline table.")
-    intercept[ArrayIndexOutOfBoundsException](parsePlan("values (1, 'a'), (2, 'b', 5Y)"))
+      "values (1, 'a'), (2, 'b') as tbl(a, b)",
+      UnresolvedInlineTable(
+        Seq("a", "b"),
+        Seq(Literal(1), Literal("a")) :: Seq(Literal(2), Literal("b")) :: Nil).as("tbl"))
   }
 
   test("simple select query with !> and !<") {

http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/core/src/test/resources/sql-tests/inputs/inline-table.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/inline-table.sql b/sql/core/src/test/resources/sql-tests/inputs/inline-table.sql
new file mode 100644
index 0000000..5107fa4
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/inline-table.sql
@@ -0,0 +1,48 @@
+
+-- single row, without table and column alias
+select * from values ("one", 1);
+
+-- single row, without column alias
+select * from values ("one", 1) as data;
+
+-- single row
+select * from values ("one", 1) as data(a, b);
+
+-- single column multiple rows
+select * from values 1, 2, 3 as data(a);
+
+-- three rows
+select * from values ("one", 1), ("two", 2), ("three", null) as data(a, b);
+
+-- null type
+select * from values ("one", null), ("two", null) as data(a, b);
+
+-- int and long coercion
+select * from values ("one", 1), ("two", 2L) as data(a, b);
+
+-- foldable expressions
+select * from values ("one", 1 + 0), ("two", 1 + 3L) as data(a, b);
+
+-- complex types
+select * from values ("one", array(0, 1)), ("two", array(2, 3)) as data(a, b);
+
+-- decimal and double coercion
+select * from values ("one", 2.0), ("two", 3.0D) as data(a, b);
+
+-- error reporting: nondeterministic function rand
+select * from values ("one", rand(5)), ("two", 3.0D) as data(a, b);
+
+-- error reporting: different number of columns
+select * from values ("one", 2.0), ("two") as data(a, b);
+
+-- error reporting: types that are incompatible
+select * from values ("one", array(0, 1)), ("two", struct(1, 2)) as data(a, b);
+
+-- error reporting: number aliases different from number data values
+select * from values ("one"), ("two") as data(a, b);
+
+-- error reporting: unresolved expression
+select * from values ("one", random_not_exist_func(1)), ("two", 2) as data(a, b);
+
+-- error reporting: aggregate expression
+select * from values ("one", count(1)), ("two", 2) as data(a, b);

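The golden file below records the expected schema and output for all 16 statements; for instance, the rand(5) statement must fail analysis because nondeterministic expressions are not foldable. The same negative check from the Scala side, as a hedged sketch (assuming a SparkSession named spark):

    try {
      spark.sql("""SELECT * FROM VALUES ("one", rand(5)) AS data(a, b)""").show()
    } catch {
      case e: org.apache.spark.sql.AnalysisException =>
        // "cannot evaluate expression rand(5) in inline table definition"
        println(e.getMessage)
    }
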
http://git-wip-us.apache.org/repos/asf/spark/blob/f5472dda/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
new file mode 100644
index 0000000..de6f01b
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
@@ -0,0 +1,145 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 16
+
+
+-- !query 0
+select * from values ("one", 1)
+-- !query 0 schema
+struct<col1:string,col2:int>
+-- !query 0 output
+one	1
+
+
+-- !query 1
+select * from values ("one", 1) as data
+-- !query 1 schema
+struct<col1:string,col2:int>
+-- !query 1 output
+one	1
+
+
+-- !query 2
+select * from values ("one", 1) as data(a, b)
+-- !query 2 schema
+struct<a:string,b:int>
+-- !query 2 output
+one	1
+
+
+-- !query 3
+select * from values 1, 2, 3 as data(a)
+-- !query 3 schema
+struct<a:int>
+-- !query 3 output
+1
+2
+3
+
+
+-- !query 4
+select * from values ("one", 1), ("two", 2), ("three", null) as data(a, b)
+-- !query 4 schema
+struct<a:string,b:int>
+-- !query 4 output
+one	1
+three	NULL
+two	2
+
+
+-- !query 5
+select * from values ("one", null), ("two", null) as data(a, b)
+-- !query 5 schema
+struct<a:string,b:null>
+-- !query 5 output
+one	NULL
+two	NULL
+
+
+-- !query 6
+select * from values ("one", 1), ("two", 2L) as data(a, b)
+-- !query 6 schema
+struct<a:string,b:bigint>
+-- !query 6 output
+one	1
+two	2
+
+
+-- !query 7
+select * from values ("one", 1 + 0), ("two", 1 + 3L) as data(a, b)
+-- !query 7 schema
+struct<a:string,b:bigint>
+-- !query 7 output
+one	1
+two	4
+
+
+-- !query 8
+select * from values ("one", array(0, 1)), ("two", array(2, 3)) as data(a, b)
+-- !query 8 schema
+struct<a:string,b:array<int>>
+-- !query 8 output
+one	[0,1]
+two	[2,3]
+
+
+-- !query 9
+select * from values ("one", 2.0), ("two", 3.0D) as data(a, b)
+-- !query 9 schema
+struct<a:string,b:double>
+-- !query 9 output
+one	2.0
+two	3.0
+
+
+-- !query 10
+select * from values ("one", rand(5)), ("two", 3.0D) as data(a, b)
+-- !query 10 schema
+struct<>
+-- !query 10 output
+org.apache.spark.sql.AnalysisException
+cannot evaluate expression rand(5) in inline table definition; line 1 pos 29
+
+
+-- !query 11
+select * from values ("one", 2.0), ("two") as data(a, b)
+-- !query 11 schema
+struct<>
+-- !query 11 output
+org.apache.spark.sql.AnalysisException
+expected 2 columns but found 1 columns in row 1; line 1 pos 14
+
+
+-- !query 12
+select * from values ("one", array(0, 1)), ("two", struct(1, 2)) as data(a, b)
+-- !query 12 schema
+struct<>
+-- !query 12 output
+org.apache.spark.sql.AnalysisException
+incompatible types found in column b for inline table; line 1 pos 14
+
+
+-- !query 13
+select * from values ("one"), ("two") as data(a, b)
+-- !query 13 schema
+struct<>
+-- !query 13 output
+org.apache.spark.sql.AnalysisException
+expected 2 columns but found 1 columns in row 0; line 1 pos 14
+
+
+-- !query 14
+select * from values ("one", random_not_exist_func(1)), ("two", 2) as data(a, b)
+-- !query 14 schema
+struct<>
+-- !query 14 output
+org.apache.spark.sql.AnalysisException
+Undefined function: 'random_not_exist_func'. This function is neither a registered temporary function nor a permanent function registered in the database 'default'.; line 1 pos 29
+
+
+-- !query 15
+select * from values ("one", count(1)), ("two", 2) as data(a, b)
+-- !query 15 schema
+struct<>
+-- !query 15 output
+org.apache.spark.sql.AnalysisException
+cannot evaluate expression count(1) in inline table definition; line 1 pos 29

