Posted to commits@spark.apache.org by pw...@apache.org on 2014/03/23 23:21:51 UTC

[2/2] git commit: Fixed coding style issues in Spark SQL

Fixed coding style issues in Spark SQL

This PR addresses various coding style issues in Spark SQL, including but not limited to those mentioned by @mateiz in PR #146.

As this PR touches many source files and is likely to cause merge conflicts, it would be best to merge it as soon as possible *after* PR #205 (In-memory columnar representation for Spark SQL) is merged.

Author: Cheng Lian <li...@gmail.com>

Closes #208 from liancheng/fixCodingStyle and squashes the following commits:

fc2b528 [Cheng Lian] Merge branch 'master' into fixCodingStyle
b531273 [Cheng Lian] Fixed coding style issues in sql/hive
0b56f77 [Cheng Lian] Fixed coding style issues in sql/core
fae7b02 [Cheng Lian] Addressed styling issues mentioned by @marmbrus
9265366 [Cheng Lian] Fixed coding style issues in sql/core
3dcbbbd [Cheng Lian] Fixed relative package imports for package catalyst


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8265dc77
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8265dc77
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8265dc77

Branch: refs/heads/master
Commit: 8265dc7739caccc59bc2456b2df055ca96337fe4
Parents: 57a4379
Author: Cheng Lian <li...@gmail.com>
Authored: Sun Mar 23 15:21:40 2014 -0700
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Sun Mar 23 15:21:40 2014 -0700
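
Most of the churn below replaces relative package imports (e.g. "import expressions._") with imports that are fully qualified from the root package. A minimal, self-contained sketch of the two styles, using hypothetical org.example packages:

    package org.example.sql.catalyst {

      package expressions {
        trait Expression
      }

      package analysis {
        // Old style (removed by this commit): resolves relative to the
        // enclosing catalyst package, and silently breaks when files move.
        // import expressions.Expression

        // New style adopted here: fully qualified from the root.
        import org.example.sql.catalyst.expressions.Expression

        class Demo(val e: Expression)
      }
    }

The qualified form is more verbose, but every dependency becomes greppable and robust to files moving between packages.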

----------------------------------------------------------------------
 .../spark/sql/catalyst/ScalaReflection.scala    |  69 +++++++++++
 .../apache/spark/sql/catalyst/SqlParser.scala   |  28 ++---
 .../spark/sql/catalyst/analysis/Analyzer.scala  |   7 +-
 .../spark/sql/catalyst/analysis/Catalog.scala   |   3 +-
 .../catalyst/analysis/FunctionRegistry.scala    |   2 +-
 .../catalyst/analysis/HiveTypeCoercion.scala    |   8 +-
 .../analysis/MultiInstanceRelation.scala        |   8 +-
 .../spark/sql/catalyst/analysis/package.scala   |   1 +
 .../sql/catalyst/analysis/unresolved.scala      |   8 +-
 .../apache/spark/sql/catalyst/dsl/package.scala |  56 +--------
 .../spark/sql/catalyst/errors/package.scala     |   7 +-
 .../catalyst/expressions/BoundAttribute.scala   |   7 +-
 .../spark/sql/catalyst/expressions/Cast.scala   |   4 +-
 .../sql/catalyst/expressions/Expression.scala   |   6 +-
 .../spark/sql/catalyst/expressions/Rand.scala   |   2 +-
 .../spark/sql/catalyst/expressions/Row.scala    |   2 +-
 .../sql/catalyst/expressions/ScalaUdf.scala     |   2 +-
 .../sql/catalyst/expressions/WrapDynamic.scala  |   2 +-
 .../sql/catalyst/expressions/aggregates.scala   |   2 +-
 .../sql/catalyst/expressions/arithmetic.scala   |   4 +-
 .../sql/catalyst/expressions/complexTypes.scala |   2 +-
 .../sql/catalyst/expressions/generators.scala   |   2 +-
 .../sql/catalyst/expressions/literals.scala     |   2 +-
 .../catalyst/expressions/namedExpressions.scala |   6 +-
 .../catalyst/expressions/nullFunctions.scala    |   2 +-
 .../sql/catalyst/expressions/predicates.scala   |   4 +-
 .../catalyst/expressions/stringOperations.scala |   2 +-
 .../sql/catalyst/optimizer/Optimizer.scala      |  25 ++--
 .../sql/catalyst/planning/QueryPlanner.scala    |   5 +-
 .../spark/sql/catalyst/planning/patterns.scala  |   4 +-
 .../spark/sql/catalyst/plans/QueryPlan.scala    |   4 +-
 .../catalyst/plans/logical/LogicalPlan.scala    |   6 +-
 .../plans/logical/ScriptTransformation.scala    |   2 +-
 .../catalyst/plans/logical/TestRelation.scala   |   3 +-
 .../catalyst/plans/logical/basicOperators.scala |   2 +-
 .../catalyst/plans/logical/partitioning.scala   |   2 +-
 .../catalyst/plans/physical/partitioning.scala  |   4 +-
 .../apache/spark/sql/catalyst/rules/Rule.scala  |   2 +-
 .../spark/sql/catalyst/rules/RuleExecutor.scala |  14 +--
 .../spark/sql/catalyst/trees/TreeNode.scala     |   2 +-
 .../spark/sql/catalyst/AnalysisSuite.scala      |  41 -------
 .../catalyst/ExpressionEvaluationSuite.scala    | 115 -------------------
 .../sql/catalyst/HiveTypeCoercionSuite.scala    |  74 ------------
 .../spark/sql/catalyst/RuleExecutorSuite.scala  |  57 ---------
 .../spark/sql/catalyst/TreeNodeSuite.scala      |  81 -------------
 .../sql/catalyst/analysis/AnalysisSuite.scala   |  39 +++++++
 .../analysis/HiveTypeCoercionSuite.scala        |  73 ++++++++++++
 .../expressions/ExpressionEvaluationSuite.scala | 114 ++++++++++++++++++
 .../optimizer/ConstantFoldingSuite.scala        |  19 +--
 .../optimizer/FilterPushdownSuite.scala         |  13 +--
 .../sql/catalyst/optimizer/OptimizerTest.scala  |  12 +-
 .../sql/catalyst/trees/RuleExecutorSuite.scala  |  57 +++++++++
 .../sql/catalyst/trees/TreeNodeSuite.scala      |  78 +++++++++++++
 .../spark/rdd/PartitionLocalRDDFunctions.scala  |   7 +-
 .../scala/org/apache/spark/sql/SQLContext.scala |   5 +-
 .../scala/org/apache/spark/sql/SchemaRDD.scala  |   4 +-
 .../apache/spark/sql/execution/Exchange.scala   |  21 ++--
 .../apache/spark/sql/execution/Generate.scala   |   3 +-
 .../spark/sql/execution/SparkStrategies.scala   |  16 +--
 .../apache/spark/sql/execution/aggregates.scala |   9 +-
 .../spark/sql/execution/basicOperators.scala    |  13 +--
 .../org/apache/spark/sql/execution/joins.scala  |   8 +-
 .../apache/spark/sql/execution/package.scala    |   3 +-
 .../spark/sql/parquet/ParquetRelation.scala     |  39 +++----
 .../sql/parquet/ParquetTableOperations.scala    |  28 ++---
 .../spark/sql/parquet/ParquetTableSupport.scala |  20 ++--
 .../spark/sql/parquet/ParquetTestData.scala     |  11 +-
 .../org/apache/spark/sql/DslQuerySuite.scala    |   7 +-
 .../org/apache/spark/sql/PlannerSuite.scala     |  62 ----------
 .../scala/org/apache/spark/sql/QueryTest.scala  |   8 +-
 .../org/apache/spark/sql/SQLQuerySuite.scala    |  26 ++---
 .../scala/org/apache/spark/sql/TestData.scala   |  14 +--
 .../scala/org/apache/spark/sql/TgfSuite.scala   |  71 ------------
 .../spark/sql/execution/PlannerSuite.scala      |  60 ++++++++++
 .../apache/spark/sql/execution/TgfSuite.scala   |  66 +++++++++++
 .../spark/sql/parquet/ParquetQuerySuite.scala   |  15 ++-
 .../hadoop/mapred/SparkHadoopWriter.scala       |   7 +-
 .../org/apache/spark/sql/hive/HiveContext.scala |  25 ++--
 .../spark/sql/hive/HiveMetastoreCatalog.scala   |  22 ++--
 .../org/apache/spark/sql/hive/HiveQl.scala      |  25 ++--
 .../apache/spark/sql/hive/HiveStrategies.scala  |  11 +-
 .../spark/sql/hive/ScriptTransformation.scala   |   5 +-
 .../org/apache/spark/sql/hive/TableReader.scala |  26 ++---
 .../org/apache/spark/sql/hive/TestHive.scala    |  21 ++--
 .../apache/spark/sql/hive/hiveOperators.scala   |  28 ++---
 .../org/apache/spark/sql/hive/hiveUdfs.scala    |  22 ++--
 .../hive/execution/ConcurrentHiveSuite.scala    |   3 +-
 .../sql/hive/execution/HiveComparisonTest.scala |  12 +-
 .../hive/execution/HiveCompatibilitySuite.scala |   5 -
 .../sql/hive/execution/HiveQueryFileTest.scala  |   6 +-
 .../sql/hive/execution/HiveQuerySuite.scala     |   3 +-
 .../hive/execution/HiveResolutionSuite.scala    |   4 +-
 .../spark/sql/hive/execution/PruningSuite.scala |   5 +-
 .../spark/sql/parquet/HiveParquetSuite.scala    |  16 +--
 94 files changed, 908 insertions(+), 950 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
new file mode 100644
index 0000000..bf7318d
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql
+package catalyst
+
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
+import org.apache.spark.sql.catalyst.types._
+
+/**
+ * Provides experimental support for generating catalyst schemas for scala objects.
+ */
+object ScalaReflection {
+  import scala.reflect.runtime.universe._
+
+  /** Returns a Sequence of attributes for the given case class type. */
+  def attributesFor[T: TypeTag]: Seq[Attribute] = schemaFor[T] match {
+    case s: StructType =>
+      s.fields.map(f => AttributeReference(f.name, f.dataType, nullable = true)())
+  }
+
+  /** Returns a catalyst DataType for the given Scala Type using reflection. */
+  def schemaFor[T: TypeTag]: DataType = schemaFor(typeOf[T])
+
+  /** Returns a catalyst DataType for the given Scala Type using reflection. */
+  def schemaFor(tpe: `Type`): DataType = tpe match {
+    case t if t <:< typeOf[Product] =>
+      val params = t.member("<init>": TermName).asMethod.paramss
+      StructType(
+        params.head.map(p => StructField(p.name.toString, schemaFor(p.typeSignature), true)))
+    case t if t <:< typeOf[Seq[_]] =>
+      val TypeRef(_, _, Seq(elementType)) = t
+      ArrayType(schemaFor(elementType))
+    case t if t <:< typeOf[String] => StringType
+    case t if t <:< definitions.IntTpe => IntegerType
+    case t if t <:< definitions.LongTpe => LongType
+    case t if t <:< definitions.DoubleTpe => DoubleType
+    case t if t <:< definitions.ShortTpe => ShortType
+    case t if t <:< definitions.ByteTpe => ByteType
+  }
+
+  implicit class CaseClassRelation[A <: Product : TypeTag](data: Seq[A]) {
+
+    /**
+     * Implicitly added to Sequences of case class objects.  Returns a catalyst logical relation
+     * for the the data in the sequence.
+     */
+    def asRelation: LocalRelation = {
+      val output = attributesFor[A]
+      LocalRelation(output, data)
+    }
+  }
+}
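
A hedged usage sketch of the ScalaReflection object added above; it assumes Spark SQL at this commit is on the classpath, and Person is a made-up case class:

    import org.apache.spark.sql.catalyst.ScalaReflection

    case class Person(name: String, age: Int)

    object SchemaDemo extends App {
      // schemaFor derives a catalyst DataType from the constructor of the
      // case class: a StructType with StringType and IntegerType fields
      // (both marked nullable, per the code above).
      println(ScalaReflection.schemaFor[Person])

      // attributesFor yields one AttributeReference per field.
      ScalaReflection.attributesFor[Person].foreach(println)
    }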

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
index 919bf4d..9dec4e3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
@@ -17,21 +17,18 @@
 
 package org.apache.spark.sql.catalyst
 
-import scala.util.matching.Regex
-import scala.util.parsing.combinator._
+import scala.util.parsing.combinator.lexical.StdLexical
+import scala.util.parsing.combinator.syntactical.StandardTokenParsers
 import scala.util.parsing.input.CharArrayReader.EofCh
-import lexical._
-import syntactical._
-import token._
 
-import analysis._
-import expressions._
-import plans._
-import plans.logical._
-import types._
+import org.apache.spark.sql.catalyst.analysis._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.types._
 
 /**
- * A very simple SQL parser.  Based loosly on:
+ * A very simple SQL parser.  Based loosely on:
  * https://github.com/stephentu/scala-sql-parser/blob/master/src/main/scala/parser.scala
  *
  * Limitations:
@@ -39,10 +36,9 @@ import types._
  *  - Keywords must be capital.
  *
  * This is currently included mostly for illustrative purposes.  Users wanting more complete support
- * for a SQL like language should checkout the HiveQL support in the sql/hive subproject.
+ * for a SQL like language should checkout the HiveQL support in the sql/hive sub-project.
  */
 class SqlParser extends StandardTokenParsers {
-
   def apply(input: String): LogicalPlan = {
     phrase(query)(new lexical.Scanner(input)) match {
       case Success(r, x) => r
@@ -196,7 +192,7 @@ class SqlParser extends StandardTokenParsers {
 
   protected lazy val from: Parser[LogicalPlan] = FROM ~> relations
 
-  // Based very loosly on the MySQL Grammar.
+  // Based very loosely on the MySQL Grammar.
   // http://dev.mysql.com/doc/refman/5.0/en/join.html
   protected lazy val relations: Parser[LogicalPlan] =
     relation ~ "," ~ relation ^^ { case r1 ~ _ ~ r2 => Join(r1, r2, Inner, None) } |
@@ -261,9 +257,9 @@ class SqlParser extends StandardTokenParsers {
     andExpression * (OR ^^^ { (e1: Expression, e2: Expression) => Or(e1,e2) })
 
   protected lazy val andExpression: Parser[Expression] =
-    comparisionExpression * (AND ^^^ { (e1: Expression, e2: Expression) => And(e1,e2) })
+    comparisonExpression * (AND ^^^ { (e1: Expression, e2: Expression) => And(e1,e2) })
 
-  protected lazy val comparisionExpression: Parser[Expression] =
+  protected lazy val comparisonExpression: Parser[Expression] =
     termExpression ~ "=" ~ termExpression ^^ { case e1 ~ _ ~ e2 => Equals(e1, e2) } |
     termExpression ~ "<" ~ termExpression ^^ { case e1 ~ _ ~ e2 => LessThan(e1, e2) } |
     termExpression ~ "<=" ~ termExpression ^^ { case e1 ~ _ ~ e2 => LessThanOrEqual(e1, e2) } |

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 9eb992e..fc76e76 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -19,9 +19,10 @@ package org.apache.spark.sql
 package catalyst
 package analysis
 
-import expressions._
-import plans.logical._
-import rules._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.rules._
+
 
 /**
  * A trivial [[Analyzer]] with an [[EmptyCatalog]] and [[EmptyFunctionRegistry]]. Used for testing

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
index 71e4dcd..b77f0bb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
@@ -19,9 +19,10 @@ package org.apache.spark.sql
 package catalyst
 package analysis
 
-import plans.logical.{LogicalPlan, Subquery}
 import scala.collection.mutable
 
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Subquery}
+
 /**
  * An interface for looking up relations by name.  Used by an [[Analyzer]].
  */

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index a359eb5..eed058d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package analysis
 
-import expressions._
+import org.apache.spark.sql.catalyst.expressions.Expression
 
 /** A catalog for looking up user defined functions, used by an [[Analyzer]]. */
 trait FunctionRegistry {

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
index a0105cd..a6ecf6e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
@@ -19,10 +19,10 @@ package org.apache.spark.sql
 package catalyst
 package analysis
 
-import expressions._
-import plans.logical._
-import rules._
-import types._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, Union}
+import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.types._
 
 /**
  * A collection of [[catalyst.rules.Rule Rules]] that can be used to coerce differing types that

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala
index fe18cc4..3cad3a5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala
@@ -18,14 +18,14 @@
 package org.apache.spark.sql.catalyst
 package analysis
 
-import plans.logical.LogicalPlan
-import rules._
+import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 
 /**
  * A trait that should be mixed into query operators where an single instance might appear multiple
  * times in a logical query plan.  It is invalid to have multiple copies of the same attribute
- * produced by distinct operators in a query tree as this breaks the gurantee that expression
- * ids, which are used to differentate attributes, are unique.
+ * produced by distinct operators in a query tree as this breaks the guarantee that expression
+ * ids, which are used to differentiate attributes, are unique.
  *
  * Before analysis, all operators that include this trait will be asked to produce a new version
  * of itself with globally unique expression ids.

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
index 375c99f..30c55ba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+package org.apache.spark.sql
 package catalyst
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
index 2ed2af1..04ae481 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
@@ -19,9 +19,9 @@ package org.apache.spark.sql
 package catalyst
 package analysis
 
-import expressions._
-import plans.logical.BaseRelation
-import trees.TreeNode
+import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Expression, NamedExpression}
+import org.apache.spark.sql.catalyst.plans.logical.BaseRelation
+import org.apache.spark.sql.catalyst.trees.TreeNode
 
 /**
  * Thrown when an invalid attempt is made to access a property of a tree that has yet to be fully
@@ -95,7 +95,7 @@ case class Star(
       // If there is no table specified, use all input attributes.
       case None => input
       // If there is a table, pick out attributes that are part of this table.
-      case Some(table) => input.filter(_.qualifiers contains table)
+      case Some(t) => input.filter(_.qualifiers contains t)
     }
     val mappedAttributes = expandedAttributes.map(mapFunction).zip(input).map {
       case (n: NamedExpression, _) => n

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
index cd8de9d..e6255bc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
@@ -19,58 +19,12 @@ package org.apache.spark.sql
 package catalyst
 
 import scala.language.implicitConversions
-import scala.reflect.runtime.universe.TypeTag
 
-import analysis.UnresolvedAttribute
-import expressions._
-import plans._
-import plans.logical._
-import types._
-
-/**
- * Provides experimental support for generating catalyst schemas for scala objects.
- */
-object ScalaReflection {
-  import scala.reflect.runtime.universe._
-
-  /** Returns a Sequence of attributes for the given case class type. */
-  def attributesFor[T: TypeTag]: Seq[Attribute] = schemaFor[T] match {
-    case s: StructType =>
-      s.fields.map(f => AttributeReference(f.name, f.dataType, nullable = true)())
-  }
-
-  /** Returns a catalyst DataType for the given Scala Type using reflection. */
-  def schemaFor[T: TypeTag]: DataType = schemaFor(typeOf[T])
-
-  /** Returns a catalyst DataType for the given Scala Type using reflection. */
-  def schemaFor(tpe: `Type`): DataType = tpe match {
-    case t if t <:< typeOf[Product] =>
-      val params = t.member("<init>": TermName).asMethod.paramss
-      StructType(
-        params.head.map(p => StructField(p.name.toString, schemaFor(p.typeSignature), true)))
-    case t if t <:< typeOf[Seq[_]] =>
-      val TypeRef(_, _, Seq(elementType)) = t
-      ArrayType(schemaFor(elementType))
-    case t if t <:< typeOf[String] => StringType
-    case t if t <:< definitions.IntTpe => IntegerType
-    case t if t <:< definitions.LongTpe => LongType
-    case t if t <:< definitions.DoubleTpe => DoubleType
-    case t if t <:< definitions.ShortTpe => ShortType
-    case t if t <:< definitions.ByteTpe => ByteType
-  }
-
-  implicit class CaseClassRelation[A <: Product : TypeTag](data: Seq[A]) {
-
-    /**
-     * Implicitly added to Sequences of case class objects.  Returns a catalyst logical relation
-     * for the the data in the sequence.
-     */
-    def asRelation: LocalRelation = {
-      val output = attributesFor[A]
-      LocalRelation(output, data)
-    }
-  }
-}
+import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
+import org.apache.spark.sql.catalyst.types._
 
 /**
  * A collection of implicit conversions that create a DSL for constructing catalyst data structures.

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
index c253587..d8b6994 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
@@ -18,15 +18,16 @@
 package org.apache.spark.sql
 package catalyst
 
-import trees._
+import org.apache.spark.sql.catalyst.trees.TreeNode
 
 /**
  * Functions for attaching and retrieving trees that are associated with errors.
  */
 package object errors {
 
-  class TreeNodeException[TreeType <: TreeNode[_]]
-    (tree: TreeType, msg: String, cause: Throwable) extends Exception(msg, cause) {
+  class TreeNodeException[TreeType <: TreeNode[_]](
+      tree: TreeType, msg: String, cause: Throwable)
+    extends Exception(msg, cause) {
 
     // Yes, this is the same as a default parameter, but... those don't seem to work with SBT
     // external project dependencies for some reason.

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
index 3b6bac1..3fa4148 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
@@ -19,10 +19,9 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import rules._
-import errors._
-
-import catalyst.plans.QueryPlan
+import org.apache.spark.sql.catalyst.errors.attachTree
+import org.apache.spark.sql.catalyst.plans.QueryPlan
+import org.apache.spark.sql.catalyst.rules.Rule
 
 /**
  * A bound reference points to a specific slot in the input tuple, allowing the actual value

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 608656d..71f64ef 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import types._
+import org.apache.spark.sql.catalyst.types._
 
 /** Cast the child expression to the target data type. */
 case class Cast(child: Expression, dataType: DataType) extends UnaryExpression {
@@ -40,7 +40,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression {
     case (StringType, ShortType) => a: Any => castOrNull(a, _.toShort)
     case (StringType, ByteType) => a: Any => castOrNull(a, _.toByte)
     case (StringType, DecimalType) => a: Any => castOrNull(a, BigDecimal(_))
-    case (BooleanType, ByteType) => a: Any => a match {
+    case (BooleanType, ByteType) => {
       case null => null
       case true => 1.toByte
       case false => 0.toByte

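The BooleanType-to-ByteType change above relies on Scala's pattern-matching anonymous functions: a bare block of case clauses in function position expands to a lambda that matches on its argument. A self-contained sketch of the equivalence:

    object MatchSugar extends App {
      // Explicit form, as Cast read before this commit:
      val before: Any => Any = a => a match {
        case null  => null
        case true  => 1.toByte
        case false => 0.toByte
      }

      // Sugared form it is rewritten to; behaviour is identical:
      val after: Any => Any = {
        case null  => null
        case true  => 1.toByte
        case false => 0.toByte
      }

      println(before(true) == after(true)) // true
    }
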
http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 78aaaee..2454a33 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -19,9 +19,9 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import errors._
-import trees._
-import types._
+import org.apache.spark.sql.catalyst.trees.TreeNode
+import org.apache.spark.sql.catalyst.types.{DataType, FractionalType, IntegralType, NumericType}
+import org.apache.spark.sql.catalyst.errors.TreeNodeException
 
 abstract class Expression extends TreeNode[Expression] {
   self: Product =>

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala
index a5d0ecf..0d173af 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import types.DoubleType
+import org.apache.spark.sql.catalyst.types.DoubleType
 
 case object Rand extends LeafExpression {
   def dataType = DoubleType

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
index 3529675..79c91eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import types._
+import org.apache.spark.sql.catalyst.types.NativeType
 
 /**
  * Represents one row of output from a relational operator.  Allows both generic access by ordinal,

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
index a3c7ca1..cc33948 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import types._
+import org.apache.spark.sql.catalyst.types.DataType
 
 case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expression])
   extends Expression {

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala
index 2ad8d6f..01b7a14 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala
@@ -21,7 +21,7 @@ package expressions
 
 import scala.language.dynamics
 
-import types._
+import org.apache.spark.sql.catalyst.types.DataType
 
 case object DynamicType extends DataType
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
index 2287a84..a16bb80 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import catalyst.types._
+import org.apache.spark.sql.catalyst.types._
 
 abstract class AggregateExpression extends Expression {
   self: Product =>

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index db23564..81e4a48 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import catalyst.analysis.UnresolvedException
-import catalyst.types._
+import org.apache.spark.sql.catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.types._
 
 case class UnaryMinus(child: Expression) extends UnaryExpression {
   type EvaluatedType = Any

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
index d3feb6c..9ec0f6a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import types._
+import org.apache.spark.sql.catalyst.types._
 
 /**
  * Returns the item at `ordinal` in the Array `child` or the Key `ordinal` in Map `child`.

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
index c367de2..9097c63 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import catalyst.types._
+import org.apache.spark.sql.catalyst.types._
 
 /**
  * An expression that produces zero or more rows given a single input row.

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 229d8f7..0d01312 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import types._
+import org.apache.spark.sql.catalyst.types._
 
 object Literal {
   def apply(v: Any): Literal = v match {

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
index 0a06e85..47b1241 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import catalyst.analysis.UnresolvedAttribute
-import types._
+import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
+import org.apache.spark.sql.catalyst.types._
 
 object NamedExpression {
   private val curId = new java.util.concurrent.atomic.AtomicLong()
@@ -30,7 +30,7 @@ object NamedExpression {
 /**
  * A globally (within this JVM) id for a given named expression.
  * Used to identify with attribute output by a relation is being
- * referenced in a subsuqent computation.
+ * referenced in a subsequent computation.
  */
 case class ExprId(id: Long)
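
ExprId values come from the JVM-wide AtomicLong shown at the top of this hunk (NamedExpression.curId). A minimal sketch of the same pattern:

    import java.util.concurrent.atomic.AtomicLong

    object FreshIds {
      private val curId = new AtomicLong()

      // Each call mints an id that is unique within this JVM, which is the
      // guarantee the analysis.MultiInstanceRelation trait depends on.
      def newId: Long = curId.getAndIncrement()
    }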
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
index e869a4d..38e3837 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.analysis.UnresolvedException
 
 case class Coalesce(children: Seq[Expression]) extends Expression {
   type EvaluatedType = Any

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index 561396e..e7f3e8c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import types._
-import catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.types.{BooleanType, StringType}
+import org.apache.spark.sql.catalyst.analysis.UnresolvedException
 
 trait Predicate extends Expression {
   self: Product =>

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
index 6e58523..7584fe0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import catalyst.types.BooleanType
+import org.apache.spark.sql.catalyst.types.BooleanType
 
 case class Like(left: Expression, right: Expression) extends BinaryExpression {
   def dataType = BooleanType

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index 4db2803..c120197 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -19,11 +19,11 @@ package org.apache.spark.sql
 package catalyst
 package optimizer
 
-import catalyst.expressions._
-import catalyst.plans.logical._
-import catalyst.rules._
-import catalyst.types.BooleanType
-import catalyst.plans.Inner
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.Inner
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.rules._
+import org.apache.spark.sql.catalyst.types._
 
 object Optimizer extends RuleExecutor[LogicalPlan] {
   val batches =
@@ -73,7 +73,7 @@ object ConstantFolding extends Rule[LogicalPlan] {
 object BooleanSimplification extends Rule[LogicalPlan] {
   def apply(plan: LogicalPlan): LogicalPlan = plan transform {
     case q: LogicalPlan => q transformExpressionsUp {
-      case and @ And(left, right) => {
+      case and @ And(left, right) =>
         (left, right) match {
           case (Literal(true, BooleanType), r) => r
           case (l, Literal(true, BooleanType)) => l
@@ -81,8 +81,8 @@ object BooleanSimplification extends Rule[LogicalPlan] {
           case (_, Literal(false, BooleanType)) => Literal(false)
           case (_, _) => and
         }
-      }
-      case or @ Or(left, right) => {
+
+      case or @ Or(left, right) =>
         (left, right) match {
           case (Literal(true, BooleanType), _) => Literal(true)
           case (_, Literal(true, BooleanType)) => Literal(true)
@@ -90,7 +90,6 @@ object BooleanSimplification extends Rule[LogicalPlan] {
           case (l, Literal(false, BooleanType)) => l
           case (_, _) => or
         }
-      }
     }
   }
 }
@@ -101,7 +100,7 @@ object BooleanSimplification extends Rule[LogicalPlan] {
  */
 object CombineFilters extends Rule[LogicalPlan] {
   def apply(plan: LogicalPlan): LogicalPlan = plan transform {
-    case ff@Filter(fc, nf@Filter(nc, grandChild)) => Filter(And(nc, fc), grandChild)
+    case ff @ Filter(fc, nf @ Filter(nc, grandChild)) => Filter(And(nc, fc), grandChild)
   }
 }
 
@@ -114,8 +113,10 @@ object CombineFilters extends Rule[LogicalPlan] {
  */
 object PushPredicateThroughProject extends Rule[LogicalPlan] {
   def apply(plan: LogicalPlan): LogicalPlan = plan transform {
-    case filter@Filter(condition, project@Project(fields, grandChild)) =>
-      val sourceAliases = fields.collect { case a@Alias(c, _) => a.toAttribute -> c }.toMap
+    case filter @ Filter(condition, project @ Project(fields, grandChild)) =>
+      val sourceAliases = fields.collect { case a @ Alias(c, _) =>
+        (a.toAttribute: Attribute) -> c
+      }.toMap
       project.copy(child = filter.copy(
         replaceAlias(condition, sourceAliases),
         grandChild))
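
The BooleanSimplification rule earlier in this file rewrites And/Or nodes whose operands are boolean literals. A self-contained sketch of the same rewrite on a toy expression tree (all names hypothetical):

    sealed trait Expr
    case class Lit(value: Boolean) extends Expr
    case class Ref(name: String) extends Expr
    case class And(left: Expr, right: Expr) extends Expr
    case class Or(left: Expr, right: Expr) extends Expr

    object Simplify {
      def apply(e: Expr): Expr = e match {
        case And(Lit(true), r)  => apply(r)   // true AND r  ==> r
        case And(l, Lit(true))  => apply(l)
        case And(Lit(false), _) => Lit(false) // false AND _ ==> false
        case And(_, Lit(false)) => Lit(false)
        case Or(Lit(true), _)   => Lit(true)  // true OR _   ==> true
        case Or(_, Lit(true))   => Lit(true)
        case Or(Lit(false), r)  => apply(r)   // false OR r  ==> r
        case Or(l, Lit(false))  => apply(l)
        case other              => other
      }
    }

    // Simplify(And(Lit(true), Ref("x"))) evaluates to Ref("x")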

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala
index 22f8ea0..d50b963 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala
@@ -19,9 +19,8 @@ package org.apache.spark.sql
 package catalyst
 package planning
 
-
-import plans.logical.LogicalPlan
-import trees._
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.trees.TreeNode
 
 /**
  * Abstract class for transforming [[plans.logical.LogicalPlan LogicalPlan]]s into physical plans.

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
index 613b028..ff0ea90 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
@@ -21,8 +21,8 @@ package planning
 
 import scala.annotation.tailrec
 
-import expressions._
-import plans.logical._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical._
 
 /**
  * A pattern that matches any number of filter operations on top of another relational operator.

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
index 20f230c..848db24 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
 package catalyst
 package plans
 
-import catalyst.expressions.{SortOrder, Attribute, Expression}
-import catalyst.trees._
+import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
+import org.apache.spark.sql.catalyst.trees.TreeNode
 
 abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanType] {
   self: PlanType with Product =>

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index bc7b687..225dd26 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -20,9 +20,9 @@ package catalyst
 package plans
 package logical
 
-import catalyst.expressions._
-import catalyst.errors._
-import catalyst.types.StructType
+import org.apache.spark.sql.catalyst.errors.TreeNodeException
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.types.StructType
 
 abstract class LogicalPlan extends QueryPlan[LogicalPlan] {
   self: Product =>

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala
index 1a1a2b9..5a3ea9f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala
@@ -20,7 +20,7 @@ package catalyst
 package plans
 package logical
 
-import expressions._
+import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
 
 /**
  * Transforms the input by forking and running the specified script.

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala
index b5905a4..ac7d2d6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala
@@ -20,8 +20,7 @@ package catalyst
 package plans
 package logical
 
-import expressions._
-import rules._
+import org.apache.spark.sql.catalyst.expressions.Attribute
 
 object LocalRelation {
   def apply(output: Attribute*) =

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index 8e98aab..6480cca 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -20,7 +20,7 @@ package catalyst
 package plans
 package logical
 
-import expressions._
+import org.apache.spark.sql.catalyst.expressions._
 
 case class Project(projectList: Seq[NamedExpression], child: LogicalPlan) extends UnaryNode {
   def output = projectList.map(_.toAttribute)

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
index f7fcdc5..775e50b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
@@ -20,7 +20,7 @@ package catalyst
 package plans
 package logical
 
-import expressions._
+import org.apache.spark.sql.catalyst.expressions.{Expression, SortOrder}
 
 /**
  * Performs a physical redistribution of the data.  Used when the consumer of the query

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
index 2d8f3ad..20e2a45 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
@@ -20,8 +20,8 @@ package catalyst
 package plans
 package physical
 
-import expressions._
-import types._
+import org.apache.spark.sql.catalyst.expressions.{Expression, SortOrder}
+import org.apache.spark.sql.catalyst.types.IntegerType
 
 /**
  * Specifies how tuples that share common expressions will be distributed when a query is executed

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala
index 6ff4891..c7632a6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package rules
 
-import trees._
+import org.apache.spark.sql.catalyst.trees.TreeNode
 
 abstract class Rule[TreeType <: TreeNode[_]] extends Logging {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
index 68ae30c..9db96f8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
 package catalyst
 package rules
 
-import trees._
-import util._
+import org.apache.spark.sql.catalyst.trees.TreeNode
+import org.apache.spark.sql.catalyst.util.sideBySide
 
 abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
 
@@ -52,19 +52,19 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
     batches.foreach { batch =>
       var iteration = 1
       var lastPlan = curPlan
-      curPlan = batch.rules.foldLeft(curPlan) { case (curPlan, rule) => rule(curPlan) }
+      curPlan = batch.rules.foldLeft(curPlan) { case (plan, rule) => rule(plan) }
 
       // Run until fix point (or the max number of iterations as specified in the strategy.
       while (iteration < batch.strategy.maxIterations && !curPlan.fastEquals(lastPlan)) {
         lastPlan = curPlan
         curPlan = batch.rules.foldLeft(curPlan) {
-          case (curPlan, rule) =>
-            val result = rule(curPlan)
-            if (!result.fastEquals(curPlan)) {
+          case (plan, rule) =>
+            val result = rule(plan)
+            if (!result.fastEquals(plan)) {
               logger.debug(
                 s"""
                   |=== Applying Rule ${rule.ruleName} ===
-                  |${sideBySide(curPlan.treeString, result.treeString).mkString("\n")}
+                  |${sideBySide(plan.treeString, result.treeString).mkString("\n")}
                 """.stripMargin)
             }
 
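The loop above is a fixed-point driver: fold each rule in the batch over the plan, and repeat until the plan stops changing or the batch's maxIterations is reached. A self-contained sketch of the same shape, generic in the plan type (the real code compares plans with fastEquals rather than !=):

    object FixedPoint {
      def run[T](start: T, rules: Seq[T => T], maxIterations: Int): T = {
        // First pass, mirroring the fold before the while loop above.
        var plan = rules.foldLeft(start) { case (p, rule) => rule(p) }
        var last = start
        var iteration = 1
        while (iteration < maxIterations && plan != last) {
          last = plan
          plan = rules.foldLeft(plan) { case (p, rule) => rule(p) }
          iteration += 1
        }
        plan
      }
    }

    // FixedPoint.run(10, Seq((n: Int) => n / 2), maxIterations = 100) == 0
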

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index 37e5574..89e27d8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package trees
 
-import errors._
+import org.apache.spark.sql.catalyst.errors._
 
 object TreeNode {
   private val currentId = new java.util.concurrent.atomic.AtomicLong

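The now fully qualified errors._ import brings in attachTree, which wraps failures in a TreeNodeException carrying the offending subtree. A hedged sketch of typical usage (evalOrExplain is a made-up helper):

  import org.apache.spark.sql.catalyst.errors._
  import org.apache.spark.sql.catalyst.expressions.Expression

  // If the body throws, attachTree rethrows with `tree` attached, so the error
  // message shows which subtree failed.
  def evalOrExplain(tree: Expression): Any =
    attachTree(tree, "while evaluating") {
      tree.apply(null)
    }
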
http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/AnalysisSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/AnalysisSuite.scala
deleted file mode 100644
index 1fd0d26..0000000
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/AnalysisSuite.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-package catalyst
-package analysis
-
-import org.scalatest.FunSuite
-
-import analysis._
-import expressions._
-import plans.logical._
-import types._
-
-import dsl._
-import dsl.expressions._
-
-class AnalysisSuite extends FunSuite {
-  val analyze = SimpleAnalyzer
-
-  val testRelation = LocalRelation('a.int)
-
-  test("analyze project") {
-    assert(analyze(Project(Seq(UnresolvedAttribute("a")), testRelation)) === Project(testRelation.output, testRelation))
-
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ExpressionEvaluationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ExpressionEvaluationSuite.scala
deleted file mode 100644
index f06618a..0000000
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ExpressionEvaluationSuite.scala
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-package catalyst
-package expressions
-
-import org.scalatest.FunSuite
-
-import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.types._
-
-/* Implict conversions */
-import org.apache.spark.sql.catalyst.dsl.expressions._
-
-class ExpressionEvaluationSuite extends FunSuite {
-
-  test("literals") {
-    assert((Literal(1) + Literal(1)).apply(null) === 2)
-  }
-
-  /**
-   * Checks for three-valued-logic.  Based on:
-   * http://en.wikipedia.org/wiki/Null_(SQL)#Comparisons_with_NULL_and_the_three-valued_logic_.283VL.29
-   *
-   * p       q       p OR q  p AND q  p = q
-   * True    True    True    True     True
-   * True    False   True    False    False
-   * True    Unknown True    Unknown  Unknown
-   * False   True    True    False    False
-   * False   False   False   False    True
-   * False   Unknown Unknown False    Unknown
-   * Unknown True    True    Unknown  Unknown
-   * Unknown False   Unknown False    Unknown
-   * Unknown Unknown Unknown Unknown  Unknown
-   *
-   * p       NOT p
-   * True    False
-   * False   True
-   * Unknown Unknown
-   */
-
-  val notTrueTable =
-    (true, false) ::
-    (false, true) ::
-    (null, null) :: Nil
-
-  test("3VL Not") {
-    notTrueTable.foreach {
-      case (v, answer) =>
-        val expr = Not(Literal(v, BooleanType))
-        val result = expr.apply(null)
-        if (result != answer)
-          fail(s"$expr should not evaluate to $result, expected: $answer")    }
-  }
-
-  booleanLogicTest("AND", _ && _,
-    (true,  true,  true) ::
-    (true,  false, false) ::
-    (true,  null,  null) ::
-    (false, true,  false) ::
-    (false, false, false) ::
-    (false, null,  false) ::
-    (null,  true,  null) ::
-    (null,  false, false) ::
-    (null,  null,  null) :: Nil)
-
-  booleanLogicTest("OR", _ || _,
-    (true,  true,  true) ::
-    (true,  false, true) ::
-    (true,  null,  true) ::
-    (false, true,  true) ::
-    (false, false, false) ::
-    (false, null,  null) ::
-    (null,  true,  true) ::
-    (null,  false, null) ::
-    (null,  null,  null) :: Nil)
-
-  booleanLogicTest("=", _ === _,
-    (true,  true,  true) ::
-    (true,  false, false) ::
-    (true,  null,  null) ::
-    (false, true,  false) ::
-    (false, false, true) ::
-    (false, null,  null) ::
-    (null,  true,  null) ::
-    (null,  false, null) ::
-    (null,  null,  null) :: Nil)
-
-  def booleanLogicTest(name: String, op: (Expression, Expression) => Expression,  truthTable: Seq[(Any, Any, Any)]) {
-    test(s"3VL $name") {
-      truthTable.foreach {
-        case (l,r,answer) =>
-          val expr = op(Literal(l, BooleanType), Literal(r, BooleanType))
-          val result = expr.apply(null)
-          if (result != answer)
-            fail(s"$expr should not evaluate to $result, expected: $answer")
-      }
-    }
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/HiveTypeCoercionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/HiveTypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/HiveTypeCoercionSuite.scala
deleted file mode 100644
index f595bf7..0000000
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/HiveTypeCoercionSuite.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-package catalyst
-package analysis
-
-import org.scalatest.FunSuite
-
-import catalyst.types._
-
-
-class HiveTypeCoercionSuite extends FunSuite {
-
-  val rules = new HiveTypeCoercion { }
-  import rules._
-
-  test("tightest common bound for numeric and boolean types") {
-    def widenTest(t1: DataType, t2: DataType, tightestCommon: Option[DataType]) {
-      var found = WidenTypes.findTightestCommonType(t1, t2)
-      assert(found == tightestCommon,
-        s"Expected $tightestCommon as tightest common type for $t1 and $t2, found $found")
-      // Test both directions to make sure the widening is symmetric.
-      found = WidenTypes.findTightestCommonType(t2, t1)
-      assert(found == tightestCommon,
-        s"Expected $tightestCommon as tightest common type for $t2 and $t1, found $found")
-    }
-
-    // Boolean
-    widenTest(NullType, BooleanType, Some(BooleanType))
-    widenTest(BooleanType, BooleanType, Some(BooleanType))
-    widenTest(IntegerType, BooleanType, None)
-    widenTest(LongType, BooleanType, None)
-
-    // Integral
-    widenTest(NullType, ByteType, Some(ByteType))
-    widenTest(NullType, IntegerType, Some(IntegerType))
-    widenTest(NullType, LongType, Some(LongType))
-    widenTest(ShortType, IntegerType, Some(IntegerType))
-    widenTest(ShortType, LongType, Some(LongType))
-    widenTest(IntegerType, LongType, Some(LongType))
-    widenTest(LongType, LongType, Some(LongType))
-
-    // Floating point
-    widenTest(NullType, FloatType, Some(FloatType))
-    widenTest(NullType, DoubleType, Some(DoubleType))
-    widenTest(FloatType, DoubleType, Some(DoubleType))
-    widenTest(FloatType, FloatType, Some(FloatType))
-    widenTest(DoubleType, DoubleType, Some(DoubleType))
-
-    // Integral mixed with floating point.
-    widenTest(NullType, FloatType, Some(FloatType))
-    widenTest(NullType, DoubleType, Some(DoubleType))
-    widenTest(IntegerType, FloatType, Some(FloatType))
-    widenTest(IntegerType, DoubleType, Some(DoubleType))
-    widenTest(IntegerType, DoubleType, Some(DoubleType))
-    widenTest(LongType, FloatType, Some(FloatType))
-    widenTest(LongType, DoubleType, Some(DoubleType))
-  }
-}

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/RuleExecutorSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/RuleExecutorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/RuleExecutorSuite.scala
deleted file mode 100644
index ff7c15b..0000000
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/RuleExecutorSuite.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-package catalyst
-package trees
-
-import org.scalatest.FunSuite
-
-import expressions._
-import rules._
-
-class RuleExecutorSuite extends FunSuite {
-  object DecrementLiterals extends Rule[Expression] {
-    def apply(e: Expression): Expression = e transform {
-      case IntegerLiteral(i) if i > 0 => Literal(i - 1)
-    }
-  }
-
-  test("only once") {
-    object ApplyOnce extends RuleExecutor[Expression] {
-      val batches = Batch("once", Once, DecrementLiterals) :: Nil
-    }
-
-    assert(ApplyOnce(Literal(10)) === Literal(9))
-  }
-
-  test("to fixed point") {
-    object ToFixedPoint extends RuleExecutor[Expression] {
-      val batches = Batch("fixedPoint", FixedPoint(100), DecrementLiterals) :: Nil
-    }
-
-    assert(ToFixedPoint(Literal(10)) === Literal(0))
-  }
-
-  test("to maxIterations") {
-    object ToFixedPoint extends RuleExecutor[Expression] {
-      val batches = Batch("fixedPoint", FixedPoint(10), DecrementLiterals) :: Nil
-    }
-
-    assert(ToFixedPoint(Literal(100)) === Literal(90))
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/TreeNodeSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/TreeNodeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/TreeNodeSuite.scala
deleted file mode 100644
index 98bb090..0000000
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/TreeNodeSuite.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-package catalyst
-package trees
-
-import scala.collection.mutable.ArrayBuffer
-
-import expressions._
-
-import org.scalatest.{FunSuite}
-
-class TreeNodeSuite extends FunSuite {
-
-  test("top node changed") {
-    val after = Literal(1) transform { case Literal(1, _) => Literal(2) }
-    assert(after === Literal(2))
-  }
-
-  test("one child changed") {
-    val before = Add(Literal(1), Literal(2))
-    val after = before transform { case Literal(2, _) => Literal(1) }
-
-    assert(after === Add(Literal(1), Literal(1)))
-  }
-
-  test("no change") {
-    val before = Add(Literal(1), Add(Literal(2), Add(Literal(3), Literal(4))))
-    val after = before transform { case Literal(5, _) => Literal(1)}
-
-    assert(before === after)
-    assert(before.map(_.id) === after.map(_.id))
-  }
-
-  test("collect") {
-    val tree = Add(Literal(1), Add(Literal(2), Add(Literal(3), Literal(4))))
-    val literals = tree collect {case l: Literal => l}
-
-    assert(literals.size === 4)
-    (1 to 4).foreach(i => assert(literals contains Literal(i)))
-  }
-
-  test("pre-order transform") {
-    val actual = new ArrayBuffer[String]()
-    val expected = Seq("+", "1", "*", "2", "-", "3", "4")
-    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
-    expression transformDown {
-      case b: BinaryExpression => {actual.append(b.symbol); b}
-      case l: Literal => {actual.append(l.toString); l}
-    }
-
-    assert(expected === actual)
-  }
-
-  test("post-order transform") {
-    val actual = new ArrayBuffer[String]()
-    val expected = Seq("1", "2", "3", "4", "-", "*", "+")
-    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
-    expression transformUp {
-      case b: BinaryExpression => {actual.append(b.symbol); b}
-      case l: Literal => {actual.append(l.toString); l}
-    }
-
-    assert(expected === actual)
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
new file mode 100644
index 0000000..78ec48b
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql
+package catalyst
+package analysis
+
+import org.scalatest.FunSuite
+
+import org.apache.spark.sql.catalyst.plans.logical._
+
+/* Implicit conversions */
+import org.apache.spark.sql.catalyst.dsl.expressions._
+
+class AnalysisSuite extends FunSuite {
+  val analyze = SimpleAnalyzer
+
+  val testRelation = LocalRelation('a.int)
+
+  test("analyze project") {
+    assert(
+      analyze(Project(Seq(UnresolvedAttribute("a")), testRelation)) ===
+        Project(testRelation.output, testRelation))
+  }
+}

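The dsl.expressions import above is what makes LocalRelation('a.int) work: the DSL converts a Scala Symbol into a typed attribute. A hedged sketch of roughly what 'a.int desugars to (modulo fresh expression IDs):

  import org.apache.spark.sql.catalyst.expressions.AttributeReference
  import org.apache.spark.sql.catalyst.types.IntegerType

  // Roughly equivalent to 'a.int under dsl.expressions.
  val a = AttributeReference("a", IntegerType, nullable = true)()
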
http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
new file mode 100644
index 0000000..b85b72a
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql
+package catalyst
+package analysis
+
+import org.scalatest.FunSuite
+
+import org.apache.spark.sql.catalyst.types._
+
+class HiveTypeCoercionSuite extends FunSuite {
+
+  val rules = new HiveTypeCoercion { }
+  import rules._
+
+  test("tightest common bound for numeric and boolean types") {
+    def widenTest(t1: DataType, t2: DataType, tightestCommon: Option[DataType]) {
+      var found = WidenTypes.findTightestCommonType(t1, t2)
+      assert(found == tightestCommon,
+        s"Expected $tightestCommon as tightest common type for $t1 and $t2, found $found")
+      // Test both directions to make sure the widening is symmetric.
+      found = WidenTypes.findTightestCommonType(t2, t1)
+      assert(found == tightestCommon,
+        s"Expected $tightestCommon as tightest common type for $t2 and $t1, found $found")
+    }
+
+    // Boolean
+    widenTest(NullType, BooleanType, Some(BooleanType))
+    widenTest(BooleanType, BooleanType, Some(BooleanType))
+    widenTest(IntegerType, BooleanType, None)
+    widenTest(LongType, BooleanType, None)
+
+    // Integral
+    widenTest(NullType, ByteType, Some(ByteType))
+    widenTest(NullType, IntegerType, Some(IntegerType))
+    widenTest(NullType, LongType, Some(LongType))
+    widenTest(ShortType, IntegerType, Some(IntegerType))
+    widenTest(ShortType, LongType, Some(LongType))
+    widenTest(IntegerType, LongType, Some(LongType))
+    widenTest(LongType, LongType, Some(LongType))
+
+    // Floating point
+    widenTest(NullType, FloatType, Some(FloatType))
+    widenTest(NullType, DoubleType, Some(DoubleType))
+    widenTest(FloatType, DoubleType, Some(DoubleType))
+    widenTest(FloatType, FloatType, Some(FloatType))
+    widenTest(DoubleType, DoubleType, Some(DoubleType))
+
+    // Integral mixed with floating point.
+    widenTest(NullType, FloatType, Some(FloatType))
+    widenTest(NullType, DoubleType, Some(DoubleType))
+    widenTest(IntegerType, FloatType, Some(FloatType))
+    widenTest(IntegerType, DoubleType, Some(DoubleType))
+    widenTest(LongType, FloatType, Some(FloatType))
+    widenTest(LongType, DoubleType, Some(DoubleType))
+  }
+}

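Read against the tables above, widenTest simply probes findTightestCommonType in both argument orders. A hedged usage sketch outside the suite:

  import org.apache.spark.sql.catalyst.analysis.HiveTypeCoercion
  import org.apache.spark.sql.catalyst.types._

  val coercion = new HiveTypeCoercion {}
  import coercion._

  // Int widens to Double; Boolean never widens to a numeric type.
  assert(WidenTypes.findTightestCommonType(IntegerType, DoubleType) == Some(DoubleType))
  assert(WidenTypes.findTightestCommonType(IntegerType, BooleanType) == None)
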
http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
new file mode 100644
index 0000000..c8fd581
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql
+package catalyst
+package expressions
+
+import org.scalatest.FunSuite
+
+import org.apache.spark.sql.catalyst.types._
+
+/* Implicit conversions */
+import org.apache.spark.sql.catalyst.dsl.expressions._
+
+class ExpressionEvaluationSuite extends FunSuite {
+
+  test("literals") {
+    assert((Literal(1) + Literal(1)).apply(null) === 2)
+  }
+
+  /**
+   * Checks for three-valued-logic.  Based on:
+   * http://en.wikipedia.org/wiki/Null_(SQL)#Comparisons_with_NULL_and_the_three-valued_logic_.283VL.29
+   *
+   * p       q       p OR q  p AND q  p = q
+   * True    True    True    True     True
+   * True    False   True    False    False
+   * True    Unknown True    Unknown  Unknown
+   * False   True    True    False    False
+   * False   False   False   False    True
+   * False   Unknown Unknown False    Unknown
+   * Unknown True    True    Unknown  Unknown
+   * Unknown False   Unknown False    Unknown
+   * Unknown Unknown Unknown Unknown  Unknown
+   *
+   * p       NOT p
+   * True    False
+   * False   True
+   * Unknown Unknown
+   */
+
+  val notTrueTable =
+    (true, false) ::
+    (false, true) ::
+    (null, null) :: Nil
+
+  test("3VL Not") {
+    notTrueTable.foreach {
+      case (v, answer) =>
+        val expr = Not(Literal(v, BooleanType))
+        val result = expr.apply(null)
+        if (result != answer)
+          fail(s"$expr should not evaluate to $result, expected: $answer")
+    }
+  }
+
+  booleanLogicTest("AND", _ && _,
+    (true,  true,  true) ::
+    (true,  false, false) ::
+    (true,  null,  null) ::
+    (false, true,  false) ::
+    (false, false, false) ::
+    (false, null,  false) ::
+    (null,  true,  null) ::
+    (null,  false, false) ::
+    (null,  null,  null) :: Nil)
+
+  booleanLogicTest("OR", _ || _,
+    (true,  true,  true) ::
+    (true,  false, true) ::
+    (true,  null,  true) ::
+    (false, true,  true) ::
+    (false, false, false) ::
+    (false, null,  null) ::
+    (null,  true,  true) ::
+    (null,  false, null) ::
+    (null,  null,  null) :: Nil)
+
+  booleanLogicTest("=", _ === _,
+    (true,  true,  true) ::
+    (true,  false, false) ::
+    (true,  null,  null) ::
+    (false, true,  false) ::
+    (false, false, true) ::
+    (false, null,  null) ::
+    (null,  true,  null) ::
+    (null,  false, null) ::
+    (null,  null,  null) :: Nil)
+
+  def booleanLogicTest(name: String, op: (Expression, Expression) => Expression, truthTable: Seq[(Any, Any, Any)]) {
+    test(s"3VL $name") {
+      truthTable.foreach {
+        case (l, r, answer) =>
+          val expr = op(Literal(l, BooleanType), Literal(r, BooleanType))
+          val result = expr.apply(null)
+          if (result != answer)
+            fail(s"$expr should not evaluate to $result, expected: $answer")
+      }
+    }
+  }
+}

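The truth tables in the comment above can be checked directly by evaluating expressions against a null input row, with Unknown modeled as a null literal of BooleanType. A hedged sketch:

  import org.apache.spark.sql.catalyst.expressions.Literal
  import org.apache.spark.sql.catalyst.types.BooleanType
  import org.apache.spark.sql.catalyst.dsl.expressions._

  // False AND Unknown is False, but True AND Unknown stays Unknown (null).
  assert((Literal(false, BooleanType) && Literal(null, BooleanType)).apply(null) == false)
  assert((Literal(true, BooleanType) && Literal(null, BooleanType)).apply(null) == null)
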
http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
index 7ce42b2..2c107b8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
@@ -19,13 +19,14 @@ package org.apache.spark.sql
 package catalyst
 package optimizer
 
-import types.IntegerType
-import util._
-import plans.logical.{LogicalPlan, LocalRelation}
-import rules._
-import expressions._
-import dsl.plans._
-import dsl.expressions._
+import org.apache.spark.sql.catalyst.dsl.plans._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan}
+import org.apache.spark.sql.catalyst.rules.RuleExecutor
+import org.apache.spark.sql.catalyst.types.IntegerType
+
+/* Implicit conversions */
+import org.apache.spark.sql.catalyst.dsl.expressions._
 
 class ConstantFoldingSuite extends OptimizerTest {
 
@@ -106,7 +107,7 @@ class ConstantFoldingSuite extends OptimizerTest {
           Literal(5) + 'a as Symbol("c1"),
           'a + Literal(2) + Literal(3) as Symbol("c2"),
           Literal(2) * 'a + Literal(4) as Symbol("c3"),
-          'a * (Literal(7)) as Symbol("c4"))
+          'a * Literal(7) as Symbol("c4"))
         .analyze
 
     comparePlans(optimized, correctAnswer)
@@ -173,4 +174,4 @@ class ConstantFoldingSuite extends OptimizerTest {
 
     comparePlans(optimized, correctAnswer)
   }
-}
\ No newline at end of file
+}

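The hunk above only drops redundant parentheses; the surrounding test asserts that ConstantFolding pre-computes literal-only subtrees. A hedged sketch of the before/after shape in the suite's own DSL (the optimizer invocation itself lives inside the suite):

  import org.apache.spark.sql.catalyst.dsl.plans._
  import org.apache.spark.sql.catalyst.dsl.expressions._
  import org.apache.spark.sql.catalyst.expressions.Literal
  import org.apache.spark.sql.catalyst.plans.logical.LocalRelation

  val testRelation = LocalRelation('a.int)

  // Before: SELECT 2 + 3 AS c FROM testRelation
  val query = testRelation.select(Literal(2) + Literal(3) as Symbol("c")).analyze
  // After ConstantFolding, the projection should carry Literal(5) instead.
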
http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
index cd611b3..cfbef53 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
@@ -2,13 +2,12 @@ package org.apache.spark.sql
 package catalyst
 package optimizer
 
-import expressions._
-import plans.logical._
-import rules._
-import util._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.rules._
 
-import dsl.plans._
-import dsl.expressions._
+/* Implicit conversions */
+import org.apache.spark.sql.catalyst.dsl.plans._
+import org.apache.spark.sql.catalyst.dsl.expressions._
 
 class FilterPushdownSuite extends OptimizerTest {
 
@@ -219,4 +218,4 @@ class FilterPushdownSuite extends OptimizerTest {
 
     comparePlans(optimized, optimizer.EliminateSubqueries(correctAnswer))
   }
-}
\ No newline at end of file
+}

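For orientation, the suite's cases share one pattern: a filter written above a projection should come out below it. A hedged sketch of such a pair in the same DSL (names assumed for illustration):

  import org.apache.spark.sql.catalyst.dsl.plans._
  import org.apache.spark.sql.catalyst.dsl.expressions._
  import org.apache.spark.sql.catalyst.plans.logical.LocalRelation

  val testRelation = LocalRelation('a.int, 'b.int)

  // Filter over project ...
  val original = testRelation.select('a).where('a === 1)
  // ... should optimize to project over filter:
  val expected = testRelation.where('a === 1).select('a)
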
http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala
index 7b3653d..8ec1d3d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala
@@ -4,13 +4,9 @@ package optimizer
 
 import org.scalatest.FunSuite
 
-import types.IntegerType
-import util._
-import plans.logical.{LogicalPlan, LocalRelation}
-import expressions._
-import dsl._
-
-/* Implicit conversions for creating query plans */
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.util._
 
 /**
  * Provides helper methods for comparing plans produced by optimization rules with the expected
@@ -41,4 +37,4 @@ class OptimizerTest extends FunSuite {
           |${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\n")}
         """.stripMargin)
   }
-}
\ No newline at end of file
+}

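comparePlans in this file reports mismatches through sideBySide (imported via catalyst.util above), which lays two tree strings out in aligned columns and flags the lines that differ. A hedged sketch:

  import org.apache.spark.sql.catalyst.util.sideBySide

  // Prints the two strings column-aligned; differing lines are marked in the output.
  println(sideBySide("Project [a]\n Filter (a = 1)", "Project [a]\n Filter (a = 2)").mkString("\n"))
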
http://git-wip-us.apache.org/repos/asf/spark/blob/8265dc77/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
new file mode 100644
index 0000000..738cfa8
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql
+package catalyst
+package trees
+
+import org.scalatest.FunSuite
+
+import org.apache.spark.sql.catalyst.expressions.{Expression, IntegerLiteral, Literal}
+import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
+
+class RuleExecutorSuite extends FunSuite {
+  object DecrementLiterals extends Rule[Expression] {
+    def apply(e: Expression): Expression = e transform {
+      case IntegerLiteral(i) if i > 0 => Literal(i - 1)
+    }
+  }
+
+  test("only once") {
+    object ApplyOnce extends RuleExecutor[Expression] {
+      val batches = Batch("once", Once, DecrementLiterals) :: Nil
+    }
+
+    assert(ApplyOnce(Literal(10)) === Literal(9))
+  }
+
+  test("to fixed point") {
+    object ToFixedPoint extends RuleExecutor[Expression] {
+      val batches = Batch("fixedPoint", FixedPoint(100), DecrementLiterals) :: Nil
+    }
+
+    assert(ToFixedPoint(Literal(10)) === Literal(0))
+  }
+
+  test("to maxIterations") {
+    object ToFixedPoint extends RuleExecutor[Expression] {
+      val batches = Batch("fixedPoint", FixedPoint(10), DecrementLiterals) :: Nil
+    }
+
+    assert(ToFixedPoint(Literal(100)) === Literal(90))
+  }
+}