Posted to dev@drill.apache.org by ja...@apache.org on 2013/08/04 04:49:50 UTC

[1/2] Update to add additional SQL work from Julian. Update to Optiq 0.4.9. OrderBy and Union (distinct) are not currently working.

Updated Branches:
  refs/heads/master 103072a61 -> d405c70df


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRel.java
index 567378d..0a81f5e 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRel.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRel.java
@@ -29,30 +29,25 @@ import org.apache.drill.optiq.EnumerableDrillFullEngine;
 import org.eigenbase.rel.RelNode;
 import org.eigenbase.rel.SingleRel;
 import org.eigenbase.relopt.*;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.lang.reflect.Method;
 import java.util.*;
 
-
 /**
- * Relational expression that converts from Drill to Enumerable. At runtime
- * it executes a Drill query and returns the results as an
- * {@link net.hydromatic.linq4j.Enumerable}.
+ * Relational expression that converts from Drill to Enumerable. At runtime it executes a Drill query and returns the
+ * results as an {@link net.hydromatic.linq4j.Enumerable}.
  */
 public class EnumerableDrillRel extends SingleRel implements EnumerableRel {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(EnumerableDrillRel.class);
+  private static final Logger LOG = LoggerFactory.getLogger(EnumerableDrillRel.class);
 
-  private static final Function1<String, Expression> TO_LITERAL =
-      new Function1<String, Expression>() {
-        @Override
-        public Expression apply(String a0) {
-          return Expressions.constant(a0);
-        }
-      };
+  private static final Function1<String, Expression> TO_LITERAL = new Function1<String, Expression>() {
+    @Override
+    public Expression apply(String a0) {
+      return Expressions.constant(a0);
+    }
+  };
 
   private static final Method OF_METHOD;
 
@@ -60,23 +55,19 @@ public class EnumerableDrillRel extends SingleRel implements EnumerableRel {
 
   static {
     try {
-      OF_METHOD =
-          EnumerableDrillFullEngine.class.getMethod("of", String.class, List.class, Class.class, DataContext.class);
-      //EnumerableDrillFullEngine.class.getMethod("of", String.class, List.class, Class.class);
+      OF_METHOD = EnumerableDrillFullEngine.class.getMethod("of", String.class, List.class, Class.class,
+          DataContext.class);
+      // EnumerableDrillFullEngine.class.getMethod("of", String.class, List.class, Class.class);
     } catch (NoSuchMethodException e) {
       throw new RuntimeException(e);
     }
   }
 
-  public EnumerableDrillRel(RelOptCluster cluster,
-                            RelTraitSet traitSet,
-                            RelNode input) {
+  public EnumerableDrillRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode input) {
     super(cluster, traitSet, input);
     assert getConvention() instanceof EnumerableConvention;
     assert input.getConvention() == DrillRel.CONVENTION;
-    physType = PhysTypeImpl.of((JavaTypeFactory) cluster.getTypeFactory(),
-        input.getRowType(),
-        (EnumerableConvention) getConvention());
+    physType = PhysTypeImpl.of((JavaTypeFactory) cluster.getTypeFactory(), input.getRowType(), JavaRowFormat.ARRAY);
   }
 
   public PhysType getPhysType() {
@@ -93,7 +84,7 @@ public class EnumerableDrillRel extends SingleRel implements EnumerableRel {
     return new EnumerableDrillRel(getCluster(), traitSet, sole(inputs));
   }
 
-  public BlockExpression implement(EnumerableRelImplementor implementor) {
+  public Result implement(EnumerableRelImplementor implementor, Prefer pref) {
     LOG.debug("implementing enumerable");
 
     final DrillImplementor drillImplementor = new DrillImplementor();
@@ -105,24 +96,18 @@ public class EnumerableDrillRel extends SingleRel implements EnumerableRel {
 
     // not quite sure where this list was supposed to be set earlier, leaving it null got me back the full result set
 
-    final List<String> fieldNameList = RelOptUtil.getFieldNameList(rowType);
-    //final List<String> fieldNameList = null;
-    return new BlockBuilder()
-        .append(
-            Expressions.call(
-                OF_METHOD,
-                Expressions.constant(plan),
-                Expressions.call(
-                    Arrays.class,
-                    "asList",
-                    Expressions.newArrayInit(
-                        String.class,
-                        Functions.apply(fieldNameList, TO_LITERAL))),
-                Expressions.constant(Object.class),
-                Expressions.variable(DataContext.class, "root")
-            ))
-        .toBlock();
+    final List<String> fieldNameList = rowType.getFieldNames();
+    // final List<String> fieldNameList = null;
+    BlockStatement expr = new BlockBuilder().append(
+        Expressions.call(
+            OF_METHOD,
+            Expressions.constant(plan),
+            Expressions.call(Arrays.class, "asList",
+                Expressions.newArrayInit(String.class, Functions.apply(fieldNameList, TO_LITERAL))),
+            Expressions.constant(Object.class), Expressions.variable(DataContext.class, "root"))).toBlock();
+    final PhysType physType = PhysTypeImpl.of(implementor.getTypeFactory(), getRowType(),
+        pref.prefer(JavaRowFormat.ARRAY));
+    return new Result(expr, physType, JavaRowFormat.ARRAY);
   }
-}
 
-// End EnumerableDrillRel.java
+}
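
With Optiq 0.4.9, implement() returns a Result (generated block, physical row type, and row format) rather than a bare BlockExpression, and the block it builds boils down to a single call into the full engine. A hedged sketch of roughly what that generated block does at run time, assuming only the of() signature reflected in OF_METHOD (the field names here are illustrative):

    import java.util.Arrays;
    import net.hydromatic.optiq.DataContext;
    import org.apache.drill.optiq.EnumerableDrillFullEngine;

    class GeneratedBlockSketch {
      // Sketch only, not code from this patch: "plan" is the JSON-serialized Drill
      // logical plan and "root" is the DataContext Optiq binds to that variable name.
      static Object runGeneratedBlock(String plan, DataContext root) {
        return EnumerableDrillFullEngine.of(
            plan,                    // logical plan produced by DrillImplementor
            Arrays.asList("_MAP"),   // field names taken from the row type (illustrative)
            Object.class,            // element class of the resulting Enumerable
            root);                   // execution context
      }
    }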

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRule.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRule.java
index 6057163..a448171 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRule.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillRule.java
@@ -25,8 +25,7 @@ import org.eigenbase.rel.convert.ConverterRule;
  * Rule that converts any Drill relational expression to enumerable format by adding a {@link EnumerableDrillRel}.
  */
 public class EnumerableDrillRule extends ConverterRule {
-  public static final EnumerableDrillRule ARRAY_INSTANCE = new EnumerableDrillRule(EnumerableConvention.ARRAY);
-  public static final EnumerableDrillRule CUSTOM_INSTANCE = new EnumerableDrillRule(EnumerableConvention.CUSTOM);
+  public static final EnumerableDrillRule ARRAY_INSTANCE = new EnumerableDrillRule(EnumerableConvention.INSTANCE);
 
   private EnumerableDrillRule(EnumerableConvention outConvention) {
     super(RelNode.class, DrillRel.CONVENTION, outConvention, "EnumerableDrillRule." + outConvention);
@@ -43,5 +42,3 @@ public class EnumerableDrillRule extends ConverterRule {
     return new EnumerableDrillRel(rel.getCluster(), rel.getTraitSet().replace(getOutConvention()), rel);
   }
 }
-
-// End EnumerableDrillRule.java
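
Only one rule instance remains because the rule is now built against the single EnumerableConvention.INSTANCE instead of the former ARRAY and CUSTOM conventions, apparently following the Optiq 0.4.9 API. As a hypothetical sketch (not part of this patch), handing such a ConverterRule to the planner looks roughly like:

    import org.eigenbase.relopt.RelOptPlanner;

    class RuleRegistrationSketch {
      // Registers the converter rule so the planner can bridge DrillRel.CONVENTION
      // expressions into the enumerable convention; assumes a planner is supplied.
      static void register(RelOptPlanner planner) {
        planner.addRule(org.apache.drill.optiq.EnumerableDrillRule.ARRAY_INSTANCE);
      }
    }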

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/full/ResultEnumerator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/full/ResultEnumerator.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/full/ResultEnumerator.java
index 8839e92..8f75e5f 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/full/ResultEnumerator.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/full/ResultEnumerator.java
@@ -45,4 +45,8 @@ import org.apache.drill.exec.server.DrillbitContext;
     public void reset() {
       throw new UnsupportedOperationException();
     }
+    
+    public void close(){
+      
+    }
   }
\ No newline at end of file
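
The empty close() added above (and the matching one in DrillRefImpl below) presumably satisfies the close() method of linq4j's Enumerator contract (current/moveNext/reset/close) after the library upgrade. A minimal sketch of an enumerator with the same no-op close, under that assumption:

    import net.hydromatic.linq4j.Enumerator;

    // Assumed sketch, not code from this patch: iterates an in-memory array.
    class ArrayEnumerator implements Enumerator<String> {
      private final String[] values;
      private int i = -1;

      ArrayEnumerator(String... values) { this.values = values; }

      public String current() { return values[i]; }
      public boolean moveNext() { return ++i < values.length; }
      public void reset() { i = -1; }
      public void close() { /* nothing to release, mirroring the no-op above */ }
    }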

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/ref/DrillRefImpl.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/ref/DrillRefImpl.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/ref/DrillRefImpl.java
index 651dd69..0d01085 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/ref/DrillRefImpl.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/sql/client/ref/DrillRefImpl.java
@@ -74,6 +74,10 @@ public class DrillRefImpl<E> {
       this.fields = fields;
     }
 
+    public void close(){
+      
+    }
+    
     public Object current() {
       return current;
     }
@@ -237,4 +241,5 @@ public class DrillRefImpl<E> {
     return Collections.unmodifiableSortedMap(map);
   }
 
+  
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/FullEngineTest.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/FullEngineTest.java b/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/FullEngineTest.java
new file mode 100644
index 0000000..281871e
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/FullEngineTest.java
@@ -0,0 +1,37 @@
+package org.apache.drill.jdbc.test;
+
+import java.io.IOException;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Resources;
+
+public class FullEngineTest {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FullEngineTest.class);
+
+  private static String MODEL_FULL_ENGINE;
+
+  @BeforeClass
+  public static void setupFixtures() throws IOException {
+    MODEL_FULL_ENGINE = Resources.toString(Resources.getResource("full-model.json"), Charsets.UTF_8);
+  }
+  
+  @Test
+  public void fullSelectStarEngine() throws Exception {
+    JdbcAssert.withModel(MODEL_FULL_ENGINE, "DONUTS")
+    // .sql("select cast(_MAP['red'] as bigint) + 1 as red_inc from donuts ")
+        .sql("select * from donuts ").displayResults(50);
+  }
+
+  @Test
+  public void fullEngine() throws Exception {
+    JdbcAssert
+        .withModel(MODEL_FULL_ENGINE, "DONUTS")
+        // .sql("select cast(_MAP['red'] as bigint) + 1 as red_inc from donuts ")
+        .sql(
+            "select cast(_MAP['RED'] as bigint)  as RED, cast(_MAP['GREEN'] as bigint)  as GREEN from donuts where cast(_MAP['red'] as BIGINT) > 1 ")
+        .displayResults(50);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcAssert.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcAssert.java b/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcAssert.java
index a30ce65..6768dda 100644
--- a/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcAssert.java
+++ b/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcAssert.java
@@ -17,25 +17,34 @@
  ******************************************************************************/
 package org.apache.drill.jdbc.test;
 
-import com.google.common.base.Function;
-import junit.framework.Assert;
-import org.apache.drill.common.util.Hook;
-
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Properties;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import net.hydromatic.linq4j.Ord;
 
-import static org.junit.Assert.assertEquals;
+import org.apache.drill.common.util.Hook;
+import org.junit.Assert;
+
+import com.google.common.base.Function;
 
 /**
  * Fluent interface for writing JDBC and query-planning tests.
  */
 public class JdbcAssert {
-
-  public static One withModel(String model, String schema) {
+  public static ModelAndSchema withModel(String model, String schema) {
     final Properties info = new Properties();
     info.setProperty("schema", schema);
     info.setProperty("model", "inline:" + model);
-    return new One(info);
+    return new ModelAndSchema(info);
   }
 
   static String toString(ResultSet resultSet, int expectedRecordCount) throws SQLException {
@@ -54,46 +63,65 @@ public class JdbcAssert {
       }
       buf.append("\n");
     }
-    if (false && expectedRecordCount > 0){
-      assertEquals("Expected record count not matched.", total, expectedRecordCount);
-    }
     return buf.toString();
   }
-
+  
   static String toString(ResultSet resultSet) throws SQLException {
-    return toString(resultSet, -1);
+    StringBuilder buf = new StringBuilder();
+    final List<Ord<String>> columns = columnLabels(resultSet);
+    while (resultSet.next()) {
+      for (Ord<String> column : columns) {
+        buf.append(column.i == 1 ? "" : "; ").append(column.e).append("=").append(resultSet.getObject(column.i));
+      }
+      buf.append("\n");
+    }
+    return buf.toString();
   }
 
-  static int countRecords(ResultSet resultSet) throws SQLException {
+
+
+  static List<String> toStrings(ResultSet resultSet) throws SQLException {
+    final List<String> list = new ArrayList<>();
     StringBuilder buf = new StringBuilder();
-    int total = 0, n;
+    final List<Ord<String>> columns = columnLabels(resultSet);
     while (resultSet.next()) {
-      n = resultSet.getMetaData().getColumnCount();
-      total += n;
+      buf.setLength(0);
+      for (Ord<String> column : columns) {
+        buf.append(column.i == 1 ? "" : "; ").append(column.e).append("=").append(resultSet.getObject(column.i));
+      }
+      list.add(buf.toString());
     }
-    return total;
+    return list;
   }
 
-  public static class One {
+  private static List<Ord<String>> columnLabels(ResultSet resultSet) throws SQLException {
+    int n = resultSet.getMetaData().getColumnCount();
+    List<Ord<String>> columns = new ArrayList<>();
+    for (int i = 1; i <= n; i++) {
+      columns.add(Ord.of(i, resultSet.getMetaData().getColumnLabel(i)));
+    }
+    return columns;
+  }
+
+  public static class ModelAndSchema {
     private final Properties info;
     private final ConnectionFactory connectionFactory;
 
-    public One(Properties info) {
+    public ModelAndSchema(Properties info) {
       this.info = info;
       this.connectionFactory = new ConnectionFactory() {
         public Connection createConnection() throws Exception {
           Class.forName("org.apache.drill.jdbc.Driver");
-          return DriverManager.getConnection("jdbc:drill:", One.this.info);
+          return DriverManager.getConnection("jdbc:drill:", ModelAndSchema.this.info);
         }
       };
     }
 
-    public Two sql(String sql) {
-      return new Two(connectionFactory, sql);
+    public TestDataConnection sql(String sql) {
+      return new TestDataConnection(connectionFactory, sql);
     }
 
-    public <T> T withConnection(Function<Connection, T> function)
-        throws Exception {
+    public <T> T withConnection(Function<Connection, T> function) throws Exception {
       Connection connection = null;
       try {
         connection = connectionFactory.createConnection();
@@ -104,27 +132,30 @@ public class JdbcAssert {
         }
       }
     }
-
-
   }
 
-  public static class Two {
+  public static class TestDataConnection {
     private final ConnectionFactory connectionFactory;
     private final String sql;
 
-    Two(ConnectionFactory connectionFactory, String sql) {
+    TestDataConnection(ConnectionFactory connectionFactory, String sql) {
       this.connectionFactory = connectionFactory;
       this.sql = sql;
     }
 
-    public Two returns(String expected) throws Exception {
+    /** Checks that the current SQL statement returns the expected result. */
+    public TestDataConnection returns(String expected) throws Exception {
       Connection connection = null;
       Statement statement = null;
       try {
         connection = connectionFactory.createConnection();
         statement = connection.createStatement();
         ResultSet resultSet = statement.executeQuery(sql);
-        Assert.assertEquals(expected, JdbcAssert.toString(resultSet));
+        expected = expected.trim();
+        String result = JdbcAssert.toString(resultSet).trim();
+        
+        Assert.assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected), expected.equals(result));
+        Assert.assertEquals(expected, result);
         resultSet.close();
         return this;
       } finally {
@@ -136,15 +167,42 @@ public class JdbcAssert {
         }
       }
     }
+    
+
 
-    public Two displayResults(int recordCount) throws Exception {
+    /**
+     * Checks that the current SQL statement returns the expected result lines. Lines are compared unordered; the test
+     * succeeds if the query returns these lines in any order.
+     */
+    public TestDataConnection returnsUnordered(String... expecteds) throws Exception {
+      Connection connection = null;
+      Statement statement = null;
+      try {
+        connection = connectionFactory.createConnection();
+        statement = connection.createStatement();
+        ResultSet resultSet = statement.executeQuery(sql);
+        Assert.assertEquals(unsortedList(Arrays.asList(expecteds)), unsortedList(JdbcAssert.toStrings(resultSet)));
+        resultSet.close();
+        return this;
+      } finally {
+        if (statement != null) {
+          statement.close();
+        }
+        if (connection != null) {
+          connection.close();
+        }
+      }
+    }
+    
+    public TestDataConnection displayResults(int recordCount) throws Exception {
+      // record count check is done in toString method
+      
       Connection connection = null;
       Statement statement = null;
       try {
         connection = connectionFactory.createConnection();
         statement = connection.createStatement();
         ResultSet resultSet = statement.executeQuery(sql);
-        // record count check is done in toString method
         System.out.println(JdbcAssert.toString(resultSet, recordCount));
         resultSet.close();
         return this;
@@ -156,31 +214,57 @@ public class JdbcAssert {
           connection.close();
         }
       }
+      
     }
 
-    public void planContains(String expected) {
-      final String[] plan0 = {null};
+    private SortedSet<String> unsortedList(List<String> strings) {
+      final SortedSet<String> set = new TreeSet<>();
+      for (String string : strings) {
+        set.add(string + "\n");
+      }
+      return set;
+    }
+    
+
+
+    public TestDataConnection planContains(String expected) {
+      final String[] plan0 = { null };
       Connection connection = null;
       Statement statement = null;
-      Hook.Closeable x =
-          Hook.LOGICAL_PLAN.add(
-              new Function<String, Void>() {
-                public Void apply(String o) {
-                  plan0[0] = o;
-                  return null;
-                }
-              });
+      final Hook.Closeable x = Hook.LOGICAL_PLAN.add(new Function<String, Void>() {
+        public Void apply(String o) {
+          plan0[0] = o;
+          return null;
+        }
+      });
       try {
         connection = connectionFactory.createConnection();
         statement = connection.prepareStatement(sql);
         statement.close();
-        final String plan = plan0[0];
+        final String plan = plan0[0].trim();
         // it's easier to write java strings containing single quotes than
         // double quotes
-        String expected2 = expected.replace("'", "\"");
-        Assert.assertTrue(plan, plan.contains(expected2));
+        String expected2 = expected.replace("'", "\"").trim();
+        Assert.assertTrue(String.format("Plan of: \n%s \n does not contain expected string of: \n%s",plan, expected2), plan.contains(expected2));
+        return this;
       } catch (Exception e) {
         throw new RuntimeException(e);
+      } finally {
+        if (statement != null) {
+          try {
+            statement.close();
+          } catch (SQLException e) {
+            // ignore
+          }
+        }
+        if (connection != null) {
+          try {
+            connection.close();
+          } catch (SQLException e) {
+            // ignore
+          }
+        }
+        x.close();
       }
     }
   }
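
The new returnsUnordered() accepts expected result lines in any order: both the expected strings and the rows produced by toStrings() are placed into sorted sets before being compared, so only the contents have to match. A small self-contained illustration of that comparison (the sample rows are invented for the example):

    import java.util.Arrays;
    import java.util.SortedSet;
    import java.util.TreeSet;

    public class UnorderedCompareSketch {
      public static void main(String[] args) {
        SortedSet<String> expected = new TreeSet<>(Arrays.asList("DEPTID=31", "DEPTID=33", "DEPTID=34"));
        SortedSet<String> actual = new TreeSet<>(Arrays.asList("DEPTID=34", "DEPTID=31", "DEPTID=33"));
        // Sorting removes any dependence on the order in which rows came back.
        System.out.println(expected.equals(actual)); // prints "true"
      }
    }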

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcTest.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcTest.java b/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcTest.java
index 26c7a06..043d4fc 100644
--- a/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcTest.java
+++ b/sandbox/prototype/sqlparser/src/test/java/org/apache/drill/jdbc/test/JdbcTest.java
@@ -17,261 +17,369 @@
  ******************************************************************************/
 package org.apache.drill.jdbc.test;
 
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.Statement;
+
+import org.apache.drill.exec.ref.ReferenceInterpreter;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import com.google.common.base.Charsets;
 import com.google.common.base.Function;
+import com.google.common.io.Resources;
 
-import junit.framework.TestCase;
-
-import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.jdbc.DrillTable;
-import org.junit.Rule;
-import org.junit.rules.TestName;
-import org.junit.rules.TestRule;
-import org.junit.rules.Timeout;
-
-import java.sql.*;
-
-/**
- * Unit tests for Drill's JDBC driver.
- */
-public class JdbcTest extends TestCase {
-
-  // Determine if we are in Eclipse Debug mode.
-  static final boolean IS_DEBUG = java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toString().indexOf("-agentlib:jdwp") > 0;
-
-  // Set a timeout unless we're debugging.
-  @Rule public TestRule globalTimeout = IS_DEBUG ? new TestName() : new Timeout(10000);
-
-  
-  private static final String MODEL =
-      "{\n"
-          + "  version: '1.0',\n"
-          + "   schemas: [\n"
-          + "     {\n"
-          + "       name: 'DONUTS',\n"
-          + "       tables: [\n"
-          + "         {\n"
-          + "           name: 'DONUTS',\n"
-          + "           type: 'custom',\n"
-          + "           factory: '" + DrillTable.Factory.class.getName() + "'\n,"
-          + "           operand: {\n"
-          + "             path: '/donuts.json',\n"
-          + "             useReferenceInterpreter: 'true'\n"
-          + "           }\n"
-          + "         }\n"
-          + "       ]\n"
-          + "     }\n"
-          + "   ]\n"
-          + "}";
-
-  private static final String MODEL_FULL_ENGINE =
-      "{\n"
-          + "  version: '1.0',\n"
-          + "   schemas: [\n"
-          + "     {\n"
-          + "       name: 'DONUTS',\n"
-          + "       tables: [\n"
-          + "         {\n"
-          + "           name: 'DONUTS',\n"
-          + "           type: 'custom',\n"
-          + "           factory: '" + DrillTable.Factory.class.getName() + "'\n,"
-          + "           operand: {\n"
-          + "             path: '/donuts.json'\n"
-          + "           }\n"
-          + "         }\n"
-          + "       ]\n"
-          + "     }\n"
-          + "   ]\n"
-          + "}";
-
-  private static final String EXPECTED =
-      "_MAP={batters={batter=[{id=1001, type=Regular}, {id=1002, type=Chocolate}, {id=1003, type=Blueberry}, {id=1004, type=Devil's Food}]}, id=0001, name=Cake, ppu=0.55, sales=35, topping=[{id=5001, type=None}, {id=5002, type=Glazed}, {id=5005, type=Sugar}, {id=5007, type=Powdered Sugar}, {id=5006, type=Chocolate with Sprinkles}, {id=5003, type=Chocolate}, {id=5004, type=Maple}], type=donut}\n"
-          + "_MAP={batters={batter=[{id=1001, type=Regular}]}, id=0002, name=Raised, ppu=0.69, sales=145, topping=[{id=5001, type=None}, {id=5002, type=Glazed}, {id=5005, type=Sugar}, {id=5003, type=Chocolate}, {id=5004, type=Maple}], type=donut}\n"
-          + "_MAP={batters={batter=[{id=1001, type=Regular}, {id=1002, type=Chocolate}]}, id=0003, name=Old Fashioned, ppu=0.55, sales=300, topping=[{id=5001, type=None}, {id=5002, type=Glazed}, {id=5003, type=Chocolate}, {id=5004, type=Maple}], type=donut}\n"
-          + "_MAP={batters={batter=[{id=1001, type=Regular}, {id=1002, type=Chocolate}, {id=1003, type=Blueberry}, {id=1004, type=Devil's Food}]}, filling=[{id=6001, type=None}, {id=6002, type=Raspberry}, {id=6003, type=Lemon}, {id=6004, type=Chocolate}, {id=6005, type=Kreme}], id=0004, name=Filled, ppu=0.69, sales=14, topping=[{id=5001, type=None}, {id=5002, type=Glazed}, {id=5005, type=Sugar}, {id=5007, type=Powdered Sugar}, {id=5006, type=Chocolate with Sprinkles}, {id=5003, type=Chocolate}, {id=5004, type=Maple}], type=donut}\n"
-          + "_MAP={batters={batter=[{id=1001, type=Regular}]}, id=0005, name=Apple Fritter, ppu=1.0, sales=700, topping=[{id=5002, type=Glazed}], type=donut}\n";
+/** Unit tests for Drill's JDBC driver. */
+public class JdbcTest {
+  private static String MODEL;
+  private static String EXPECTED;
+
+  @BeforeClass
+  public static void setupFixtures() throws IOException {
+    MODEL = Resources.toString(Resources.getResource("test-models.json"), Charsets.UTF_8);
+    EXPECTED = Resources.toString(Resources.getResource("donuts-output-data.txt"), Charsets.UTF_8);
+  }
 
   /**
-   * Load driver.
+   * Command-line utility to execute a logical plan.
+   * 
+   * <p>
+   * The forwarding method ensures that the IDE calls this method with the right classpath.
+   * </p>
    */
+  public static void main(String[] args) throws Exception {
+    ReferenceInterpreter.main(args);
+  }
+
+  /** Load driver. */
+  @Test
   public void testLoadDriver() throws ClassNotFoundException {
     Class.forName("org.apache.drill.jdbc.Driver");
   }
 
-  /**
-   * Load driver and make a connection.
-   */
+  /** Load driver and make a connection. */
+  @Test
   public void testConnect() throws Exception {
     Class.forName("org.apache.drill.jdbc.Driver");
-    final Connection connection = DriverManager.getConnection(
-        "jdbc:drill:schema=DONUTS");
+    final Connection connection = DriverManager.getConnection("jdbc:drill:schema=DONUTS");
     connection.close();
   }
 
-  /**
-   * Load driver, make a connection, prepare a statement.
-   */
+  /** Load driver, make a connection, prepare a statement. */
+  @Test
   public void testPrepare() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .withConnection(
-            new Function<Connection, Void>() {
-              public Void apply(Connection connection) {
-                try {
-                  final Statement statement = connection.prepareStatement(
-                      "select * from donuts");
-                  statement.close();
-                  return null;
-                } catch (Exception e) {
-                  throw new RuntimeException(e);
-                }
-              }
-            });
+    JdbcAssert.withModel(MODEL, "DONUTS").withConnection(new Function<Connection, Void>() {
+      public Void apply(Connection connection) {
+        try {
+          final Statement statement = connection.prepareStatement("select * from donuts");
+          statement.close();
+          return null;
+        } catch (Exception e) {
+          throw new RuntimeException(e);
+        }
+      }
+    });
   }
 
-  /**
-   * Simple query against JSON.
-   */
+  /** Simple query against JSON. */
+  @Test
   public void testSelectJson() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select * from donuts")
-        .returns(EXPECTED);
+    JdbcAssert.withModel(MODEL, "DONUTS").sql("select * from donuts").returns(EXPECTED);
   }
 
-  public void testFullSelectStarEngine() throws Exception {
-    JdbcAssert.withModel(MODEL_FULL_ENGINE, "DONUTS")
-        //.sql("select cast(_MAP['red'] as bigint) + 1 as red_inc from donuts ")
-        .sql("select * from donuts ")
-        .displayResults(50);
+  /** Simple query against EMP table in HR database. */
+  @Test
+  public void testSelectEmployees() throws Exception {
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from employees")
+        .returns(
+            "_MAP={deptId=31, lastName=Rafferty}\n" + "_MAP={deptId=33, lastName=Jones}\n"
+                + "_MAP={deptId=33, lastName=Steinberg}\n" + "_MAP={deptId=34, lastName=Robinson}\n"
+                + "_MAP={deptId=34, lastName=Smith}\n" + "_MAP={lastName=John}\n");
   }
-  
-  public void testFullEngine() throws Exception {
-    JdbcAssert.withModel(MODEL_FULL_ENGINE, "DONUTS")
-        //.sql("select cast(_MAP['red'] as bigint) + 1 as red_inc from donuts ")
-        .sql("select cast(_MAP['RED'] as bigint)  as RED, cast(_MAP['GREEN'] as bigint)  as GREEN from donuts where cast(_MAP['red'] as BIGINT) > 1 ")
-        .displayResults(50);
+
+  /** Simple query against EMP table in HR database. */
+  @Test
+  public void testSelectEmpView() throws Exception {
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from emp")
+        .returns(
+            "DEPTID=31; LASTNAME=Rafferty\n" + "DEPTID=33; LASTNAME=Jones\n" + "DEPTID=33; LASTNAME=Steinberg\n"
+                + "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n" + "DEPTID=null; LASTNAME=John\n");
   }
 
-  /**
-   * Query with project list. No field references yet.
-   */
+  /** Simple query against EMP table in HR database. */
+  @Test
+  public void testSelectDept() throws Exception {
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from departments")
+        .returns(
+            "_MAP={deptId=31, name=Sales}\n" + "_MAP={deptId=33, name=Engineering}\n"
+                + "_MAP={deptId=34, name=Clerical}\n" + "_MAP={deptId=35, name=Marketing}\n");
+  }
+
+  /** Query with project list. No field references yet. */
+  @Test
   public void testProjectConstant() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select 1 + 3 as c from donuts")
-        .returns("C=4\n"
-            + "C=4\n"
-            + "C=4\n"
-            + "C=4\n"
-            + "C=4\n");
+    JdbcAssert.withModel(MODEL, "DONUTS").sql("select 1 + 3 as c from donuts")
+        .returns("C=4\n" + "C=4\n" + "C=4\n" + "C=4\n" + "C=4\n");
   }
 
-  /**
-   * Query that projects an element from the map.
-   */
+  /** Query that projects an element from the map. */
+  @Test
   public void testProject() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select _MAP['ppu'] as ppu from donuts")
-        .returns("PPU=0.55\n"
-            + "PPU=0.69\n"
-            + "PPU=0.55\n"
-            + "PPU=0.69\n"
-            + "PPU=1.0\n");
+    JdbcAssert.withModel(MODEL, "DONUTS").sql("select _MAP['ppu'] as ppu from donuts")
+        .returns("PPU=0.55\n" + "PPU=0.69\n" + "PPU=0.55\n" + "PPU=0.69\n" + "PPU=1.0\n");
   }
 
-  /**
-   * Same logic as {@link #testProject()}, but using a subquery.
-   */
+  /** Same logic as {@link #testProject()}, but using a subquery. */
+  @Test
   public void testProjectOnSubquery() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select d['ppu'] as ppu from (\n"
-            + " select _MAP as d from donuts)")
-        .returns("PPU=0.55\n"
-            + "PPU=0.69\n"
-            + "PPU=0.55\n"
-            + "PPU=0.69\n"
-            + "PPU=1.0\n");
+    JdbcAssert.withModel(MODEL, "DONUTS").sql("select d['ppu'] as ppu from (\n" + " select _MAP as d from donuts)")
+        .returns("PPU=0.55\n" + "PPU=0.69\n" + "PPU=0.55\n" + "PPU=0.69\n" + "PPU=1.0\n");
   }
 
-  /**
-   * Checks the logical plan.
-   */
+  /** Checks the logical plan. */
+  @Test
   public void testProjectPlan() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
+    JdbcAssert
+        .withModel(MODEL, "DONUTS")
         .sql("select _MAP['ppu'] as ppu from donuts")
         .planContains(
-            "{'head':{'type':'APACHE_DRILL_LOGICAL','version':'1','generator':{'type':'manual','info':'na'}},"
+            "{'head':{'type':'APACHE_DRILL_LOGICAL','version':'1','generator':{'type':'optiq','info':'na'}},"
                 + "'storage':{'donuts-json':{'type':'classpath'},'queue':{'type':'queue'}},"
                 + "'query':["
-                + "{'op':'sequence','do':["
-                + "{'op':'scan','memo':'initial_scan','ref':'_MAP','storageengine':'donuts-json','selection':{'path':'/donuts.json','type':'JSON'}},"
-                + "{'op':'project','projections':[{'expr':'_MAP.ppu','ref':'output.PPU'}]},"
-                + "{'op':'store','storageengine':'queue','memo':'output sink','target':{'number':0}}]}]}");
+                + "{'op':'scan','memo':'initial_scan','ref':'_MAP','storageengine':'donuts-json','selection':{'path':'/donuts.json','type':'JSON'},'@id':1},"
+                + "{'op':'project','input':1,'projections':[{'expr':'_MAP.ppu','ref':'output.PPU'}],'@id':2},"
+                + "{'op':'store','input':2,'storageengine':'queue','memo':'output sink','target':{'number':0},'@id':3}]}");
   }
 
   /**
-   * Query with subquery, filter, and projection of one real and one
-   * nonexistent field from a map field.
+   * Query with subquery, filter, and projection of one real and one nonexistent field from a map field.
    */
+  @Test
   public void testProjectFilterSubquery() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select d['name'] as name, d['xx'] as xx from (\n"
-            + " select _MAP as d from donuts)\n"
-            + "where cast(d['ppu'] as double) > 0.6")
-        .returns("NAME=Raised; XX=null\n"
-            + "NAME=Filled; XX=null\n"
-            + "NAME=Apple Fritter; XX=null\n");
+    JdbcAssert
+        .withModel(MODEL, "DONUTS")
+        .sql(
+            "select d['name'] as name, d['xx'] as xx from (\n" + " select _MAP as d from donuts)\n"
+                + "where cast(d['ppu'] as double) > 0.6")
+        .returns("NAME=Raised; XX=null\n" + "NAME=Filled; XX=null\n" + "NAME=Apple Fritter; XX=null\n");
   }
 
+  @Test
   public void testProjectFilterSubqueryPlan() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select d['name'] as name, d['xx'] as xx from (\n"
-            + " select _MAP['donuts'] as d from donuts)\n"
-            + "where cast(d['ppu'] as double) > 0.6")
+    JdbcAssert
+        .withModel(MODEL, "DONUTS")
+        .sql(
+            "select d['name'] as name, d['xx'] as xx from (\n" + " select _MAP['donuts'] as d from donuts)\n"
+                + "where cast(d['ppu'] as double) > 0.6")
         .planContains(
-            "{'head':{'type':'APACHE_DRILL_LOGICAL','version':'1','generator':{'type':'manual','info':'na'}},'storage':{'donuts-json':{'type':'classpath'},'queue':{'type':'queue'}},"
+            "{'head':{'type':'APACHE_DRILL_LOGICAL','version':'1','generator':{'type':'optiq','info':'na'}},'storage':{'donuts-json':{'type':'classpath'},'queue':{'type':'queue'}},"
                 + "'query':["
-                + "{'op':'sequence','do':["
-                + "{'op':'scan','memo':'initial_scan','ref':'_MAP','storageengine':'donuts-json','selection':{'path':'/donuts.json','type':'JSON'}},"
-                + "{'op':'filter','expr':'(_MAP.donuts.ppu > 0.6)'},"
-                + "{'op':'project','projections':[{'expr':'_MAP.donuts','ref':'output.D'}]},"
-                + "{'op':'project','projections':[{'expr':'D.name','ref':'output.NAME'},{'expr':'D.xx','ref':'output.XX'}]},"
-                + "{'op':'store','storageengine':'queue','memo':'output sink','target':{'number':0}}]}]}");
+                + "{'op':'scan','memo':'initial_scan','ref':'_MAP','storageengine':'donuts-json','selection':{'path':'/donuts.json','type':'JSON'},'@id':1},"
+                + "{'op':'filter','input':1,'expr':'(_MAP.donuts.ppu > 0.6)','@id':2},"
+                + "{'op':'project','input':2,'projections':[{'expr':'_MAP.donuts','ref':'output.D'}],'@id':3},"
+                + "{'op':'project','input':3,'projections':[{'expr':'D.name','ref':'output.NAME'},{'expr':'D.xx','ref':'output.XX'}],'@id':4},"
+                + "{'op':'store','input':4,'storageengine':'queue','memo':'output sink','target':{'number':0},'@id':5}]}");
   }
 
-  /**
-   * Query that projects one field. (Disabled; uses sugared syntax.)
-   */
-  public void _testProjectNestedFieldSugared() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select donuts.ppu from donuts")
-        .returns("C=4\n"
-            + "C=4\n"
-            + "C=4\n"
-            + "C=4\n"
-            + "C=4\n");
+  /** Query that projects one field. (Disabled; uses sugared syntax.) */
+  @Test @Ignore
+  public void testProjectNestedFieldSugared() throws Exception {
+    JdbcAssert.withModel(MODEL, "DONUTS").sql("select donuts.ppu from donuts")
+        .returns("C=4\n" + "C=4\n" + "C=4\n" + "C=4\n" + "C=4\n");
   }
 
-  /**
-   * Query with filter. No field references yet.
-   */
+  /** Query with filter. No field references yet. */
+  @Test
   public void testFilterConstantFalse() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select * from donuts where 3 > 4")
-        .returns("");
+    JdbcAssert.withModel(MODEL, "DONUTS").sql("select * from donuts where 3 > 4").returns("");
   }
 
+  @Test
   public void testFilterConstant() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("select * from donuts where 3 < 4")
-        .returns(EXPECTED);
+    JdbcAssert.withModel(MODEL, "DONUTS").sql("select * from donuts where 3 < 4").returns(EXPECTED);
   }
 
+  @Test
   public void testValues() throws Exception {
-    JdbcAssert.withModel(MODEL, "DONUTS")
-        .sql("values (1)")
-        .returns("EXPR$0=1\n");
+    JdbcAssert.withModel(MODEL, "DONUTS").sql("values (1)").returns("EXPR$0=1\n");
 
     // Enable when https://issues.apache.org/jira/browse/DRILL-57 fixed
     // .planContains("store");
   }
+
+  @Test
+  public void testDistinct() throws Exception {
+    JdbcAssert.withModel(MODEL, "HR").sql("select distinct deptId from emp")
+        .returnsUnordered("DEPTID=null", "DEPTID=31", "DEPTID=34", "DEPTID=33")
+        .planContains("\"op\":\"collapsingaggregate\"");
+  }
+
+  @Test
+  public void testCountNoGroupBy() throws Exception {
+    // 5 out of 6 employees have a not-null deptId
+    JdbcAssert.withModel(MODEL, "HR").sql("select count(deptId) as cd, count(*) as c from emp").returns("CD=5; C=6\n")
+        .planContains("\"op\":\"collapsingaggregate\"");
+  }
+
+  @Test
+  public void testDistinctCountNoGroupBy() throws Exception {
+    JdbcAssert.withModel(MODEL, "HR").sql("select count(distinct deptId) as c from emp").returns("C=3\n")
+        .planContains("\"op\":\"collapsingaggregate\"");
+  }
+
+  @Test
+  public void testDistinctCountGroupByEmpty() throws Exception {
+    JdbcAssert.withModel(MODEL, "HR").sql("select count(distinct deptId) as c from emp group by ()").returns("C=3\n")
+        .planContains("\"op\":\"collapsingaggregate\"");
+  }
+
+  @Test
+  public void testCountNull() throws Exception {
+    JdbcAssert.withModel(MODEL, "HR").sql("select count(distinct deptId) as c from emp group by ()").returns("C=3\n")
+        .planContains("\"op\":\"collapsingaggregate\"");
+  }
+
+  @Test
+  public void testCount() throws Exception {
+    JdbcAssert.withModel(MODEL, "HR").sql("select deptId, count(*) as c from emp group by deptId")
+        .returnsUnordered("DEPTID=31; C=1", "DEPTID=33; C=2", "DEPTID=34; C=2", "DEPTID=null; C=1")
+        .planContains("\"op\":\"collapsingaggregate\""); // make sure using drill
+  }
+
+  @Test
+  public void testJoin() throws Exception {
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from emp join dept on emp.deptId = dept.deptId")
+        .returnsUnordered("DEPTID=31; LASTNAME=Rafferty; DEPTID0=31; NAME=Sales",
+            "DEPTID=33; LASTNAME=Jones; DEPTID0=33; NAME=Engineering",
+            "DEPTID=33; LASTNAME=Steinberg; DEPTID0=33; NAME=Engineering",
+            "DEPTID=34; LASTNAME=Robinson; DEPTID0=34; NAME=Clerical",
+            "DEPTID=34; LASTNAME=Smith; DEPTID0=34; NAME=Clerical").planContains("'type':'inner'");
+  }
+
+  @Test
+  public void testLeftJoin() throws Exception {
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from emp left join dept on emp.deptId = dept.deptId")
+        .returnsUnordered("DEPTID=31; LASTNAME=Rafferty; DEPTID0=31; NAME=Sales",
+            "DEPTID=33; LASTNAME=Jones; DEPTID0=33; NAME=Engineering",
+            "DEPTID=33; LASTNAME=Steinberg; DEPTID0=33; NAME=Engineering",
+            "DEPTID=34; LASTNAME=Robinson; DEPTID0=34; NAME=Clerical",
+            "DEPTID=34; LASTNAME=Smith; DEPTID0=34; NAME=Clerical",
+            "DEPTID=null; LASTNAME=John; DEPTID0=null; NAME=null").planContains("'type':'left'");
+  }
+
+  /**
+   * Right join is tricky because Drill's "join" operator only supports "left", so we have to flip inputs.
+   */
+  @Test @Ignore
+  public void testRightJoin() throws Exception {
+    JdbcAssert.withModel(MODEL, "HR").sql("select * from emp right join dept on emp.deptId = dept.deptId")
+        .returnsUnordered("xx").planContains("'type':'left'");
+  }
+
+  @Test
+  public void testFullJoin() throws Exception {
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from emp full join dept on emp.deptId = dept.deptId")
+        .returnsUnordered("DEPTID=31; LASTNAME=Rafferty; DEPTID0=31; NAME=Sales",
+            "DEPTID=33; LASTNAME=Jones; DEPTID0=33; NAME=Engineering",
+            "DEPTID=33; LASTNAME=Steinberg; DEPTID0=33; NAME=Engineering",
+            "DEPTID=34; LASTNAME=Robinson; DEPTID0=34; NAME=Clerical",
+            "DEPTID=34; LASTNAME=Smith; DEPTID0=34; NAME=Clerical",
+            "DEPTID=null; LASTNAME=John; DEPTID0=null; NAME=null",
+            "DEPTID=null; LASTNAME=null; DEPTID0=35; NAME=Marketing").planContains("'type':'outer'");
+  }
+
+  /**
+   * Join on subquery; also tests that if a field of the same name exists in both inputs, both fields make it through
+   * the join.
+   */
+  @Test
+  public void testJoinOnSubquery() throws Exception {
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql(
+            "select * from (\n" + "select deptId, lastname, 'x' as name from emp) as e\n"
+                + " join dept on e.deptId = dept.deptId")
+        .returnsUnordered("DEPTID=31; LASTNAME=Rafferty; NAME=x; DEPTID0=31; NAME0=Sales",
+            "DEPTID=33; LASTNAME=Jones; NAME=x; DEPTID0=33; NAME0=Engineering",
+            "DEPTID=33; LASTNAME=Steinberg; NAME=x; DEPTID0=33; NAME0=Engineering",
+            "DEPTID=34; LASTNAME=Robinson; NAME=x; DEPTID0=34; NAME0=Clerical",
+            "DEPTID=34; LASTNAME=Smith; NAME=x; DEPTID0=34; NAME0=Clerical").planContains("'type':'inner'");
+  }
+
+  /** Tests that one of the FoodMart tables is present. */
+  @Test @Ignore
+  public void testFoodMart() throws Exception {
+    JdbcAssert
+        .withModel(MODEL, "FOODMART")
+        .sql("select * from product_class where cast(_map['product_class_id'] as integer) < 3")
+        .returnsUnordered(
+            "_MAP={product_category=Seafood, product_class_id=2, product_department=Seafood, product_family=Food, product_subcategory=Shellfish}",
+            "_MAP={product_category=Specialty, product_class_id=1, product_department=Produce, product_family=Food, product_subcategory=Nuts}");
+  }
+
+  @Test
+  public void testUnionAll() throws Exception {
+    JdbcAssert.withModel(MODEL, "HR").sql("select deptId from dept\n" + "union all\n" + "select deptId from emp")
+        .returnsUnordered("DEPTID=31", "DEPTID=33", "DEPTID=34", "DEPTID=35", "DEPTID=null")
+        .planContains("'op':'union','distinct':false");
+  }
+
+  @Test @Ignore
+  public void testUnion() throws Exception {
+    JdbcAssert.withModel(MODEL, "HR").sql("select deptId from dept\n" + "union\n" + "select deptId from emp")
+        .returnsUnordered("DEPTID=31", "DEPTID=33", "DEPTID=34", "DEPTID=35", "DEPTID=null")
+        .planContains("'op':'union','distinct':true");
+  }
+
+  @Test @Ignore
+  public void testOrderBy() throws Exception {
+    // desc nulls last
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from emp order by deptId desc nulls first")
+        .returns(
+            "DEPTID=null; LASTNAME=John\n" + "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n"
+                + "DEPTID=33; LASTNAME=Jones\n" + "DEPTID=33; LASTNAME=Steinberg\n" + "DEPTID=31; LASTNAME=Rafferty\n")
+        .planContains("'op':'order'");
+    // desc nulls first
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from emp order by deptId desc nulls last")
+        .returns(
+            "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n" + "DEPTID=33; LASTNAME=Jones\n"
+                + "DEPTID=33; LASTNAME=Steinberg\n" + "DEPTID=31; LASTNAME=Rafferty\n" + "DEPTID=null; LASTNAME=John\n")
+        .planContains("'op':'order'");
+    // desc is implicitly "nulls first"
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from emp order by deptId desc")
+        .returns(
+            "DEPTID=null; LASTNAME=John\n" + "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n"
+                + "DEPTID=33; LASTNAME=Jones\n" + "DEPTID=33; LASTNAME=Steinberg\n" + "DEPTID=31; LASTNAME=Rafferty\n")
+        .planContains("'op':'order'");
+    // no sort order specified is implicitly "asc", and asc is "nulls last"
+    JdbcAssert
+        .withModel(MODEL, "HR")
+        .sql("select * from emp order by deptId")
+        .returns(
+            "DEPTID=null; LASTNAME=John\n" + "DEPTID=31; LASTNAME=Rafferty\n" + "DEPTID=33; LASTNAME=Jones\n"
+                + "DEPTID=33; LASTNAME=Steinberg\n" + "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n")
+        .planContains("'op':'order'");
+  }
 }
 
 // End JdbcTest.java
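
On the ignored testRightJoin above: because Drill's join operator only supports "left", a right join has to be served by flipping the two inputs. A hedged sketch of that rewrite at the SQL level (equivalent rows, though the output column order differs; this shows the idea, not the planner's actual output):

    public class RightJoinFlipSketch {
      public static void main(String[] args) {
        // The query the ignored test issues:
        String rightJoin = "select * from emp right join dept on emp.deptId = dept.deptId";
        // The same join expressed with the inputs swapped, which a left-only join can run:
        String flippedLeft = "select * from dept left join emp on emp.deptId = dept.deptId";
        System.out.println(rightJoin);
        System.out.println(flippedLeft);
      }
    }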

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/test/resources/donuts-output-data.txt
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/test/resources/donuts-output-data.txt b/sandbox/prototype/sqlparser/src/test/resources/donuts-output-data.txt
new file mode 100644
index 0000000..6934600
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/test/resources/donuts-output-data.txt
@@ -0,0 +1,5 @@
+_MAP={batters={batter=[{id=1001, type=Regular}, {id=1002, type=Chocolate}, {id=1003, type=Blueberry}, {id=1004, type=Devil's Food}]}, id=0001, name=Cake, ppu=0.55, sales=35, topping=[{id=5001, type=None}, {id=5002, type=Glazed}, {id=5005, type=Sugar}, {id=5007, type=Powdered Sugar}, {id=5006, type=Chocolate with Sprinkles}, {id=5003, type=Chocolate}, {id=5004, type=Maple}], type=donut}
+_MAP={batters={batter=[{id=1001, type=Regular}]}, id=0002, name=Raised, ppu=0.69, sales=145, topping=[{id=5001, type=None}, {id=5002, type=Glazed}, {id=5005, type=Sugar}, {id=5003, type=Chocolate}, {id=5004, type=Maple}], type=donut}
+_MAP={batters={batter=[{id=1001, type=Regular}, {id=1002, type=Chocolate}]}, id=0003, name=Old Fashioned, ppu=0.55, sales=300, topping=[{id=5001, type=None}, {id=5002, type=Glazed}, {id=5003, type=Chocolate}, {id=5004, type=Maple}], type=donut}
+_MAP={batters={batter=[{id=1001, type=Regular}, {id=1002, type=Chocolate}, {id=1003, type=Blueberry}, {id=1004, type=Devil's Food}]}, filling=[{id=6001, type=None}, {id=6002, type=Raspberry}, {id=6003, type=Lemon}, {id=6004, type=Chocolate}, {id=6005, type=Kreme}], id=0004, name=Filled, ppu=0.69, sales=14, topping=[{id=5001, type=None}, {id=5002, type=Glazed}, {id=5005, type=Sugar}, {id=5007, type=Powdered Sugar}, {id=5006, type=Chocolate with Sprinkles}, {id=5003, type=Chocolate}, {id=5004, type=Maple}], type=donut}
+_MAP={batters={batter=[{id=1001, type=Regular}]}, id=0005, name=Apple Fritter, ppu=1.0, sales=700, topping=[{id=5002, type=Glazed}], type=donut}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/test/resources/full-model.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/test/resources/full-model.json b/sandbox/prototype/sqlparser/src/test/resources/full-model.json
new file mode 100644
index 0000000..9f3d482
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/test/resources/full-model.json
@@ -0,0 +1,18 @@
+{
+  version: '1.0',
+   schemas: [
+     {
+       name: 'DONUTS',
+       tables: [
+         {
+           name: 'DONUTS',
+           type: 'custom',
+           factory: 'org.apache.drill.jdbc.DrillTable$Factory'
+,           operand: {
+             path: '/donuts.json'
+           }
+         }
+       ]
+     }
+   ]
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/test/resources/test-models.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/test/resources/test-models.json b/sandbox/prototype/sqlparser/src/test/resources/test-models.json
new file mode 100644
index 0000000..23895c9
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/test/resources/test-models.json
@@ -0,0 +1,77 @@
+{
+  version: '1.0',
+   schemas: [
+     {
+       name: 'DONUTS',
+       tables: [
+         {
+           name: 'DONUTS',
+           type: 'custom',
+           factory: 'org.apache.drill.jdbc.DrillTable$Factory',
+           operand: {
+             path: '/donuts.json',
+             useReferenceInterpreter: 'true'
+           }
+         }
+       ]
+     },
+     {
+       name: 'HR',
+       tables: [
+         {
+           name: 'EMPLOYEES',
+           type: 'custom',
+           factory: 'org.apache.drill.jdbc.DrillTable$Factory',
+           operand: {
+             path: '/donuts.json',
+             useReferenceInterpreter: 'true'
+           }
+         },
+         {
+           name: 'DEPARTMENTS',
+           type: 'custom',
+           factory: 'org.apache.drill.jdbc.DrillTable$Factory',
+           operand: {
+             path: '/donuts.json',
+             useReferenceInterpreter: 'true'
+           }
+         },
+         {
+           name: 'EMP',
+           type: 'view',
+           sql: 'select _MAP[\'deptId\'] as deptid, cast(_MAP[\'lastName\'] as varchar) as lastName from employees'
+         },
+         {
+           name: 'DEPT',
+           type: 'view',
+           sql: 'select _MAP[\'deptId\'] as deptid, _MAP[\'name\'] as name from departments'
+         }
+       ]
+     },
+     {
+       name: 'FOODMART',
+       tables: [
+         {
+           name: 'PRODUCT_CLASS',
+           type: 'custom',
+           factory: 'org.apache.drill.jdbc.DrillTable$Factory',
+           operand: {
+             path: '/donuts.json',
+             useReferenceInterpreter: 'true'
+           }
+           
+         },
+         {
+           name: 'TIME_BY_DAY',
+           type: 'custom',
+           factory: 'org.apache.drill.jdbc.DrillTable$Factory',
+           operand: {
+             path: '/donuts.json',
+             useReferenceInterpreter: 'true'
+           }
+           
+         }
+       ]
+     }
+   ]
+}


[2/2] git commit: Update to add additional SQL work from Julian. Update to Optiq 0.4.9. OrderBy and Union (distinct) are not currently working.

Posted by ja...@apache.org.
Update to add additional SQL work from Julian.
Update to Optiq 0.4.9.
OrderBy and Union (distinct) are not currently working.


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/d405c70d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/d405c70d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/d405c70d

Branch: refs/heads/master
Commit: d405c70df958629f70c6378241314876bcd1b7ab
Parents: 103072a
Author: Jacques Nadeau <ja...@apache.org>
Authored: Fri Aug 2 20:17:06 2013 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Sat Aug 3 19:40:26 2013 -0700

----------------------------------------------------------------------
 .../drill/common/logical/LogicalPlan.java       |   3 +-
 sandbox/prototype/sqlparser/pom.xml             | 167 +++----
 .../java/org/apache/drill/jdbc/DrillTable.java  | 156 +++---
 .../main/java/org/apache/drill/jdbc/Driver.java |   1 +
 .../apache/drill/optiq/DrillAggregateRel.java   | 119 +++++
 .../apache/drill/optiq/DrillAggregateRule.java  |  53 ++
 .../org/apache/drill/optiq/DrillFilterRel.java  |  30 +-
 .../org/apache/drill/optiq/DrillFilterRule.java |  16 +-
 .../apache/drill/optiq/DrillImplementor.java    |  33 +-
 .../org/apache/drill/optiq/DrillJoinRel.java    | 158 ++++++
 .../org/apache/drill/optiq/DrillJoinRule.java   |  57 +++
 .../java/org/apache/drill/optiq/DrillOptiq.java |  50 +-
 .../apache/drill/optiq/DrillPrepareImpl.java    |   3 -
 .../org/apache/drill/optiq/DrillProjectRel.java |  50 +-
 .../apache/drill/optiq/DrillProjectRule.java    |  29 +-
 .../java/org/apache/drill/optiq/DrillRel.java   |   4 +-
 .../java/org/apache/drill/optiq/DrillScan.java  |  11 +-
 .../org/apache/drill/optiq/DrillSortRel.java    |  88 ++++
 .../org/apache/drill/optiq/DrillSortRule.java   |  42 ++
 .../org/apache/drill/optiq/DrillUnionRel.java   |  70 +++
 .../org/apache/drill/optiq/DrillUnionRule.java  |  48 ++
 .../org/apache/drill/optiq/DrillValuesRel.java  |   9 +-
 .../org/apache/drill/optiq/DrillValuesRule.java |  17 +-
 .../org/apache/drill/optiq/EnumerableDrill.java | 250 ++++++++++
 .../drill/optiq/EnumerableDrillFullEngine.java  |   5 +-
 .../apache/drill/optiq/EnumerableDrillRel.java  |  71 ++-
 .../apache/drill/optiq/EnumerableDrillRule.java |   5 +-
 .../drill/sql/client/full/ResultEnumerator.java |   4 +
 .../drill/sql/client/ref/DrillRefImpl.java      |   5 +
 .../apache/drill/jdbc/test/FullEngineTest.java  |  37 ++
 .../org/apache/drill/jdbc/test/JdbcAssert.java  | 178 +++++--
 .../org/apache/drill/jdbc/test/JdbcTest.java    | 494 +++++++++++--------
 .../src/test/resources/donuts-output-data.txt   |   5 +
 .../src/test/resources/full-model.json          |  18 +
 .../src/test/resources/test-models.json         |  77 +++
 35 files changed, 1780 insertions(+), 583 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java
index cf15f48..05fbd1f 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java
@@ -93,7 +93,8 @@ public class LogicalPlan {
       LogicalPlan plan = mapper.readValue(planString, LogicalPlan.class);
       return plan;
     } catch (IOException e) {
-      throw new RuntimeException(e);
+      
+      throw new RuntimeException(String.format("Failure while parsing plan: \n %s}", planString), e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/pom.xml
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/pom.xml b/sandbox/prototype/sqlparser/pom.xml
index 9be576e..f30944c 100644
--- a/sandbox/prototype/sqlparser/pom.xml
+++ b/sandbox/prototype/sqlparser/pom.xml
@@ -1,94 +1,95 @@
 <?xml version="1.0"?>
 <project
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
-	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-	<modelVersion>4.0.0</modelVersion>
-	<parent>
-		<artifactId>prototype-parent</artifactId>
-		<groupId>org.apache.drill</groupId>
-		<version>1.0-SNAPSHOT</version>
-	</parent>
-	<artifactId>sqlparser</artifactId>
-	<name>sqlparser</name>
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>prototype-parent</artifactId>
+    <groupId>org.apache.drill</groupId>
+    <version>1.0-SNAPSHOT</version>
+  </parent>
+  <artifactId>sqlparser</artifactId>
+  <name>sqlparser</name>
 
-	<repositories>
-		<repository>
-			<releases>
-				<enabled>true</enabled>
-				<updatePolicy>always</updatePolicy>
-				<checksumPolicy>warn</checksumPolicy>
-			</releases>
-			<id>conjars</id>
-			<name>Conjars</name>
-			<url>http://conjars.org/repo</url>
-			<layout>default</layout>
-		</repository>
-	</repositories>
+  <repositories>
+    <repository>
+      <releases>
+        <enabled>true</enabled>
+        <updatePolicy>always</updatePolicy>
+        <checksumPolicy>warn</checksumPolicy>
+      </releases>
+      <id>conjars</id>
+      <name>Conjars</name>
+      <url>http://conjars.org/repo</url>
+      <layout>default</layout>
+    </repository>
+  </repositories>
 
-	<dependencies>
-    	<dependency>
-			<groupId>net.hydromatic</groupId>
-			<artifactId>optiq</artifactId>
-			<version>0.4.2</version>
-		</dependency>
-		<dependency>
-			<groupId>net.hydromatic</groupId>
-			<artifactId>linq4j</artifactId>
-			<version>0.1.2</version>
-		</dependency>
-        <dependency>
-			<groupId>org.apache.drill</groupId>
-			<artifactId>common</artifactId>
-			<version>1.0-SNAPSHOT</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.drill.exec</groupId>
-			<artifactId>ref</artifactId>
+  <dependencies>
+    <dependency>
+      <groupId>net.hydromatic</groupId>
+      <artifactId>optiq</artifactId>
+      <version>0.4.9</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.drill</groupId>
+      <artifactId>common</artifactId>
+      <version>1.0-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.drill.exec</groupId>
+      <artifactId>ref</artifactId>
       <version>${project.version}</version>
-      </dependency>
-      <dependency>
-    	<groupId>org.apache.drill.exec</groupId>
-			<artifactId>java-exec</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.drill.exec</groupId>
+      <artifactId>java-exec</artifactId>
       <version>${project.version}</version>
-		</dependency>
-      <dependency>
-    	<groupId>org.apache.drill.exec</groupId>
-			<artifactId>java-exec</artifactId>
-            <version>${project.version}</version>
-			<classifier>tests</classifier>
-			<scope>test</scope>
-		</dependency>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.drill.exec</groupId>
+      <artifactId>java-exec</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>pentaho</groupId>
+      <artifactId>mondrian-data-foodmart-queries</artifactId>
+      <version>0.3</version>
+      <scope>test</scope>
+    </dependency>
 
 
-		<dependency>
-			<groupId>org.apache.drill.exec</groupId>
-			<artifactId>ref</artifactId>
+    <dependency>
+      <groupId>org.apache.drill.exec</groupId>
+      <artifactId>ref</artifactId>
       <version>${project.version}</version>
-			<classifier>tests</classifier>
-			<scope>test</scope>
-		</dependency>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
 
-		<dependency>
-			<groupId>org.codehaus.janino</groupId>
-			<artifactId>janino</artifactId>
-			<version>2.6.1</version>
-		</dependency>
-		<dependency>
-			<groupId>sqlline</groupId>
-			<artifactId>sqlline</artifactId>
-			<version>1.1.0</version>
-			<scope>test</scope>
-		</dependency>
-		<!-- Specify xalan and xerces versions to avoid setXIncludeAware error. -->
-		<dependency>
-			<groupId>xerces</groupId>
-			<artifactId>xercesImpl</artifactId>
-			<version>2.9.1</version>
-		</dependency>
-		<dependency>
-			<groupId>xalan</groupId>
-			<artifactId>xalan</artifactId>
-			<version>2.7.1</version>
-		</dependency>
-	</dependencies>
+    <dependency>
+      <groupId>org.codehaus.janino</groupId>
+      <artifactId>janino</artifactId>
+      <version>2.6.1</version>
+    </dependency>
+    <dependency>
+      <groupId>sqlline</groupId>
+      <artifactId>sqlline</artifactId>
+      <version>1.1.0</version>
+      <scope>test</scope>
+    </dependency>
+    <!-- Specify xalan and xerces versions to avoid setXIncludeAware error. -->
+    <dependency>
+      <groupId>xerces</groupId>
+      <artifactId>xercesImpl</artifactId>
+      <version>2.9.1</version>
+    </dependency>
+    <dependency>
+      <groupId>xalan</groupId>
+      <artifactId>xalan</artifactId>
+      <version>2.7.1</version>
+    </dependency>
+  </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/DrillTable.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/DrillTable.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/DrillTable.java
index 602a55d..dd86372 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/DrillTable.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/DrillTable.java
@@ -19,7 +19,9 @@ package org.apache.drill.jdbc;
 
 import java.io.IOException;
 import java.lang.reflect.Type;
+import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 
 import net.hydromatic.linq4j.BaseQueryable;
@@ -28,8 +30,15 @@ import net.hydromatic.linq4j.Linq4j;
 import net.hydromatic.linq4j.expressions.Expression;
 import net.hydromatic.linq4j.expressions.Expressions;
 import net.hydromatic.linq4j.expressions.MethodCallExpression;
-
-import net.hydromatic.optiq.*;
+import net.hydromatic.optiq.BuiltinMethod;
+import net.hydromatic.optiq.DataContext;
+import net.hydromatic.optiq.MutableSchema;
+import net.hydromatic.optiq.Schema;
+import net.hydromatic.optiq.Statistic;
+import net.hydromatic.optiq.Statistics;
+import net.hydromatic.optiq.TableFactory;
+import net.hydromatic.optiq.TranslatableTable;
+import net.hydromatic.optiq.impl.java.JavaTypeFactory;
 
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.logical.StorageEngineConfig;
@@ -37,7 +46,6 @@ import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.ref.rops.DataWriter;
 import org.apache.drill.exec.ref.rse.ClasspathRSE;
 import org.apache.drill.exec.ref.rse.ClasspathRSE.ClasspathInputConfig;
-
 import org.apache.drill.exec.server.Drillbit;
 import org.apache.drill.exec.server.RemoteServiceSet;
 import org.apache.drill.optiq.DrillRel;
@@ -48,38 +56,42 @@ import org.eigenbase.reltype.RelDataType;
 import org.eigenbase.reltype.RelDataTypeFactory;
 import org.eigenbase.sql.type.SqlTypeName;
 
-/**
- * Optiq Table used by Drill.
- */
+/** Optiq Table used by Drill. */
 public class DrillTable extends BaseQueryable<Object>
-    implements TranslatableTable<Object> {
+    implements TranslatableTable<Object>
+{
   private final Schema schema;
   private final String name;
-  private final String storageEngineName;
+  private final String storageEngineName;  
   private final RelDataType rowType;
   public final StorageEngineConfig storageEngineConfig;
   public final Object selection;
   private boolean useReferenceInterpreter;
-
   // full engine connection information
   public Drillbit bit;
   public DrillClient client;
 
-  /**
-   * Creates a DrillTable.
-   */
+  /** Creates a DrillTable. */
   public DrillTable(Schema schema,
-                    Type elementType,
-                    Expression expression,
-                    RelDataType rowType,
-                    String name,
-                    StorageEngineConfig storageEngineConfig,
-                    Object selection,
-                    String storageEngineName,
-                    boolean useReferenceInterpreter
-  ) {
-
+      Type elementType,
+      Expression expression,
+      RelDataType rowType,
+      String name,
+      StorageEngineConfig storageEngineConfig,
+      Object selection,
+      String storageEngineName,
+      boolean useReferenceInterpreter     
+      ) {
     super(schema.getQueryProvider(), elementType, expression);
+    this.schema = schema;
+    this.name = name;
+    this.rowType = rowType;
+    this.storageEngineConfig = storageEngineConfig;
+    this.selection = selection;
+    this.storageEngineName = storageEngineName;
+    this.useReferenceInterpreter = useReferenceInterpreter;
+
+    
     DrillConfig config = DrillConfig.create();
     RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
     try {
@@ -91,13 +103,7 @@ public class DrillTable extends BaseQueryable<Object>
     } catch (Exception e) {
       System.out.println("Error creating drill client or connecting to drillbit.");
     }
-    this.schema = schema;
-    this.name = name;
-    this.rowType = rowType;
-    this.storageEngineConfig = storageEngineConfig;
-    this.selection = selection;
-    this.storageEngineName = storageEngineName;
-    this.useReferenceInterpreter = useReferenceInterpreter;
+
   }
 
   private static DrillTable createTable(
@@ -108,7 +114,7 @@ public class DrillTable extends BaseQueryable<Object>
       Object selection,
       String storageEngineName,
       boolean useReferenceInterpreter
-  ) {
+      ) {
     final MethodCallExpression call = Expressions.call(schema.getExpression(),
         BuiltinMethod.DATA_CONTEXT_GET_TABLE.method,
         Expressions.constant(name),
@@ -120,8 +126,8 @@ public class DrillTable extends BaseQueryable<Object>
                     typeFactory.createSqlType(SqlTypeName.VARCHAR),
                     typeFactory.createSqlType(SqlTypeName.ANY))),
             Collections.singletonList("_MAP"));
-    return new DrillTable(schema, Object.class, call, rowType, name,
-        storageEngineConfig, selection, storageEngineName, useReferenceInterpreter);
+      return new DrillTable(schema, Object.class, call, rowType, name,
+          storageEngineConfig, selection, storageEngineName, useReferenceInterpreter);
   }
 
   @Override
@@ -129,6 +135,15 @@ public class DrillTable extends BaseQueryable<Object>
     return schema;
   }
 
+  
+  public String getStorageEngineName() {
+    return storageEngineName;
+  }
+
+  public boolean useReferenceInterpreter() {
+    return useReferenceInterpreter;
+  }
+
   @Override
   public RelDataType getRowType() {
     return rowType;
@@ -139,10 +154,6 @@ public class DrillTable extends BaseQueryable<Object>
     return Statistics.UNKNOWN;
   }
 
-  public String getStorageEngineName() {
-    return storageEngineName;
-  }
-
   @Override
   public Enumerator<Object> enumerator() {
     return Linq4j.emptyEnumerator();
@@ -154,36 +165,69 @@ public class DrillTable extends BaseQueryable<Object>
         table);
   }
 
-  private static <T> T last(T t0, T t1) {
-    return t0 != null ? t0 : t1;
-  }
-
-  public boolean useReferenceInterpreter() {
-    return useReferenceInterpreter;
-  }
-
-  /**
-   * Factory for custom tables in Optiq schema.
-   */
+  /** Factory for custom tables in Optiq schema. */
   @SuppressWarnings("UnusedDeclaration")
   public static class Factory implements TableFactory<DrillTable> {
+
+    private static final List<String> DONUTS_TABLES = Arrays.asList(
+        "DONUTS");
+
+    private static final List<String> HR_TABLES = Arrays.asList(
+        "EMPLOYEES", "DEPARTMENTS");
+
+    private static final List<String> FOODMART_TABLES = Arrays.asList(
+        "ACCOUNT", "CATEGORY", "CURRENCY", "CUSTOMER", "DAYS", "DEPARTMENT",
+        "EMPLOYEE_CLOSURE", "EMPLOYEE", "EXPENSE_FACT", "INVENTORY_FACT_1997",
+        "INVENTORY_FACT_1998", "POSITION", "PRODUCT_CLASS", "PRODUCT",
+        "PROMOTION", "REGION", "RESERVE_EMPLOYEE", "SALARY", "SALES_FACT_1997",
+        "SALES_FACT_1998", "SALES_FACT_DEC_1998", "STORE", "STORE_RAGGED",
+        "TIME_BY_DAY", "WAREHOUSE", "WAREHOUSE_CLASS");
+
+//    public DrillTable create(
+//        JavaTypeFactory typeFactory,
+//        Schema schema,
+//        String name,
+//        Map<String, Object> operand, 
+//        RelDataType rowType) {
+//      final ClasspathRSE.ClasspathRSEConfig rseConfig = new ClasspathRSE.ClasspathRSEConfig();
+//      final ClasspathInputConfig inputConfig = new ClasspathInputConfig();
+//      assert DONUTS_TABLES.contains(name)
+//          || HR_TABLES.contains(name)
+//          || FOODMART_TABLES.contains(name)
+//          : name;
+//      inputConfig.path = "/" + name.toLowerCase() + ".json";
+//      inputConfig.type = DataWriter.ConverterType.JSON;
+//      boolean useReferenceInterpreter;
+//      if (operand.get("useReferenceInterpreter") != null){
+//        useReferenceInterpreter = operand.get("useReferenceInterpreter").equals("true") ? true : false;
+//      }
+//      else{
+//        useReferenceInterpreter = false;
+//      }      
+//      return createTable(typeFactory, (MutableSchema) schema, name, rseConfig,
+//          inputConfig, "donuts-json", useReferenceInterpreter);
+//    }
+//
     @Override
-    public DrillTable create(Schema schema, String name,
-                             Map<String, Object> operand, RelDataType rowType) {
-      final ClasspathRSE.ClasspathRSEConfig rseConfig =
-          new ClasspathRSE.ClasspathRSEConfig();
+    public DrillTable create(Schema schema, String name, Map<String, Object> operand, RelDataType rowType) {
+      
+      final ClasspathRSE.ClasspathRSEConfig rseConfig = new ClasspathRSE.ClasspathRSEConfig();
       final ClasspathInputConfig inputConfig = new ClasspathInputConfig();
-      inputConfig.path = last((String) operand.get("path"), "/donuts.json");
+      assert DONUTS_TABLES.contains(name)
+          || HR_TABLES.contains(name)
+          || FOODMART_TABLES.contains(name)
+          : name;
+      inputConfig.path = "/" + name.toLowerCase() + ".json";
+      inputConfig.type = DataWriter.ConverterType.JSON;
       boolean useReferenceInterpreter;
       if (operand.get("useReferenceInterpreter") != null){
         useReferenceInterpreter = operand.get("useReferenceInterpreter").equals("true") ? true : false;
       }
       else{
         useReferenceInterpreter = false;
-      }
-      inputConfig.type = DataWriter.ConverterType.JSON;
-      return createTable(schema.getTypeFactory(), (MutableSchema) schema, name,
-          rseConfig, inputConfig, "donuts-json", useReferenceInterpreter);
+      }      
+      return createTable(schema.getTypeFactory(), (MutableSchema) schema, name, rseConfig,
+          inputConfig, "donuts-json", useReferenceInterpreter);
     }
   }
 }
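
With this change the table factory no longer reads a "path" operand; it derives the classpath resource from the
table name and only accepts names from the DONUTS/HR/FOODMART lists above. A rough sketch of the mapping, using
an illustrative table name:

  "EMPLOYEES"  ->  inputConfig.path = "/employees.json"
                   inputConfig.type = DataWriter.ConverterType.JSON
                   storage engine name: "donuts-json" (classpath RSE)

The useReferenceInterpreter flag is still taken from the model operand map and defaults to false when absent.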

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java
index 33a7294..dba2162 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java
@@ -64,6 +64,7 @@ public class Driver extends UnregisteredDriver {
     {
       super.onConnectionInit(connection);
 
+      
       final String model = connection.getProperties().getProperty("model");
       if (model != null) {
         try {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillAggregateRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillAggregateRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillAggregateRel.java
new file mode 100644
index 0000000..6670933
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillAggregateRel.java
@@ -0,0 +1,119 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import java.util.BitSet;
+import java.util.List;
+
+import net.hydromatic.linq4j.Ord;
+
+import org.eigenbase.rel.AggregateCall;
+import org.eigenbase.rel.AggregateRelBase;
+import org.eigenbase.rel.InvalidRelException;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.relopt.RelOptCluster;
+import org.eigenbase.relopt.RelTraitSet;
+import org.eigenbase.util.Util;
+
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * Aggregation implemented in Drill.
+ */
+public class DrillAggregateRel extends AggregateRelBase implements DrillRel {
+  /** Creates a DrillAggregateRel. */
+  public DrillAggregateRel(RelOptCluster cluster, RelTraitSet traits, RelNode child, BitSet groupSet,
+      List<AggregateCall> aggCalls) throws InvalidRelException {
+    super(cluster, traits, child, groupSet, aggCalls);
+    for (AggregateCall aggCall : aggCalls) {
+      if (aggCall.isDistinct()) {
+        throw new InvalidRelException("DrillAggregateRel does not support DISTINCT aggregates");
+      }
+    }
+  }
+
+  @Override
+  public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
+    try {
+      return new DrillAggregateRel(getCluster(), traitSet, sole(inputs), getGroupSet(), aggCalls);
+    } catch (InvalidRelException e) {
+      throw new AssertionError(e);
+    }
+  }
+
+  @Override
+  public int implement(DrillImplementor implementor) {
+    int inputId = implementor.visitChild(this, 0, getChild());
+    final List<String> childFields = getChild().getRowType().getFieldNames();
+    final List<String> fields = getRowType().getFieldNames();
+    /*
+     * E.g. { op: "segment", ref: "segment", exprs: ["deptId"] }, { op: "collapsingaggregate", within: "segment",
+     * carryovers: ["deptId"], aggregations: [ {ref: "c", expr: "count(1)"} ] }
+     */
+    final ObjectNode segment = implementor.mapper.createObjectNode();
+    segment.put("op", "segment");
+    segment.put("input", inputId);
+    // TODO: choose different name for field if there is already a field
+    // called "segment"
+    segment.put("ref", "segment");
+    final ArrayNode exprs = implementor.mapper.createArrayNode();
+    segment.put("exprs", exprs);
+    for (int group : Util.toIter(groupSet)) {
+      exprs.add(childFields.get(group));
+    }
+
+    final int segmentId = implementor.add(segment);
+
+    final ObjectNode aggregate = implementor.mapper.createObjectNode();
+    aggregate.put("op", "collapsingaggregate");
+    aggregate.put("input", segmentId);
+    aggregate.put("within", "segment");
+    final ArrayNode carryovers = implementor.mapper.createArrayNode();
+    aggregate.put("carryovers", carryovers);
+    for (int group : Util.toIter(groupSet)) {
+      carryovers.add(childFields.get(group));
+    }
+    final ArrayNode aggregations = implementor.mapper.createArrayNode();
+    aggregate.put("aggregations", aggregations);
+    for (Ord<AggregateCall> aggCall : Ord.zip(aggCalls)) {
+      final ObjectNode aggregation = implementor.mapper.createObjectNode();
+      aggregation.put("ref", fields.get(groupSet.cardinality() + aggCall.i));
+      aggregation.put("expr", toDrill(aggCall.e, childFields));
+      aggregations.add(aggregation);
+    }
+
+    return implementor.add(aggregate);
+  }
+
+  private String toDrill(AggregateCall call, List<String> fn) {
+    final StringBuilder buf = new StringBuilder();
+    buf.append(call.getAggregation().getName().toLowerCase()).append("(");
+    for (Ord<Integer> arg : Ord.zip(call.getArgList())) {
+      if (arg.i > 0) {
+        buf.append(", ");
+      }
+      buf.append(fn.get(arg.e));
+    }
+    if (call.getArgList().isEmpty()) {
+      buf.append("1"); // dummy arg to implement COUNT(*)
+    }
+    buf.append(")");
+    return buf.toString();
+  }
+}
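
Following the inline comment in implement(), a single-column GROUP BY with a COUNT would translate to roughly the
fragment below; the "@id"/"input" numbers and the output field name "c" are illustrative, and the actual ids depend
on where the operators land in the plan:

  { "op": "segment", "@id": 2, "input": 1, "ref": "segment", "exprs": ["deptId"] },
  { "op": "collapsingaggregate", "@id": 3, "input": 2, "within": "segment",
    "carryovers": ["deptId"], "aggregations": [ { "ref": "c", "expr": "count(1)" } ] }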

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillAggregateRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillAggregateRule.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillAggregateRule.java
new file mode 100644
index 0000000..3267917
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillAggregateRule.java
@@ -0,0 +1,53 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import org.eigenbase.rel.AggregateRel;
+import org.eigenbase.rel.InvalidRelException;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.relopt.*;
+import org.eigenbase.trace.EigenbaseTrace;
+
+import java.util.logging.Logger;
+
+/**
+ * Rule that converts an {@link AggregateRel} to a {@link DrillAggregateRel}, implemented by a Drill "segment" operation
+ * followed by a "collapseaggregate" operation.
+ */
+public class DrillAggregateRule extends RelOptRule {
+  public static final RelOptRule INSTANCE = new DrillAggregateRule();
+  protected static final Logger tracer = EigenbaseTrace.getPlannerTracer();
+
+  private DrillAggregateRule() {
+    super(RelOptRule.some(AggregateRel.class, Convention.NONE, RelOptRule.any(RelNode.class)), "DrillAggregateRule");
+  }
+
+  @Override
+  public void onMatch(RelOptRuleCall call) {
+    final AggregateRel aggregate = (AggregateRel) call.rel(0);
+    final RelNode input = call.rel(1);
+    final RelTraitSet traits = aggregate.getTraitSet().plus(DrillRel.CONVENTION);
+    final RelNode convertedInput = convert(input, traits);
+    try {
+      call.transformTo(new DrillAggregateRel(aggregate.getCluster(), traits, convertedInput, aggregate.getGroupSet(),
+          aggregate.getAggCallList()));
+    } catch (InvalidRelException e) {
+      tracer.warning(e.toString());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRel.java
index b29bcb5..cd6b1c9 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRel.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRel.java
@@ -30,16 +30,14 @@ import java.util.List;
  * Filter implemented in Drill.
  */
 public class DrillFilterRel extends FilterRelBase implements DrillRel {
-  protected DrillFilterRel(RelOptCluster cluster, RelTraitSet traits,
-      RelNode child, RexNode condition) {
+  protected DrillFilterRel(RelOptCluster cluster, RelTraitSet traits, RelNode child, RexNode condition) {
     super(cluster, traits, child, condition);
     assert getConvention() == CONVENTION;
   }
 
   @Override
   public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
-    return new DrillFilterRel(getCluster(), traitSet, sole(inputs),
-        getCondition());
+    return new DrillFilterRel(getCluster(), traitSet, sole(inputs), getCondition());
   }
 
   @Override
@@ -48,19 +46,15 @@ public class DrillFilterRel extends FilterRelBase implements DrillRel {
   }
 
   @Override
-  public void implement(DrillImplementor implementor) {
-    implementor.visitChild(this, 0, getChild());
-    final ObjectNode node = implementor.mapper.createObjectNode();
-/*
-      E.g. {
-	      op: "filter",
-	      expr: "donuts.ppu < 1.00"
-	    }
-*/
-    node.put("op", "filter");
-    node.put("expr", DrillOptiq.toDrill(getChild(), getCondition()));
-    implementor.add(node);
+  public int implement(DrillImplementor implementor) {
+    final int inputId = implementor.visitChild(this, 0, getChild());
+    final ObjectNode filter = implementor.mapper.createObjectNode();
+    /*
+     * E.g. { op: "filter", expr: "donuts.ppu < 1.00" }
+     */
+    filter.put("op", "filter");
+    filter.put("input", inputId);
+    filter.put("expr", DrillOptiq.toDrill(getChild(), getCondition()));
+    return implementor.add(filter);
   }
 }
-
-// End DrillFilterRel.java
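
The filter operator now carries an explicit "input" reference to the operator id returned by visitChild(), instead
of relying on the previous sequence/do nesting. Following the example in the comment, a sketch (ids illustrative):

  { "op": "filter", "@id": 2, "input": 1, "expr": "donuts.ppu < 1.00" }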

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRule.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRule.java
index 8687304..4fdbed2 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRule.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillFilterRule.java
@@ -22,19 +22,13 @@ import org.eigenbase.rel.RelNode;
 import org.eigenbase.relopt.*;
 
 /**
- * Rule that converts a {@link org.eigenbase.rel.FilterRel} to a Drill
- * "filter" operation.
+ * Rule that converts a {@link org.eigenbase.rel.FilterRel} to a Drill "filter" operation.
  */
 public class DrillFilterRule extends RelOptRule {
   public static final RelOptRule INSTANCE = new DrillFilterRule();
 
   private DrillFilterRule() {
-    super(
-        new RelOptRuleOperand(
-            FilterRel.class,
-            Convention.NONE,
-            new RelOptRuleOperand(RelNode.class, ANY)),
-        "DrillFilterRule");
+    super(RelOptRule.some(FilterRel.class, Convention.NONE, RelOptRule.any(RelNode.class)), "DrillFilterRule");
   }
 
   @Override
@@ -43,10 +37,6 @@ public class DrillFilterRule extends RelOptRule {
     final RelNode input = call.getRels()[1];
     final RelTraitSet traits = filter.getTraitSet().plus(DrillRel.CONVENTION);
     final RelNode convertedInput = convert(input, traits);
-    call.transformTo(
-        new DrillFilterRel(filter.getCluster(), traits, convertedInput,
-            filter.getCondition()));
+    call.transformTo(new DrillFilterRel(filter.getCluster(), traits, convertedInput, filter.getCondition()));
   }
 }
-
-// End DrillFilterRule.java

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java
index cbeed5f..57091c8 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java
@@ -28,8 +28,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.eigenbase.rel.RelNode;
 
 /**
- * Context for converting a tree of {@link DrillRel} nodes into a Drill logical
- * plan.
+ * Context for converting a tree of {@link DrillRel} nodes into a Drill logical plan.
  */
 public class DrillImplementor {
   final ObjectMapper mapper = new ObjectMapper();
@@ -49,14 +48,14 @@ public class DrillImplementor {
 
     final ObjectNode generatorNode = mapper.createObjectNode();
     headNode.put("generator", generatorNode);
-    generatorNode.put("type", "manual");
+    generatorNode.put("type", "optiq");
     generatorNode.put("info", "na");
 
     // TODO: populate sources based on the sources of scans that occur in
     // the query
     final ObjectNode sourcesNode = mapper.createObjectNode();
     rootNode.put("storage", sourcesNode);
-    
+
     // input file source
     {
       final ObjectNode sourceNode = mapper.createObjectNode();
@@ -68,36 +67,33 @@ public class DrillImplementor {
       sourceNode.put("type", "queue");
       sourcesNode.put("queue", sourceNode);
     }
-    
 
-    
     final ArrayNode queryNode = mapper.createArrayNode();
     rootNode.put("query", queryNode);
 
-    final ObjectNode sequenceOpNode = mapper.createObjectNode();
-    queryNode.add(sequenceOpNode);
-    sequenceOpNode.put("op", "sequence");
-
-    this.operatorsNode = mapper.createArrayNode();
-    sequenceOpNode.put("do", operatorsNode);
+    this.operatorsNode = queryNode;
   }
 
-  public void go(DrillRel root) {
-    root.implement(this);
+  public int go(DrillRel root) {
+    int inputId = root.implement(this);
 
     // Add a last node, to write to the output queue.
     final ObjectNode writeOp = mapper.createObjectNode();
     writeOp.put("op", "store");
+    writeOp.put("input", inputId);
     writeOp.put("storageengine", "queue");
     writeOp.put("memo", "output sink");
     QueueOutputInfo output = new QueueOutputInfo();
     output.number = 0;
     writeOp.put("target", mapper.convertValue(output, JsonNode.class));
-    add(writeOp);
+    return add(writeOp);
   }
 
-  public void add(ObjectNode operator) {
+  public int add(ObjectNode operator) {
     operatorsNode.add(operator);
+    final int id = operatorsNode.size();
+    operator.put("@id", id);
+    return id;
   }
 
   /** Returns the generated plan. */
@@ -107,9 +103,8 @@ public class DrillImplementor {
     return s;
   }
 
-  public void visitChild(DrillRel parent, int ordinal, RelNode child) {
+  public int visitChild(DrillRel parent, int ordinal, RelNode child) {
     ((DrillRel) child).implement(this);
+    return operatorsNode.size();
   }
 }
-
-// End DrillImplementor.java
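
The plan's "query" section is now a flat array of operators: add() assigns each node a 1-based "@id" as it is
appended, implement() returns that id, and downstream operators (including the final store emitted by go())
reference their upstream via "input", replacing the old "sequence"/"do" wrapper. A rough sketch of the resulting
document shape, with elided details, illustrative ids, and an assumed serialization of QueueOutputInfo as "target":

  {
    "head": { ..., "generator": { "type": "optiq", "info": "na" } },
    "storage": { ..., "queue": { "type": "queue" } },
    "query": [
      { "op": "scan", "@id": 1, ... },
      { "op": "filter", "@id": 2, "input": 1, ... },
      { "op": "store", "@id": 3, "input": 2, "storageengine": "queue",
        "memo": "output sink", "target": { "number": 0 } }
    ]
  }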

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillJoinRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillJoinRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillJoinRel.java
new file mode 100644
index 0000000..79e2da9
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillJoinRel.java
@@ -0,0 +1,158 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+
+import org.eigenbase.rel.InvalidRelException;
+import org.eigenbase.rel.JoinRelBase;
+import org.eigenbase.rel.JoinRelType;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.relopt.RelOptCluster;
+import org.eigenbase.relopt.RelOptUtil;
+import org.eigenbase.relopt.RelTraitSet;
+import org.eigenbase.reltype.RelDataType;
+import org.eigenbase.rex.RexNode;
+import org.eigenbase.util.Pair;
+
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * Join implemented in Drill.
+ */
+public class DrillJoinRel extends JoinRelBase implements DrillRel {
+  private final List<Integer> leftKeys = new ArrayList<>();
+  private final List<Integer> rightKeys = new ArrayList<>();
+
+  /** Creates a DrillJoinRel. */
+  public DrillJoinRel(RelOptCluster cluster, RelTraitSet traits, RelNode left, RelNode right, RexNode condition,
+      JoinRelType joinType) throws InvalidRelException {
+    super(cluster, traits, left, right, condition, joinType, Collections.<String> emptySet());
+    switch (joinType) {
+    case RIGHT:
+      throw new InvalidRelException("DrillJoinRel does not support RIGHT join");
+    }
+    RexNode remaining = RelOptUtil.splitJoinCondition(left, right, condition, leftKeys, rightKeys);
+    if (!remaining.isAlwaysTrue()) {
+      throw new InvalidRelException("DrillJoinRel only supports equi-join");
+    }
+  }
+
+  @Override
+  public DrillJoinRel copy(RelTraitSet traitSet, RexNode condition, RelNode left, RelNode right) {
+    try {
+      return new DrillJoinRel(getCluster(), traitSet, left, right, condition, joinType);
+    } catch (InvalidRelException e) {
+      throw new AssertionError(e);
+    }
+  }
+
+  @Override
+  public int implement(DrillImplementor implementor) {
+    final List<String> fields = getRowType().getFieldNames();
+    assert isUnique(fields);
+    final int leftCount = left.getRowType().getFieldCount();
+    final List<String> leftFields = fields.subList(0, leftCount);
+    final List<String> rightFields = fields.subList(leftCount, fields.size());
+
+    final int leftId = implementInput(implementor, 0, 0, left);
+    final int rightId = implementInput(implementor, 1, leftCount, right);
+
+    /*
+     * E.g. { op: "join", left: 2, right: 4, conditions: [ {relationship: "==", left: "deptId", right: "deptId"} ] }
+     */
+    final ObjectNode join = implementor.mapper.createObjectNode();
+    join.put("op", "join");
+    join.put("left", leftId);
+    join.put("right", rightId);
+    join.put("type", toDrill(joinType));
+    final ArrayNode conditions = implementor.mapper.createArrayNode();
+    join.put("conditions", conditions);
+    for (Pair<Integer, Integer> pair : Pair.zip(leftKeys, rightKeys)) {
+      final ObjectNode condition = implementor.mapper.createObjectNode();
+      condition.put("relationship", "==");
+      condition.put("left", leftFields.get(pair.left));
+      condition.put("right", rightFields.get(pair.left));
+      conditions.add(condition);
+    }
+    return implementor.add(join);
+  }
+
+  private int implementInput(DrillImplementor implementor, int i, int offset, RelNode input) {
+    final int inputId = implementor.visitChild(this, i, input);
+    assert uniqueFieldNames(input.getRowType());
+    final List<String> fields = getRowType().getFieldNames();
+    final List<String> inputFields = input.getRowType().getFieldNames();
+    final List<String> outputFields = fields.subList(offset, offset + inputFields.size());
+    if (!outputFields.equals(inputFields)) {
+      // Ensure that input field names are the same as output field names.
+      // If there are duplicate field names on left and right, fields will get
+      // lost.
+      return rename(implementor, inputId, inputFields, outputFields);
+    } else {
+      return inputId;
+    }
+  }
+
+  private int rename(DrillImplementor implementor, int inputId, List<String> inputFields, List<String> outputFields) {
+    final ObjectNode project = implementor.mapper.createObjectNode();
+    project.put("op", "project");
+    project.put("input", inputId);
+    final ArrayNode transforms = implementor.mapper.createArrayNode();
+    project.put("projections", transforms);
+    for (Pair<String, String> pair : Pair.zip(inputFields, outputFields)) {
+      final ObjectNode objectNode = implementor.mapper.createObjectNode();
+      transforms.add(objectNode);
+      objectNode.put("expr", pair.left);
+      objectNode.put("ref", "output." + pair.right);
+    }
+    return implementor.add(project);
+  }
+
+  /**
+   * Returns whether there are any elements in common between left and right.
+   */
+  private static <T> boolean intersects(List<T> left, List<T> right) {
+    return new HashSet<>(left).removeAll(right);
+  }
+
+  private boolean uniqueFieldNames(RelDataType rowType) {
+    return isUnique(rowType.getFieldNames());
+  }
+
+  private static <T> boolean isUnique(List<T> list) {
+    return new HashSet<>(list).size() == list.size();
+  }
+
+  private static String toDrill(JoinRelType joinType) {
+    switch (joinType) {
+    case LEFT:
+      return "left";
+    case INNER:
+      return "inner";
+    case FULL:
+      return "outer";
+    default:
+      throw new AssertionError(joinType);
+    }
+  }
+}
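
Per the inline comment, an equi-join on deptId maps to roughly the fragment below (ids illustrative):

  { "op": "join", "@id": 5, "left": 2, "right": 4, "type": "inner",
    "conditions": [ { "relationship": "==", "left": "deptId", "right": "deptId" } ] }

When an input's field names differ from the corresponding slice of the join's output row type, implementInput()
first emits a renaming "project" whose projections map each input field onto an "output."-prefixed ref, so that
duplicate field names between the two sides are not lost.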

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillJoinRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillJoinRule.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillJoinRule.java
new file mode 100644
index 0000000..ad858e2
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillJoinRule.java
@@ -0,0 +1,57 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import org.eigenbase.rel.InvalidRelException;
+import org.eigenbase.rel.JoinRel;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.relopt.*;
+import org.eigenbase.trace.EigenbaseTrace;
+
+import java.util.logging.Logger;
+
+/**
+ * Rule that converts a {@link JoinRel} to a {@link DrillJoinRel}, which is implemented by Drill "join" operation.
+ */
+public class DrillJoinRule extends RelOptRule {
+  public static final RelOptRule INSTANCE = new DrillJoinRule();
+  protected static final Logger tracer = EigenbaseTrace.getPlannerTracer();
+
+  private DrillJoinRule() {
+    super(
+        RelOptRule.some(JoinRel.class, Convention.NONE, RelOptRule.any(RelNode.class), RelOptRule.any(RelNode.class)),
+        "DrillJoinRule");
+  }
+
+  @Override
+  public void onMatch(RelOptRuleCall call) {
+    final JoinRel join = (JoinRel) call.rel(0);
+    final RelNode left = call.rel(1);
+    final RelNode right = call.rel(2);
+    final RelTraitSet traits = join.getTraitSet().plus(DrillRel.CONVENTION);
+
+    final RelNode convertedLeft = convert(left, traits);
+    final RelNode convertedRight = convert(right, traits);
+    try {
+      call.transformTo(new DrillJoinRel(join.getCluster(), traits, convertedLeft, convertedRight, join.getCondition(),
+          join.getJoinType()));
+    } catch (InvalidRelException e) {
+      tracer.warning(e.toString());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillOptiq.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillOptiq.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillOptiq.java
index 73391b3..1a66c0c 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillOptiq.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillOptiq.java
@@ -17,6 +17,7 @@
  ******************************************************************************/
 package org.apache.drill.optiq;
 
+import net.hydromatic.linq4j.Ord;
 import org.eigenbase.rel.RelNode;
 import org.eigenbase.relopt.RelOptPlanner;
 import org.eigenbase.reltype.RelDataTypeField;
@@ -30,25 +31,25 @@ import org.eigenbase.sql.fun.SqlStdOperatorTable;
 public class DrillOptiq {
   static void registerStandardPlannerRules(RelOptPlanner planner) {
     planner.addRule(EnumerableDrillRule.ARRAY_INSTANCE);
-    planner.addRule(EnumerableDrillRule.CUSTOM_INSTANCE);
 
-//    planner.addRule(DrillTableModificationConverterRule.INSTANCE);
-//    planner.addRule(DrillAggregateConverterRule.INSTANCE);
-//    planner.addRule(DrillCalcConverterRule.INSTANCE);
+    // planner.addRule(DrillTableModificationConverterRule.INSTANCE);
+    // planner.addRule(DrillCalcConverterRule.INSTANCE);
 
     planner.addRule(DrillFilterRule.INSTANCE);
     planner.addRule(DrillProjectRule.INSTANCE);
+    planner.addRule(DrillAggregateRule.INSTANCE);
 
     // Enable when https://issues.apache.org/jira/browse/DRILL-57 fixed
     if (false) planner.addRule(DrillValuesRule.INSTANCE);
-//    planner.addRule(DrillSortRule.INSTANCE);
-//    planner.addRule(DrillJoinRule.INSTANCE);
-//    planner.addRule(DrillUnionRule.INSTANCE);
-//    planner.addRule(AbstractConverter.ExpandConversionRule.instance);
+    planner.addRule(DrillSortRule.INSTANCE);
+    planner.addRule(DrillJoinRule.INSTANCE);
+    planner.addRule(DrillUnionRule.INSTANCE);
+    // planner.addRule(AbstractConverter.ExpandConversionRule.instance);
   }
 
-  /** Converts a tree of {@link RexNode} operators into a scalar expression in
-   * Drill syntax. */
+  /**
+   * Converts a tree of {@link RexNode} operators into a scalar expression in Drill syntax.
+   */
   static String toDrill(RelNode input, RexNode expr) {
     final RexToDrill visitor = new RexToDrill(input);
     expr.accept(visitor);
@@ -70,21 +71,24 @@ public class DrillOptiq {
       switch (syntax) {
       case Binary:
         buf.append("(");
-        call.getOperandList().get(0).accept(this)
-            .append(" ")
-            .append(call.getOperator().getName())
-            .append(" ");
-        return call.getOperandList().get(1).accept(this)
-            .append(")");
+        call.getOperands().get(0).accept(this).append(" ").append(call.getOperator().getName()).append(" ");
+        return call.getOperands().get(1).accept(this).append(")");
+      case Function:
+        buf.append(call.getOperator().getName().toLowerCase()).append("(");
+        for (Ord<RexNode> operand : Ord.zip(call.getOperands())) {
+          buf.append(operand.i > 0 ? ", " : "");
+          operand.e.accept(this);
+        }
+        return buf.append(")");
       case Special:
         switch (call.getKind()) {
         case Cast:
           // Ignore casts. Drill is type-less.
-          return call.getOperandList().get(0).accept(this);
+          return call.getOperands().get(0).accept(this);
         }
         if (call.getOperator() == SqlStdOperatorTable.itemOp) {
-          final RexNode left = call.getOperandList().get(0);
-          final RexLiteral literal = (RexLiteral) call.getOperandList().get(1);
+          final RexNode left = call.getOperands().get(0);
+          final RexLiteral literal = (RexLiteral) call.getOperands().get(1);
           final String field = (String) literal.getValue2();
           final int length = buf.length();
           left.accept(this);
@@ -96,16 +100,14 @@ public class DrillOptiq {
         }
         // fall through
       default:
-        throw new AssertionError("todo: implement syntax " + syntax + "(" + call
-            + ")");
+        throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
       }
     }
 
     @Override
     public StringBuilder visitInputRef(RexInputRef inputRef) {
       final int index = inputRef.getIndex();
-      final RelDataTypeField field =
-          input.getRowType().getFieldList().get(index);
+      final RelDataTypeField field = input.getRowType().getFieldList().get(index);
       return buf.append(field.getName());
     }
 
@@ -115,5 +117,3 @@ public class DrillOptiq {
     }
   }
 }
-
-// End DrillOptiq.java
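
RexToDrill renders scalar expressions as plain strings: binary calls become parenthesized infix using the
operator's name, function calls become the lowercased function name with comma-separated operands, and casts are
dropped since Drill is type-less. A few illustrative renderings (field and literal formatting assumed; actual
names come from the input row type):

  a < 1.00             ->  (a < 1.00)
  POWER(a, 2)          ->  power(a, 2)
  CAST(a AS DOUBLE)    ->  a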

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillPrepareImpl.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillPrepareImpl.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillPrepareImpl.java
index 0b1136a..f5dce0a 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillPrepareImpl.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillPrepareImpl.java
@@ -33,7 +33,6 @@ public class DrillPrepareImpl extends OptiqPrepareImpl {
   protected RelOptPlanner createPlanner() {
     final RelOptPlanner planner = super.createPlanner();
     planner.addRule(EnumerableDrillRule.ARRAY_INSTANCE);
-    planner.addRule(EnumerableDrillRule.CUSTOM_INSTANCE);
 
     // Enable when https://issues.apache.org/jira/browse/DRILL-57 fixed
     if (false) {
@@ -43,5 +42,3 @@ public class DrillPrepareImpl extends OptiqPrepareImpl {
     return planner;
   }
 }
-
-// End DrillPrepareImpl.java

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRel.java
index e3a88a1..e2f1c28 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRel.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRel.java
@@ -23,9 +23,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.eigenbase.rel.*;
 import org.eigenbase.relopt.*;
 import org.eigenbase.reltype.RelDataType;
-import org.eigenbase.reltype.RelDataTypeField;
 import org.eigenbase.rex.RexNode;
-import org.eigenbase.sql.type.SqlTypeName;
 import org.eigenbase.util.Pair;
 
 import java.util.*;
@@ -34,17 +32,21 @@ import java.util.*;
  * Project implemented in Drill.
  */
 public class DrillProjectRel extends ProjectRelBase implements DrillRel {
-  protected DrillProjectRel(RelOptCluster cluster, RelTraitSet traits,
-      RelNode child, RexNode[] exps, RelDataType rowType) {
-    super(cluster, traits, child, exps, rowType, Flags.Boxed,
-        Collections.<RelCollation>emptyList());
+  protected DrillProjectRel(RelOptCluster cluster, RelTraitSet traits, RelNode child, List<RexNode> exps,
+      RelDataType rowType) {
+    super(cluster, traits, child, exps, rowType, Flags.Boxed, Collections.<RelCollation> emptyList());
     assert getConvention() == CONVENTION;
   }
 
+  public DrillProjectRel(RelOptCluster cluster, RelTraitSet traits, RelNode child, List<RexNode> exps,
+      RelDataType rowType, int flags, List<RelCollation> collationList) {
+    super(cluster, traits, child, exps, rowType, flags, collationList);
+
+  }
+
   @Override
   public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
-    return new DrillProjectRel(getCluster(), traitSet, sole(inputs),
-        exps.clone(), rowType);
+    return new DrillProjectRel(getCluster(), traitSet, sole(inputs), new ArrayList<RexNode>(exps), rowType);
   }
 
   @Override
@@ -53,36 +55,28 @@ public class DrillProjectRel extends ProjectRelBase implements DrillRel {
   }
 
   private List<Pair<RexNode, String>> projects() {
-    return Pair.zip(
-        Arrays.asList(exps),
-        RelOptUtil.getFieldNameList(getRowType()));
+    return Pair.zip(exps, getRowType().getFieldNames());
   }
 
   @Override
-  public void implement(DrillImplementor implementor) {
-    implementor.visitChild(this, 0, getChild());
-    final ObjectNode node = implementor.mapper.createObjectNode();
-/*
-    E.g. {
-      op: "project",
-	    projections: [
-	      { ref: "output.quantity", expr: "donuts.sales"}
-	    ]
-*/
-    node.put("op", "project");
+  public int implement(DrillImplementor implementor) {
+    int inputId = implementor.visitChild(this, 0, getChild());
+    final ObjectNode project = implementor.mapper.createObjectNode();
+    /*
+     * E.g. { op: "project", projections: [ { ref: "output.quantity", expr: "donuts.sales"} ]
+     */
+    project.put("op", "project");
+    project.put("input", inputId);
     final ArrayNode transforms = implementor.mapper.createArrayNode();
-    node.put("projections", transforms);
-    final String prefix = "output.";
+    project.put("projections", transforms);
     for (Pair<RexNode, String> pair : projects()) {
       final ObjectNode objectNode = implementor.mapper.createObjectNode();
       transforms.add(objectNode);
       String expr = DrillOptiq.toDrill(getChild(), pair.left);
       objectNode.put("expr", expr);
-      String ref = prefix + pair.right;
+      String ref = "output." + pair.right;
       objectNode.put("ref", ref);
     }
-    implementor.add(node);
+    return implementor.add(project);
   }
 }
-
-// End DrillProjectRel.java
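
As in the comment, each projection pairs a Drill expression with an "output."-prefixed ref taken from the rel's
output field names; the operator now also carries the "input" id of its child. A sketch (ids illustrative):

  { "op": "project", "@id": 3, "input": 2,
    "projections": [ { "expr": "donuts.sales", "ref": "output.quantity" } ] }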

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRule.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRule.java
index c0e983e..b294ad0 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRule.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillProjectRule.java
@@ -17,35 +17,30 @@
  ******************************************************************************/
 package org.apache.drill.optiq;
 
-import org.eigenbase.rel.*;
-import org.eigenbase.relopt.*;
+import org.eigenbase.rel.ProjectRel;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.relopt.Convention;
+import org.eigenbase.relopt.RelOptRule;
+import org.eigenbase.relopt.RelOptRuleCall;
+import org.eigenbase.relopt.RelTraitSet;
 
 /**
- * Rule that converts a {@link org.eigenbase.rel.ProjectRel} to a Drill
- * "project" operation.
+ * Rule that converts a {@link org.eigenbase.rel.ProjectRel} to a Drill "project" operation.
  */
 public class DrillProjectRule extends RelOptRule {
   public static final RelOptRule INSTANCE = new DrillProjectRule();
 
   private DrillProjectRule() {
-    super(
-        new RelOptRuleOperand(
-            ProjectRel.class,
-            Convention.NONE,
-            new RelOptRuleOperand(RelNode.class, ANY)),
-        "DrillProjectRule");
+    super(RelOptRule.some(ProjectRel.class, Convention.NONE, RelOptRule.any(RelNode.class)), "DrillProjectRule");
   }
 
   @Override
   public void onMatch(RelOptRuleCall call) {
-    final ProjectRel project = (ProjectRel) call.getRels()[0];
-    final RelNode input = call.getRels()[1];
+    final ProjectRel project = (ProjectRel) call.rel(0);
+    final RelNode input = call.rel(1);
     final RelTraitSet traits = project.getTraitSet().plus(DrillRel.CONVENTION);
     final RelNode convertedInput = convert(input, traits);
-    call.transformTo(
-        new DrillProjectRel(project.getCluster(), traits, convertedInput,
-            project.getProjectExps(), project.getRowType()));
+    call.transformTo(new DrillProjectRel(project.getCluster(), traits, convertedInput, project.getProjects(), project
+        .getRowType()));
   }
 }
-
-// End DrillProjectRule.java

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillRel.java
index b6dae18..aa6d3e0 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillRel.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillRel.java
@@ -28,7 +28,5 @@ public interface DrillRel extends RelNode {
    * generating Drill logical plans. */
   Convention CONVENTION = new Convention.Impl("DRILL", DrillRel.class);
 
-  void implement(DrillImplementor implementor);
+  int implement(DrillImplementor implementor);
 }
-
-// End DrillRel.java

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java
index 11897d5..ab622b1 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java
@@ -17,10 +17,7 @@ public class DrillScan extends TableAccessRelBase implements DrillRel {
   private final DrillTable drillTable;
 
   /** Creates a DrillScan. */
-  public DrillScan(RelOptCluster cluster,
-      RelTraitSet traits,
-      RelOptTable table)
-  {
+  public DrillScan(RelOptCluster cluster, RelTraitSet traits, RelOptTable table) {
     super(cluster, traits, table);
     assert getConvention() == CONVENTION;
     this.drillTable = table.unwrap(DrillTable.class);
@@ -33,15 +30,13 @@ public class DrillScan extends TableAccessRelBase implements DrillRel {
     DrillOptiq.registerStandardPlannerRules(planner);
   }
 
-  public void implement(DrillImplementor implementor) {
+  public int implement(DrillImplementor implementor) {
     final ObjectNode node = implementor.mapper.createObjectNode();
     node.put("op", "scan");
     node.put("memo", "initial_scan");
     node.put("ref", "_MAP"); // output is a record with a single field, '_MAP'
     node.put("storageengine", drillTable.getStorageEngineName());
     node.put("selection", implementor.mapper.convertValue(drillTable.selection, JsonNode.class));
-    implementor.add(node);
+    return implementor.add(node);
   }
 }
-
-// End DrillScan.java
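
With the table details filled in, the node added here looks roughly like the following; the storageengine value and selection contents depend on the underlying DrillTable, so "parquet" is only illustrative:

    { "op": "scan",
      "memo": "initial_scan",
      "ref": "_MAP",
      "storageengine": "parquet",
      "selection": { ... } }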

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillSortRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillSortRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillSortRel.java
new file mode 100644
index 0000000..64995c5
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillSortRel.java
@@ -0,0 +1,88 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import java.util.List;
+
+import net.hydromatic.linq4j.Ord;
+
+import org.eigenbase.rel.RelCollation;
+import org.eigenbase.rel.RelFieldCollation;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.rel.SortRel;
+import org.eigenbase.relopt.RelOptCluster;
+import org.eigenbase.relopt.RelTraitSet;
+
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * Sort implemented in Drill.
+ */
+public class DrillSortRel extends SortRel implements DrillRel {
+  /** Creates a DrillSortRel. */
+  public DrillSortRel(RelOptCluster cluster, RelTraitSet traits, RelNode input, RelCollation collation) {
+    super(cluster, traits, input, collation);
+  }
+
+  @Override
+  public DrillSortRel copy(RelTraitSet traitSet, RelNode input, RelCollation collation) {
+    return new DrillSortRel(getCluster(), traitSet, input, collation);
+  }
+
+  @Override
+  public int implement(DrillImplementor implementor) {
+    int inputId = implementor.visitChild(this, 0, getChild());
+    final List<String> childFields = getChild().getRowType().getFieldNames();
+    /*
+     * E.g. { op: "order", input: 4, orderings: [ {order: "asc", expr: "deptId"} ] }
+     */
+    final ObjectNode order = implementor.mapper.createObjectNode();
+    order.put("op", "order");
+    order.put("input", inputId);
+    final ArrayNode orderings = implementor.mapper.createArrayNode();
+    order.put("orderings", orderings);
+    for (Ord<RelFieldCollation> fieldCollation : Ord.zip(this.collation.getFieldCollations())) {
+      final ObjectNode ordering = implementor.mapper.createObjectNode();
+      ordering.put("order", toDrill(fieldCollation.e));
+      ordering.put("expr", childFields.get(fieldCollation.e.getFieldIndex()));
+      switch (fieldCollation.e.nullDirection) {
+      case FIRST:
+        ordering.put("nullCollation", "first");
+        break;
+      default:
+        ordering.put("nullCollation", "last");
+        break;
+      }
+      orderings.add(ordering);
+    }
+
+    return implementor.add(order);
+  }
+
+  private static String toDrill(RelFieldCollation collation) {
+    switch (collation.getDirection()) {
+    case Ascending:
+      return "asc";
+    case Descending:
+      return "desc";
+    default:
+      throw new AssertionError(collation.getDirection());
+    }
+  }
+}
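
For a hypothetical ORDER BY deptId DESC NULLS FIRST over the input operator with id 2, the node built above would come out as:

    { "op": "order", "input": 2,
      "orderings": [ { "order": "desc", "expr": "deptId", "nullCollation": "first" } ] }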

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillSortRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillSortRule.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillSortRule.java
new file mode 100644
index 0000000..469d8bb
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillSortRule.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import org.eigenbase.rel.SortRel;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.relopt.*;
+
+/**
+ * Rule that converts a {@link SortRel} to a {@link DrillSortRel}, implemented by a Drill "order" operation.
+ */
+public class DrillSortRule extends RelOptRule {
+  public static final RelOptRule INSTANCE = new DrillSortRule();
+
+  private DrillSortRule() {
+    super(RelOptRule.some(SortRel.class, Convention.NONE, RelOptRule.any(RelNode.class)), "DrillSortRule");
+  }
+
+  @Override
+  public void onMatch(RelOptRuleCall call) {
+    final SortRel sort = (SortRel) call.rel(0);
+    final RelNode input = call.rel(1);
+    final RelTraitSet traits = sort.getTraitSet().plus(DrillRel.CONVENTION);
+    final RelNode convertedInput = convert(input, traits);
+    call.transformTo(new DrillSortRel(sort.getCluster(), traits, convertedInput, sort.getCollation()));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillUnionRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillUnionRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillUnionRel.java
new file mode 100644
index 0000000..e8b35a6
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillUnionRel.java
@@ -0,0 +1,70 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import net.hydromatic.linq4j.Ord;
+import org.eigenbase.rel.UnionRelBase;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.relopt.RelOptCluster;
+import org.eigenbase.relopt.RelTraitSet;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Union implemented in Drill.
+ */
+public class DrillUnionRel extends UnionRelBase implements DrillRel {
+  /** Creates a DrillUnionRel. */
+  public DrillUnionRel(RelOptCluster cluster, RelTraitSet traits,
+      List<RelNode> inputs, boolean all) {
+    super(cluster, traits, inputs, all);
+  }
+
+  @Override
+  public DrillUnionRel copy(RelTraitSet traitSet, List<RelNode> inputs,
+      boolean all) {
+    return new DrillUnionRel(getCluster(), traitSet, inputs, all);
+  }
+
+  @Override
+  public int implement(DrillImplementor implementor) {
+    List<Integer> inputIds = new ArrayList<>();
+    for (Ord<RelNode> input : Ord.zip(inputs)) {
+      inputIds.add(implementor.visitChild(this, input.i, input.e));
+    }
+    /*
+     * E.g. { op: "union", distinct: true, inputs: [2, 4] }
+     */
+    final ObjectNode union = implementor.mapper.createObjectNode();
+    union.put("op", "union");
+    union.put("distinct", !all);
+    final ArrayNode inputs = implementor.mapper.createArrayNode();
+    union.put("inputs", inputs);
+    for (Integer inputId : inputIds) {
+      inputs.add(inputId);
+    }
+    return implementor.add(union);
+  }
+}
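
For two inputs previously assigned ids 2 and 4, a plain UNION serializes as shown below; UNION ALL flips distinct to false. As noted in the commit message, the distinct case is not currently working in this change.

    { "op": "union", "distinct": true, "inputs": [2, 4] }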

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillUnionRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillUnionRule.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillUnionRule.java
new file mode 100644
index 0000000..f65b276
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillUnionRule.java
@@ -0,0 +1,48 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import org.eigenbase.rel.UnionRel;
+import org.eigenbase.rel.RelNode;
+import org.eigenbase.relopt.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Rule that converts a {@link UnionRel} to a {@link DrillUnionRel}, implemented by a Drill "union" operation.
+ */
+public class DrillUnionRule extends RelOptRule {
+  public static final RelOptRule INSTANCE = new DrillUnionRule();
+
+  private DrillUnionRule() {
+    super(RelOptRule.any(UnionRel.class, Convention.NONE), "DrillUnionRule");
+  }
+
+  @Override
+  public void onMatch(RelOptRuleCall call) {
+    final UnionRel union = (UnionRel) call.rel(0);
+    final RelTraitSet traits = union.getTraitSet().plus(DrillRel.CONVENTION);
+    final List<RelNode> convertedInputs = new ArrayList<>();
+    for (RelNode input : union.getInputs()) {
+      final RelNode convertedInput = convert(input, traits);
+      convertedInputs.add(convertedInput);
+    }
+    call.transformTo(new DrillUnionRel(union.getCluster(), traits, convertedInputs, union.all));
+  }
+}
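
Like the other converter rules, this one only takes effect once the planner knows about it. Below is a minimal sketch of the registration point, assuming the rule set is assembled in DrillOptiq.registerStandardPlannerRules (referenced from DrillScan above); the actual body of DrillOptiq is not reproduced in this excerpt:

    public static void registerStandardPlannerRules(RelOptPlanner planner) {
      planner.addRule(DrillProjectRule.INSTANCE);
      planner.addRule(DrillValuesRule.INSTANCE);
      planner.addRule(DrillSortRule.INSTANCE);   // new in this patch
      planner.addRule(DrillUnionRule.INSTANCE);  // new in this patch
    }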

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRel.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRel.java
index eb9fc72..b8a723a 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRel.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRel.java
@@ -29,10 +29,7 @@ import java.util.List;
  * Values implemented in Drill.
  */
 public class DrillValuesRel extends ValuesRelBase implements DrillRel {
-  protected DrillValuesRel(RelOptCluster cluster,
-      RelDataType rowType,
-      List<List<RexLiteral>> tuples,
-      RelTraitSet traits) {
+  protected DrillValuesRel(RelOptCluster cluster, RelDataType rowType, List<List<RexLiteral>> tuples, RelTraitSet traits) {
     super(cluster, rowType, tuples, traits);
     assert getConvention() == CONVENTION;
   }
@@ -49,10 +46,8 @@ public class DrillValuesRel extends ValuesRelBase implements DrillRel {
   }
 
   @Override
-  public void implement(DrillImplementor implementor) {
+  public int implement(DrillImplementor implementor) {
     // Update when https://issues.apache.org/jira/browse/DRILL-57 fixed
     throw new UnsupportedOperationException();
   }
 }
-
-// End DrillValuesRel.java

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRule.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRule.java
index faa93ee..280dc7f 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRule.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillValuesRule.java
@@ -21,28 +21,19 @@ import org.eigenbase.rel.ValuesRel;
 import org.eigenbase.relopt.*;
 
 /**
- * Rule that converts a {@link ValuesRel} to a Drill
- * "values" operation.
+ * Rule that converts a {@link ValuesRel} to a Drill "values" operation.
  */
 public class DrillValuesRule extends RelOptRule {
   public static final RelOptRule INSTANCE = new DrillValuesRule();
 
   private DrillValuesRule() {
-    super(
-        new RelOptRuleOperand(
-            ValuesRel.class,
-            Convention.NONE),
-        "DrillValuesRule");
+    super(RelOptRule.any(ValuesRel.class, Convention.NONE), "DrillValuesRule");
   }
 
   @Override
   public void onMatch(RelOptRuleCall call) {
-    final ValuesRel values = (ValuesRel) call.getRels()[0];
+    final ValuesRel values = (ValuesRel) call.rel(0);
     final RelTraitSet traits = values.getTraitSet().plus(DrillRel.CONVENTION);
-    call.transformTo(
-        new DrillValuesRel(values.getCluster(), values.getRowType(),
-            values.getTuples(), traits));
+    call.transformTo(new DrillValuesRel(values.getCluster(), values.getRowType(), values.getTuples(), traits));
   }
 }
-
-// End DrillValuesRule.java

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrill.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrill.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrill.java
new file mode 100644
index 0000000..c522b29
--- /dev/null
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrill.java
@@ -0,0 +1,250 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.optiq;
+
+import java.io.IOException;
+import java.util.*;
+import java.util.concurrent.*;
+
+import net.hydromatic.linq4j.AbstractEnumerable;
+import net.hydromatic.linq4j.Enumerable;
+import net.hydromatic.linq4j.Enumerator;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.logical.LogicalPlan;
+import org.apache.drill.exec.ref.IteratorRegistry;
+import org.apache.drill.exec.ref.ReferenceInterpreter;
+import org.apache.drill.exec.ref.RunOutcome;
+import org.apache.drill.exec.ref.eval.BasicEvaluatorFactory;
+import org.apache.drill.exec.ref.rse.RSERegistry;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * Runtime helper that executes a Drill query and converts it into an {@link Enumerable}.
+ */
+public class EnumerableDrill<E> extends AbstractEnumerable<E> implements Enumerable<E> {
+  private final LogicalPlan plan;
+  final BlockingQueue<Object> queue = new ArrayBlockingQueue<>(100);
+  final DrillConfig config;
+  private final String holder;
+  private final List<String> fields;
+
+  private static final ObjectMapper mapper = createMapper();
+
+  /**
+   * Creates an EnumerableDrill.
+   * 
+   * @param plan
+   *          Logical plan
+   * @param clazz
+   *          Type of elements returned from enumerable
+   * @param fields
+   *          Names of fields, or null to return the whole blob
+   */
+  public EnumerableDrill(DrillConfig config, LogicalPlan plan, Class<E> clazz, List<String> fields) {
+    this.plan = plan;
+    this.config = config;
+    this.holder = null;
+    this.fields = fields;
+    config.setSinkQueues(0, queue);
+  }
+
+  /**
+   * Creates an EnumerableDrill from a plan represented as a string. Each record returned is a {@link JsonNode}.
+   */
+  public static <E> EnumerableDrill<E> of(String plan, final List<String> fieldNames, Class<E> clazz) {
+    DrillConfig config = DrillConfig.create();
+    final LogicalPlan parse = LogicalPlan.parse(config, plan);
+    return new EnumerableDrill<>(config, parse, clazz, fieldNames);
+  }
+
+  /** Runs the plan as a background task. */
+  Future<Collection<RunOutcome>> runPlan(CompletionService<Collection<RunOutcome>> service) {
+    IteratorRegistry ir = new IteratorRegistry();
+    DrillConfig config = DrillConfig.create();
+    config.setSinkQueues(0, queue);
+    final ReferenceInterpreter i = new ReferenceInterpreter(plan, ir, new BasicEvaluatorFactory(ir), new RSERegistry(
+        config));
+    try {
+      i.setup();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    return service.submit(new Callable<Collection<RunOutcome>>() {
+      @Override
+      public Collection<RunOutcome> call() throws Exception {
+        Collection<RunOutcome> outcomes = i.run();
+
+        for (RunOutcome outcome : outcomes) {
+          System.out.println("============");
+          System.out.println(outcome);
+          if (outcome.outcome == RunOutcome.OutcomeType.FAILED && outcome.exception != null) {
+            outcome.exception.printStackTrace();
+          }
+        }
+        return outcomes;
+      }
+    });
+  }
+
+  @Override
+  public Enumerator<E> enumerator() {
+    // TODO: use a completion service from the container
+    final ExecutorCompletionService<Collection<RunOutcome>> service = new ExecutorCompletionService<Collection<RunOutcome>>(
+        new ThreadPoolExecutor(1, 1, 1, TimeUnit.SECONDS, new LinkedBlockingDeque<Runnable>(10)));
+
+    // Run the plan using an executor. It runs in a different thread, writing
+    // results to our queue.
+    //
+    // TODO: use the result of task, and check for exceptions
+    final Future<Collection<RunOutcome>> task = runPlan(service);
+
+    return new JsonEnumerator(queue, fields);
+  }
+
+  private static ObjectMapper createMapper() {
+    return new ObjectMapper();
+  }
+
+  /**
+   * Converts a JSON document, represented as an array of bytes, into a Java object (consisting of Map, List, String,
+   * Integer, Double, Boolean).
+   */
+  static Object parseJson(byte[] bytes) {
+    try {
+      return wrapper(mapper.readTree(bytes));
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Converts a JSON node to Java objects ({@link List}, {@link Map}, {@link String}, {@link Integer}, {@link Double},
+   * {@link Boolean}).
+   */
+  static Object wrapper(JsonNode node) {
+    switch (node.asToken()) {
+    case START_OBJECT:
+      return map((ObjectNode) node);
+    case START_ARRAY:
+      return array((ArrayNode) node);
+    case VALUE_STRING:
+      return node.asText();
+    case VALUE_NUMBER_INT:
+      return node.asInt();
+    case VALUE_NUMBER_FLOAT:
+      return node.asDouble();
+    case VALUE_TRUE:
+      return Boolean.TRUE;
+    case VALUE_FALSE:
+      return Boolean.FALSE;
+    case VALUE_NULL:
+      return null;
+    default:
+      throw new AssertionError("unexpected: " + node + ": " + node.asToken());
+    }
+  }
+
+  private static List<Object> array(ArrayNode node) {
+    final List<Object> list = new ArrayList<>();
+    for (JsonNode jsonNode : node) {
+      list.add(wrapper(jsonNode));
+    }
+    return Collections.unmodifiableList(list);
+  }
+
+  private static SortedMap<String, Object> map(ObjectNode node) {
+    // TreeMap makes the results deterministic.
+    final TreeMap<String, Object> map = new TreeMap<>();
+    final Iterator<Map.Entry<String, JsonNode>> fields = node.fields();
+    while (fields.hasNext()) {
+      Map.Entry<String, JsonNode> next = fields.next();
+      map.put(next.getKey(), wrapper(next.getValue()));
+    }
+    return Collections.unmodifiableSortedMap(map);
+  }
+
+  private static class JsonEnumerator implements Enumerator {
+    private final BlockingQueue<Object> queue;
+    private final String holder;
+    private final List<String> fields;
+    private Object current;
+
+    public JsonEnumerator(BlockingQueue<Object> queue, List<String> fields) {
+      this.queue = queue;
+      this.holder = null;
+      this.fields = fields;
+    }
+
+    public Object current() {
+      return current;
+    }
+
+    public void close() {
+
+    }
+
+    public boolean moveNext() {
+      try {
+        Object o = queue.take();
+        if (o instanceof RunOutcome.OutcomeType) {
+          switch ((RunOutcome.OutcomeType) o) {
+          case SUCCESS:
+            return false; // end of data
+          case CANCELED:
+            throw new RuntimeException("canceled");
+          case FAILED:
+          default:
+            throw new RuntimeException("failed");
+          }
+        } else {
+          Object o1 = parseJson((byte[]) o);
+          if (holder != null) {
+            o1 = ((Map<String, Object>) o1).get(holder);
+          }
+          if (fields == null) {
+            current = o1;
+          } else {
+            final Map<String, Object> map = (Map<String, Object>) o1;
+            if (fields.size() == 1) {
+              current = map.get(fields.get(0));
+            } else {
+              Object[] os = new Object[fields.size()];
+              for (int i = 0; i < os.length; i++) {
+                os[i] = map.get(fields.get(i));
+              }
+              current = os;
+            }
+          }
+          return true;
+        }
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt(); // preserve the interrupt status before rethrowing
+        throw new RuntimeException(e);
+      }
+    }
+
+    public void reset() {
+      throw new UnsupportedOperationException();
+    }
+  }
+}
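
To make the consumer side concrete, here is a small sketch of how a record coming off the sink queue becomes the enumerator's current value, assuming code in the same package (parseJson and wrapper are package-private); the record bytes and field list are hypothetical:

    byte[] record = "{\"name\": \"Fred\", \"deptId\": 31}".getBytes(java.nio.charset.StandardCharsets.UTF_8);
    Object o = EnumerableDrill.parseJson(record);
    // o is an unmodifiable TreeMap with deterministic key order: {deptId=31, name=Fred}
    // With fields = ["name"], JsonEnumerator.moveNext() sets current to "Fred";
    // with fields = ["name", "deptId"], current becomes the Object[] {"Fred", 31}.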

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d405c70d/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillFullEngine.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillFullEngine.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillFullEngine.java
index 8c41b99..7cd1082 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillFullEngine.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/EnumerableDrillFullEngine.java
@@ -53,7 +53,8 @@ public class EnumerableDrillFullEngine<E> extends AbstractEnumerable<E> implemen
    * @param fields
    *          Names of fields, or null to return the whole blob
    */
-  public EnumerableDrillFullEngine(DrillConfig config, String plan, Class<E> clazz, List<String> fields, DataContext drillConnectionDataContext) {
+  public EnumerableDrillFullEngine(DrillConfig config, String plan, Class<E> clazz, List<String> fields,
+      DataContext drillConnectionDataContext) {
     this.plan = plan;
     this.config = config;
     this.fields = fields;
@@ -73,7 +74,7 @@ public class EnumerableDrillFullEngine<E> extends AbstractEnumerable<E> implemen
   @Override
   public Enumerator<E> enumerator() {
     DrillTable table = (DrillTable) drillConnectionDataContext.getSubSchema("DONUTS").getTable("DONUTS", Object.class);
-    if(table.useReferenceInterpreter()){
+    if (table.useReferenceInterpreter()) {
       DrillRefImpl<E> impl = new DrillRefImpl<E>(plan, config, fields, queue);
       return impl.enumerator(table);
     } else {