Posted to commits@calcite.apache.org by jh...@apache.org on 2015/10/07 21:15:28 UTC

[5/7] incubator-calcite git commit: [CALCITE-785] Add "Piglet", a subset of Pig Latin on top of Calcite algebra

[CALCITE-785] Add "Piglet", a subset of Pig Latin on top of Calcite algebra

Includes a parser; the DISTINCT, ORDER, LIMIT and GROUP commands; and an
incomplete nested FOREACH command.

Add tests based on examples in the Pig Latin reference.
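
The new PigRelBuilder is used like RelBuilder. A minimal sketch of the
DISTINCT case (it mirrors testDistinct in the included PigRelBuilderTest;
"config" is assumed to be a FrameworkConfig over the "scott" test schema,
which has an EMP table):

    final PigRelBuilder builder = PigRelBuilder.create(config);
    final RelNode root = builder
        .scan("EMP")           // read the EMP table
        .distinct(null, -1)    // DISTINCT; no partitioner, parallelism unspecified
        .build();
    System.out.println(RelOptUtil.toString(root));
    // prints:
    //   LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7}])
    //     LogicalTableScan(table=[[scott, EMP]])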


Project: http://git-wip-us.apache.org/repos/asf/incubator-calcite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-calcite/commit/5cee2a1a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-calcite/tree/5cee2a1a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-calcite/diff/5cee2a1a

Branch: refs/heads/master
Commit: 5cee2a1ada46a38212122e562a9326a501d3187e
Parents: 82ac7b2
Author: Julian Hyde <jh...@apache.org>
Authored: Tue Oct 6 13:55:24 2015 -0700
Committer: Julian Hyde <jh...@apache.org>
Committed: Tue Oct 6 23:54:10 2015 -0700

----------------------------------------------------------------------
 .../org/apache/calcite/tools/PigRelBuilder.java |  161 +++
 .../org/apache/calcite/test/CalciteSuite.java   |    1 +
 .../apache/calcite/test/PigRelBuilderTest.java  |  148 +++
 piglet/pom.xml                                  |  157 +++
 .../java/org/apache/calcite/piglet/Ast.java     |  535 +++++++++
 .../java/org/apache/calcite/piglet/Handler.java |  327 +++++
 .../org/apache/calcite/piglet/package-info.java |   24 +
 piglet/src/main/javacc/PigletParser.jj          | 1112 ++++++++++++++++++
 .../java/org/apache/calcite/test/Fluent.java    |  113 ++
 .../org/apache/calcite/test/PigletTest.java     |  289 +++++
 pom.xml                                         |    3 +-
 11 files changed, 2869 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/core/src/main/java/org/apache/calcite/tools/PigRelBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/tools/PigRelBuilder.java b/core/src/main/java/org/apache/calcite/tools/PigRelBuilder.java
new file mode 100644
index 0000000..c4ef26a
--- /dev/null
+++ b/core/src/main/java/org/apache/calcite/tools/PigRelBuilder.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.tools;
+
+import org.apache.calcite.linq4j.Ord;
+import org.apache.calcite.plan.Context;
+import org.apache.calcite.plan.RelOptCluster;
+import org.apache.calcite.plan.RelOptSchema;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.JoinRelType;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.fun.SqlStdOperatorTable;
+import org.apache.calcite.util.Util;
+
+import com.google.common.collect.ImmutableList;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Extension to {@link RelBuilder} for Pig relational operators.
+ */
+public class PigRelBuilder extends RelBuilder {
+  private PigRelBuilder(Context context,
+      RelOptCluster cluster,
+      RelOptSchema relOptSchema) {
+    super(context, cluster, relOptSchema);
+  }
+
+  /** Creates a PigRelBuilder. */
+  public static PigRelBuilder create(FrameworkConfig config) {
+    final RelBuilder relBuilder = RelBuilder.create(config);
+    return new PigRelBuilder(config.getContext(), relBuilder.cluster,
+        relBuilder.relOptSchema);
+  }
+
+  @Override public PigRelBuilder scan(String tableName) {
+    return (PigRelBuilder) super.scan(tableName);
+  }
+
+  /** Loads a data set.
+   *
+   * <p>Equivalent to Pig Latin:
+   * <pre>{@code LOAD 'path' USING loadFunction AS rowType}</pre>
+   *
+   * <p>{@code loadFunction} and {@code rowType} are optional.
+   *
+   * @param path File path
+   * @param loadFunction Load function
+   * @param rowType Row type (what Pig calls 'schema')
+   *
+   * @return This builder
+   */
+  public PigRelBuilder load(String path, RexNode loadFunction,
+      RelDataType rowType) {
+    scan(path.replace(".csv", "")); // TODO: use a UDT
+    return this;
+  }
+
+  /** Removes duplicate tuples in a relation.
+   *
+   * <p>Equivalent Pig Latin:
+   * <blockquote>
+   *   <pre>alias = DISTINCT alias [PARTITION BY partitioner] [PARALLEL n];</pre>
+   * </blockquote>
+   *
+   * @param partitioner Partitioner; null means no partitioner
+   * @param parallel Degree of parallelism; negative means unspecified
+   *
+   * @return This builder
+   */
+  public PigRelBuilder distinct(Partitioner partitioner, int parallel) {
+    // TODO: Use partitioner and parallel
+    distinct();
+    return this;
+  }
+
+  /** Groups the data in one or more relations.
+   *
+   * <p>Pig Latin syntax:
+   * <blockquote>
+   * alias = GROUP alias { ALL | BY expression }
+   *   [, alias ALL | BY expression ...]
+   *   [USING 'collected' | 'merge'] [PARTITION BY partitioner] [PARALLEL n];
+   * </blockquote>
+   *
+   * @param groupKeys One or more group keys; use {@link #groupKey()} for ALL
+   * @param option Whether to use an optimized method combining the data
+   *              (COLLECTED for one input or MERGE for two or more inputs)
+   * @param partitioner Partitioner; null means no partitioner
+   * @param parallel Degree of parallelism; negative means unspecified
+   *
+   * @return This builder
+   */
+  public PigRelBuilder group(GroupOption option, Partitioner partitioner,
+      int parallel, GroupKey... groupKeys) {
+    return group(option, partitioner, parallel, ImmutableList.copyOf(groupKeys));
+  }
+
+  public PigRelBuilder group(GroupOption option, Partitioner partitioner,
+      int parallel, Iterable<? extends GroupKey> groupKeys) {
+    @SuppressWarnings("unchecked") final List<GroupKeyImpl> groupKeyList =
+        ImmutableList.copyOf((Iterable) groupKeys);
+    if (groupKeyList.isEmpty()) {
+      throw new IllegalArgumentException("must have at least one group");
+    }
+    final int groupCount = groupKeyList.get(0).nodes.size();
+    for (GroupKeyImpl groupKey : groupKeyList) {
+      if (groupKey.nodes.size() != groupCount) {
+        throw new IllegalArgumentException("group key size mismatch");
+      }
+    }
+    final int n = groupKeyList.size();
+    for (Ord<GroupKeyImpl> groupKey : Ord.reverse(groupKeyList)) {
+      RelNode r = null;
+      if (groupKey.i < n - 1) {
+        r = build();
+      }
+      String alias = getAlias();
+      aggregate(groupKey.e,
+          aggregateCall(SqlStdOperatorTable.COLLECT, false, alias,
+              fields()));
+      if (groupKey.i < n - 1) {
+        push(r);
+        List<RexNode> predicates = new ArrayList<>();
+        for (int key : Util.range(groupCount)) {
+          predicates.add(equals(field(2, 0, key), field(2, 1, key)));
+        }
+        join(JoinRelType.INNER, and(predicates));
+      }
+    }
+    return this;
+  }
+
+  /** Partitioner for group and join */
+  interface Partitioner {
+  }
+
+  /** Option for performing group efficiently if the data set is already sorted. */
+  public enum GroupOption {
+    MERGE,
+    COLLECTED
+  }
+}
+
+// End PigRelBuilder.java
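
A note on group(): with a single group key it implements GROUP by collecting
each input row into a bag (a COLLECT aggregate over a ROW of all fields);
with several group keys it emulates Pig's COGROUP by aggregating each input
separately and then joining the aggregates on their key columns, as the loop
above shows. A sketch of the two-relation case, mirroring testGroup2 in the
test class below (which also spells out the resulting plan):

    // Equivalent Pig Latin: r = GROUP e BY deptno, d BY deptno;
    final RelNode root = builder
        .scan("EMP")
        .scan("DEPT")
        .group(null, null, -1,
            builder.groupKey("DEPTNO").alias("e"),
            builder.groupKey("DEPTNO").alias("d"))
        .build();
    // root: a join of the two COLLECT aggregates on their group-key columns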

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/core/src/test/java/org/apache/calcite/test/CalciteSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/calcite/test/CalciteSuite.java b/core/src/test/java/org/apache/calcite/test/CalciteSuite.java
index 0621a73..c78c0cc 100644
--- a/core/src/test/java/org/apache/calcite/test/CalciteSuite.java
+++ b/core/src/test/java/org/apache/calcite/test/CalciteSuite.java
@@ -114,6 +114,7 @@ import org.junit.runners.Suite;
     // slow tests (above 1s)
     PlannerTest.class,
     RelBuilderTest.class,
+    PigRelBuilderTest.class,
     RexImplicationCheckerTest.class,
     MaterializationTest.class,
     JdbcAdapterTest.class,

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/core/src/test/java/org/apache/calcite/test/PigRelBuilderTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/calcite/test/PigRelBuilderTest.java b/core/src/test/java/org/apache/calcite/test/PigRelBuilderTest.java
new file mode 100644
index 0000000..c77b37d
--- /dev/null
+++ b/core/src/test/java/org/apache/calcite/test/PigRelBuilderTest.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.test;
+
+import org.apache.calcite.plan.RelOptUtil;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.tools.Frameworks;
+import org.apache.calcite.tools.PigRelBuilder;
+
+import org.junit.Test;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Unit test for {@link PigRelBuilder}.
+ */
+public class PigRelBuilderTest {
+  /** Creates a config based on the "scott" schema. */
+  public static Frameworks.ConfigBuilder config() {
+    return RelBuilderTest.config();
+  }
+
+  @Test public void testScan() {
+    // Equivalent SQL:
+    //   SELECT *
+    //   FROM emp
+    final PigRelBuilder builder = PigRelBuilder.create(config().build());
+    final RelNode root = builder
+        .scan("EMP")
+        .build();
+    assertThat(RelOptUtil.toString(root),
+        is("LogicalTableScan(table=[[scott, EMP]])\n"));
+  }
+
+  @Test public void testCogroup() {}
+  @Test public void testCross() {}
+  @Test public void testCube() {}
+  @Test public void testDefine() {}
+  @Test public void testDistinct() {
+    // Syntax:
+    //   alias = DISTINCT alias [PARTITION BY partitioner] [PARALLEL n];
+    final PigRelBuilder builder = PigRelBuilder.create(config().build());
+    final RelNode root = builder
+        .scan("EMP")
+        .distinct()
+        .build();
+    final String plan = "LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7}])\n"
+        + "  LogicalTableScan(table=[[scott, EMP]])\n";
+    assertThat(RelOptUtil.toString(root), is(plan));
+  }
+
+  @Test public void testFilter() {
+    // Syntax:
+    //  FILTER name BY expr
+    // Example:
+    //  output_var = FILTER input_var BY (field1 is not null);
+    final PigRelBuilder builder = PigRelBuilder.create(config().build());
+    final RelNode root = builder
+        .load("EMP.csv", null, null)
+        .filter(builder.isNotNull(builder.field("MGR")))
+        .build();
+    assertThat(RelOptUtil.toString(root),
+        is("LogicalFilter(condition=[IS NOT NULL($3)])\n"
+            + "  LogicalTableScan(table=[[scott, EMP]])\n"));
+  }
+
+  @Test public void testForeach() {}
+  @Test public void testGroup() {
+    // Syntax:
+    //   alias = GROUP alias { ALL | BY expression}
+    //     [, alias ALL | BY expression …] [USING 'collected' | 'merge']
+    //     [PARTITION BY partitioner] [PARALLEL n];
+    // Equivalent to Pig Latin:
+    //   r = GROUP e BY (deptno, job);
+    final PigRelBuilder builder = PigRelBuilder.create(config().build());
+    final RelNode root = builder
+        .scan("EMP")
+        .group(null, null, -1, builder.groupKey("DEPTNO", "JOB").alias("e"))
+        .build();
+    final String plan = ""
+        + "LogicalAggregate(group=[{2, 7}], EMP=[COLLECT($8)])\n"
+        + "  LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], $f8=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n"
+        + "    LogicalTableScan(table=[[scott, EMP]])\n";
+    assertThat(RelOptUtil.toString(root), is(plan));
+  }
+
+  @Test public void testGroup2() {
+    // Equivalent to Pig Latin:
+    //   r = GROUP e BY deptno, d BY deptno;
+    final PigRelBuilder builder = PigRelBuilder.create(config().build());
+    final RelNode root = builder
+        .scan("EMP")
+        .scan("DEPT")
+        .group(null, null, -1,
+            builder.groupKey("DEPTNO").alias("e"),
+            builder.groupKey("DEPTNO").alias("d"))
+        .build();
+    final String plan = "LogicalJoin(condition=[=($0, $0)], joinType=[inner])\n"
+        + "  LogicalAggregate(group=[{0}], EMP=[COLLECT($8)])\n"
+        + "    LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], $f8=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n      LogicalTableScan(table=[[scott, EMP]])\n  LogicalAggregate(group=[{0}], DEPT=[COLLECT($3)])\n"
+        + "    LogicalProject(DEPTNO=[$0], DNAME=[$1], LOC=[$2], $f3=[ROW($0, $1, $2)])\n"
+        + "      LogicalTableScan(table=[[scott, DEPT]])\n";
+    assertThat(RelOptUtil.toString(root), is(plan));
+  }
+
+  @Test public void testImport() {}
+  @Test public void testJoinInner() {}
+  @Test public void testJoinOuter() {}
+  @Test public void testLimit() {}
+
+  @Test public void testLoad() {
+    // Syntax:
+    //   LOAD 'data' [USING function] [AS schema];
+    // Equivalent to Pig Latin:
+    //   LOAD 'EMPS.csv'
+    final PigRelBuilder builder = PigRelBuilder.create(config().build());
+    final RelNode root = builder
+        .load("EMP.csv", null, null)
+        .build();
+    assertThat(RelOptUtil.toString(root),
+        is("LogicalTableScan(table=[[scott, EMP]])\n"));
+  }
+
+  @Test public void testMapReduce() {}
+  @Test public void testOrderBy() {}
+  @Test public void testRank() {}
+  @Test public void testSample() {}
+  @Test public void testSplit() {}
+  @Test public void testStore() {}
+  @Test public void testUnion() {}
+}
+
+// End PigRelBuilderTest.java

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/piglet/pom.xml
----------------------------------------------------------------------
diff --git a/piglet/pom.xml b/piglet/pom.xml
new file mode 100644
index 0000000..2a9c02f
--- /dev/null
+++ b/piglet/pom.xml
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.calcite</groupId>
+    <artifactId>calcite</artifactId>
+    <version>1.5.0-incubating-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>calcite-piglet</artifactId>
+  <packaging>jar</packaging>
+  <version>1.5.0-incubating-SNAPSHOT</version>
+  <name>Calcite Piglet</name>
+  <description>Pig-like language built on top of Calcite algebra</description>
+
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
+  <dependencies>
+    <!-- Sorted by groupId, artifactId; calcite dependencies first. Put versions
+         in dependencyManagement in the root POM, not here. -->
+    <dependency>
+      <groupId>org.apache.calcite</groupId>
+      <artifactId>calcite-avatica</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.calcite</groupId>
+      <artifactId>calcite-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.calcite</groupId>
+      <artifactId>calcite-core</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.calcite</groupId>
+      <artifactId>calcite-linq4j</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.hydromatic</groupId>
+      <artifactId>scott-data-hsqldb</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.hsqldb</groupId>
+      <artifactId>hsqldb</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <!-- Parent module has the same plugin and does the work of
+           generating -sources.jar for each project. But without the
+           plugin declared here, IDEs don't know the sources are
+           available. -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>attach-sources</id>
+            <phase>verify</phase>
+            <goals>
+              <goal>jar-no-fork</goal>
+              <goal>test-jar-no-fork</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <includes>
+            <include>org/apache/calcite/test/PigletTest.java</include>
+          </includes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>javacc-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>javacc</id>
+            <goals>
+              <goal>javacc</goal>
+            </goals>
+            <configuration>
+              <includes>
+                <include>**/PigletParser.jj</include>
+              </includes>
+              <lookAhead>2</lookAhead>
+              <isStatic>false</isStatic>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.10</version>
+        <!-- configurations do not cascade, so all of the definition from
+             ../pom.xml:build:plugin-management:plugins:plugin must be repeated in child poms -->
+        <executions>
+          <execution>
+            <id>analyze</id>
+            <goals>
+              <goal>analyze-only</goal>
+            </goals>
+            <configuration>
+              <failOnWarning>true</failOnWarning>
+              <!-- ignore "unused but declared" warnings -->
+              <ignoredUnusedDeclaredDependencies>
+                <ignoredUnusedDeclaredDependency>net.hydromatic:scott-data-hsqldb</ignoredUnusedDeclaredDependency>
+                <ignoredUnusedDeclaredDependency>org.hsqldb:hsqldb</ignoredUnusedDeclaredDependency>
+              </ignoredUnusedDeclaredDependencies>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/piglet/src/main/java/org/apache/calcite/piglet/Ast.java
----------------------------------------------------------------------
diff --git a/piglet/src/main/java/org/apache/calcite/piglet/Ast.java b/piglet/src/main/java/org/apache/calcite/piglet/Ast.java
new file mode 100644
index 0000000..8a27e37
--- /dev/null
+++ b/piglet/src/main/java/org/apache/calcite/piglet/Ast.java
@@ -0,0 +1,535 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.piglet;
+
+import org.apache.calcite.avatica.util.Spacer;
+import org.apache.calcite.linq4j.Ord;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.parser.SqlParserUtil;
+import org.apache.calcite.util.Pair;
+import org.apache.calcite.util.Util;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+
+import java.math.BigDecimal;
+import java.util.List;
+
+/** Abstract syntax tree.
+ *
+ * <p>Contains inner classes for various kinds of parse tree node.
+ */
+public class Ast {
+  private Ast() {}
+
+  public static String toString(Node x) {
+    return new UnParser().append(x).buf.toString();
+  }
+
+  /** Formats a node and its children as a string. */
+  public static UnParser unParse(UnParser u, Node n) {
+    switch (n.op) {
+    case PROGRAM:
+      final Program program = (Program) n;
+      return u.append("{op: PROGRAM, stmts: ").appendList(program.stmtList)
+          .append("}");
+    case LOAD:
+      final LoadStmt load = (LoadStmt) n;
+      return u.append("{op: LOAD, target: " + load.target.value + ", name: "
+          + load.name.value + "}");
+    case DUMP:
+      final DumpStmt dump = (DumpStmt) n;
+      return u.append("{op: DUMP, relation: " + dump.relation.value + "}");
+    case DESCRIBE:
+      final DescribeStmt describe = (DescribeStmt) n;
+      return u.append("{op: DESCRIBE, relation: " + describe.relation.value
+          + "}");
+    case FOREACH:
+      final ForeachStmt foreach = (ForeachStmt) n;
+      return u.append("{op: FOREACH, target: " + foreach.target.value
+          + ", source: " + foreach.source.value + ", expList: ")
+          .appendList(foreach.expList)
+          .append("}");
+    case FOREACH_NESTED:
+      final ForeachNestedStmt foreachNested = (ForeachNestedStmt) n;
+      return u.append("{op: FOREACH, target: " + foreachNested.target.value
+          + ", source: " + foreachNested.source.value
+          + ", nestedOps: ")
+          .appendList(foreachNested.nestedStmtList)
+          .append(", expList: ")
+          .appendList(foreachNested.expList)
+          .append("}");
+    case FILTER:
+      final FilterStmt filter = (FilterStmt) n;
+      u.append("{op: FILTER, target: " + filter.target.value + ", source: "
+          + filter.source.value + ", condition: ");
+      u.in().append(filter.condition).out();
+      return u.append("}");
+    case DISTINCT:
+      final DistinctStmt distinct = (DistinctStmt) n;
+      return u.append("{op: DISTINCT, target: " + distinct.target.value
+          + ", source: " + distinct.source.value + "}");
+    case LIMIT:
+      final LimitStmt limit = (LimitStmt) n;
+      return u.append("{op: LIMIT, target: ").append(limit.target.value)
+          .append(", source: ").append(limit.source.value)
+          .append(", count: ").append(limit.count.value.toString())
+          .append("}");
+    case ORDER:
+      final OrderStmt order = (OrderStmt) n;
+      return u.append("{op: ORDER, target: " + order.target.value
+          + ", source: " + order.source.value + "}");
+    case GROUP:
+      final GroupStmt group = (GroupStmt) n;
+      u.append("{op: GROUP, target: " + group.target.value
+          + ", source: " + group.source.value);
+      if (group.keys != null) {
+        u.append(", keys: ").appendList(group.keys);
+      }
+      return u.append("}");
+    case LITERAL:
+      final Literal literal = (Literal) n;
+      return u.append(String.valueOf(literal.value));
+    case IDENTIFIER:
+      final Identifier id = (Identifier) n;
+      return u.append(id.value);
+    default:
+      throw new AssertionError("unknown op " + n.op);
+    }
+  }
+
+  /** Parse tree node type. */
+  public enum Op {
+    DESCRIBE, DISTINCT, DUMP, LITERAL, LOAD, FOREACH, IDENTIFIER, FILTER,
+    SCHEMA, TYPE, FIELD_SCHEMA, FOREACH_NESTED, LIMIT, ORDER, GROUP, PROGRAM,
+    DOT, VALUES, EQ, NE, GT, LT, GTE, LTE, PLUS, MINUS, AND, OR, NOT
+  }
+
+  /** Abstract base class for parse tree node. */
+  public abstract static class Node {
+    public final Op op;
+    public final SqlParserPos pos;
+
+    protected Node(SqlParserPos pos, Op op) {
+      this.op = Preconditions.checkNotNull(op);
+      this.pos = Preconditions.checkNotNull(pos);
+    }
+  }
+
+  /** Abstract base class for parse tree node representing a statement. */
+  public abstract static class Stmt extends Node {
+    protected Stmt(SqlParserPos pos, Op op) {
+      super(pos, op);
+    }
+  }
+
+  /** Abstract base class for statements that assign to a named relation. */
+  public abstract static class Assignment extends Stmt {
+    final Identifier target;
+
+    protected Assignment(SqlParserPos pos, Op op, Identifier target) {
+      super(pos, op);
+      this.target = Preconditions.checkNotNull(target);
+    }
+  }
+
+  /** Parse tree node for LOAD statement. */
+  public static class LoadStmt extends Assignment {
+    final Literal name;
+
+    public LoadStmt(SqlParserPos pos, Identifier target, Literal name) {
+      super(pos, Op.LOAD, target);
+      this.name = Preconditions.checkNotNull(name);
+    }
+  }
+
+  /** Parse tree node for VALUES statement.
+   *
+   * <p>VALUES is an extension to Pig, inspired by SQL's VALUES clause.
+   */
+  public static class ValuesStmt extends Assignment {
+    final List<List<Node>> tupleList;
+    final Schema schema;
+
+    public ValuesStmt(SqlParserPos pos, Identifier target, Schema schema,
+        List<List<Node>> tupleList) {
+      super(pos, Op.VALUES, target);
+      this.schema = schema;
+      this.tupleList = ImmutableList.copyOf(tupleList);
+    }
+  }
+
+  /** Abstract base class for an assignment with one source relation. */
+  public abstract static class Assignment1 extends Assignment {
+    final Identifier source;
+
+    protected Assignment1(SqlParserPos pos, Op op, Identifier target,
+        Identifier source) {
+      super(pos, op, target);
+      this.source = source;
+    }
+  }
+
+  /** Parse tree node for FOREACH statement (non-nested).
+   *
+   * <p>Syntax:
+   * <blockquote><code>
+   * alias = FOREACH alias GENERATE expression [, expression]...
+   * [ AS schema ];</code>
+   * </blockquote>
+   *
+   * @see org.apache.calcite.piglet.Ast.ForeachNestedStmt
+   */
+  public static class ForeachStmt extends Assignment1 {
+    final List<Node> expList;
+
+    public ForeachStmt(SqlParserPos pos, Identifier target, Identifier source,
+        List<Node> expList, Schema schema) {
+      super(pos, Op.FOREACH, target, source);
+      this.expList = expList;
+      assert schema == null; // not supported yet
+    }
+  }
+
+  /** Parse tree node for FOREACH statement (nested).
+   *
+   * <p>Syntax:
+   * <blockquote><code>
+   * alias = FOREACH nested_alias {
+   *   alias = nested_op; [alias = nested_op; ]...
+   *   GENERATE expression [, expression]...
+   * };<br>
+   *
+   * nested_op ::= DISTINCT, FILTER, LIMIT, ORDER, SAMPLE
+   * </code>
+   * </blockquote>
+   *
+   * @see org.apache.calcite.piglet.Ast.ForeachStmt
+   */
+  public static class ForeachNestedStmt extends Assignment1 {
+    final List<Stmt> nestedStmtList;
+    final List<Node> expList;
+
+    public ForeachNestedStmt(SqlParserPos pos, Identifier target,
+        Identifier source, List<Stmt> nestedStmtList,
+        List<Node> expList, Schema schema) {
+      super(pos, Op.FOREACH_NESTED, target, source);
+      this.nestedStmtList = nestedStmtList;
+      this.expList = expList;
+      assert schema == null; // not supported yet
+    }
+  }
+
+  /** Parse tree node for FILTER statement.
+   *
+   * <p>Syntax:
+   * <blockquote><pre>alias = FILTER alias BY expression;</pre></blockquote>
+   */
+  public static class FilterStmt extends Assignment1 {
+    final Node condition;
+
+    public FilterStmt(SqlParserPos pos, Identifier target,
+        Identifier source, Node condition) {
+      super(pos, Op.FILTER, target, source);
+      this.condition = condition;
+    }
+  }
+
+  /** Parse tree node for DISTINCT statement.
+   *
+   * <p>Syntax:
+   * <blockquote><pre>alias = DISTINCT alias;</pre></blockquote>
+   */
+  public static class DistinctStmt extends Assignment1 {
+    public DistinctStmt(SqlParserPos pos, Identifier target,
+        Identifier source) {
+      super(pos, Op.DISTINCT, target, source);
+    }
+  }
+
+  /** Parse tree node for LIMIT statement.
+   *
+   * <p>Syntax:
+   * <blockquote><pre>alias = LIMIT alias n;</pre></blockquote>
+   */
+  public static class LimitStmt extends Assignment1 {
+    final Literal count;
+
+    public LimitStmt(SqlParserPos pos, Identifier target,
+        Identifier source, Literal count) {
+      super(pos, Op.LIMIT, target, source);
+      this.count = count;
+    }
+  }
+
+  /** Parse tree node for ORDER statement.
+   *
+   * <p>Syntax:
+   * <blockquote>
+   *   <code>alias = ORDER alias BY (* | field) [ASC | DESC]
+   *     [, field [ASC | DESC] ]...;</code>
+   * </blockquote>
+   */
+  public static class OrderStmt extends Assignment1 {
+    final List<Pair<Identifier, Direction>> fields;
+
+    public OrderStmt(SqlParserPos pos, Identifier target,
+        Identifier source, List<Pair<Identifier, Direction>> fields) {
+      super(pos, Op.ORDER, target, source);
+      this.fields = fields;
+    }
+  }
+
+  /** Parse tree node for GROUP statement.
+   *
+   * <p>Syntax:
+   * <blockquote>
+   *   <code>alias = GROUP alias
+   *   ( ALL | BY ( exp | '(' exp [, exp]... ')' ) );</code>
+   * </blockquote>
+   */
+  public static class GroupStmt extends Assignment1 {
+    /** Grouping keys. May be null (for ALL), or a list of one or more
+     * expressions. */
+    final List<Node> keys;
+
+    public GroupStmt(SqlParserPos pos, Identifier target,
+        Identifier source, List<Node> keys) {
+      super(pos, Op.GROUP, target, source);
+      this.keys = keys;
+      assert keys == null || keys.size() >= 1;
+    }
+  }
+
+  /** Parse tree node for DUMP statement. */
+  public static class DumpStmt extends Stmt {
+    final Identifier relation;
+
+    public DumpStmt(SqlParserPos pos, Identifier relation) {
+      super(pos, Op.DUMP);
+      this.relation = Preconditions.checkNotNull(relation);
+    }
+  }
+
+  /** Parse tree node for DESCRIBE statement. */
+  public static class DescribeStmt extends Stmt {
+    final Identifier relation;
+
+    public DescribeStmt(SqlParserPos pos, Identifier relation) {
+      super(pos, Op.DESCRIBE);
+      this.relation = Preconditions.checkNotNull(relation);
+    }
+  }
+
+  /** Parse tree node for Literal. */
+  public static class Literal extends Node {
+    final Object value;
+
+    public Literal(SqlParserPos pos, Object value) {
+      super(pos, Op.LITERAL);
+      this.value = Preconditions.checkNotNull(value);
+    }
+
+    public static NumericLiteral createExactNumeric(String s,
+        SqlParserPos pos) {
+      BigDecimal value;
+      int prec;
+      int scale;
+
+      int i = s.indexOf('.');
+      if ((i >= 0) && ((s.length() - 1) != i)) {
+        value = SqlParserUtil.parseDecimal(s);
+        scale = s.length() - i - 1;
+        assert scale == value.scale() : s;
+        prec = s.length() - 1;
+      } else if ((i >= 0) && ((s.length() - 1) == i)) {
+        value = SqlParserUtil.parseInteger(s.substring(0, i));
+        scale = 0;
+        prec = s.length() - 1;
+      } else {
+        value = SqlParserUtil.parseInteger(s);
+        scale = 0;
+        prec = s.length();
+      }
+      return new NumericLiteral(pos, value, prec, scale, true);
+    }
+
+  }
+
+  /** Parse tree node for NumericLiteral. */
+  public static class NumericLiteral extends Literal {
+    final int prec;
+    final int scale;
+    final boolean exact;
+
+    NumericLiteral(SqlParserPos pos, BigDecimal value, int prec, int scale,
+        boolean exact) {
+      super(pos, value);
+      this.prec = prec;
+      this.scale = scale;
+      this.exact = exact;
+    }
+
+    public NumericLiteral negate(SqlParserPos pos) {
+      BigDecimal value = (BigDecimal) this.value;
+      return new NumericLiteral(pos, value.negate(), prec, scale, exact);
+    }
+  }
+
+  /** Parse tree node for Identifier. */
+  public static class Identifier extends Node {
+    final String value;
+
+    public Identifier(SqlParserPos pos, String value) {
+      super(pos, Op.IDENTIFIER);
+      this.value = Preconditions.checkNotNull(value);
+    }
+
+    public boolean isStar() {
+      return false;
+    }
+  }
+
+  /** Parse tree node for "*", a special kind of identifier. */
+  public static class SpecialIdentifier extends Identifier {
+    public SpecialIdentifier(SqlParserPos pos) {
+      super(pos, "*");
+    }
+
+    @Override public boolean isStar() {
+      return true;
+    }
+  }
+
+  /** Parse tree node for a call to a function or operator. */
+  public static class Call extends Node {
+    final ImmutableList<Node> operands;
+
+    Call(SqlParserPos pos, Op op, ImmutableList<Node> operands) {
+      super(pos, op);
+      this.operands = operands;
+    }
+
+    public Call(SqlParserPos pos, Op op, Node... operands) {
+      this(pos, op, ImmutableList.copyOf(operands));
+    }
+  }
+
+  /** Parse tree node for a program. */
+  public static class Program extends Node {
+    public final List<Stmt> stmtList;
+
+    public Program(SqlParserPos pos, List<Stmt> stmtList) {
+      super(pos, Op.PROGRAM);
+      this.stmtList = stmtList;
+    }
+  }
+
+  /** Parse tree for field schema.
+   *
+   * <p>Syntax:
+   * <blockquote><pre>identifier:type</pre></blockquote>
+   */
+  public static class FieldSchema extends Node {
+    final Identifier id;
+    final Type type;
+
+    public FieldSchema(SqlParserPos pos, Identifier id, Type type) {
+      super(pos, Op.FIELD_SCHEMA);
+      this.id = Preconditions.checkNotNull(id);
+      this.type = Preconditions.checkNotNull(type);
+    }
+  }
+
+  /** Parse tree for schema.
+   *
+   * <p>Syntax:
+   * <blockquote>
+   *   <pre>AS ( identifier:type [, identifier:type]... )</pre>
+   * </blockquote>
+   */
+  public static class Schema extends Node {
+    final List<FieldSchema> fieldSchemaList;
+
+    public Schema(SqlParserPos pos, List<FieldSchema> fieldSchemaList) {
+      super(pos, Op.SCHEMA);
+      this.fieldSchemaList = ImmutableList.copyOf(fieldSchemaList);
+    }
+  }
+
+  /** Parse tree for type. */
+  public static class Type extends Node {
+    final String name;
+
+    public Type(SqlParserPos pos, String name) {
+      super(pos, Op.TYPE);
+      this.name = name;
+    }
+  }
+
+  /** Contains output and indentation level while a tree of nodes is
+   * being converted to text. */
+  static class UnParser {
+    final StringBuilder buf = new StringBuilder();
+    final Spacer spacer = new Spacer(0);
+
+    public UnParser in() {
+      spacer.add(2);
+      return this;
+    }
+
+    public UnParser out() {
+      spacer.subtract(2);
+      return this;
+    }
+
+    public UnParser newline() {
+      buf.append(Util.LINE_SEPARATOR);
+      spacer.spaces(buf);
+      return this;
+    }
+
+    public UnParser append(String s) {
+      buf.append(s);
+      return this;
+    }
+
+    public UnParser append(Node n) {
+      return unParse(this, n);
+    }
+
+    public UnParser appendList(List<? extends Node> list) {
+      append("[").in();
+      for (Ord<Node> n : Ord.zip(list)) {
+        newline().append(n.e);
+        if (n.i < list.size() - 1) {
+          append(",");
+        }
+      }
+      return out().append("]");
+    }
+  }
+
+  /** Sort direction. */
+  public enum Direction {
+    ASC,
+    DESC,
+    NOT_SPECIFIED
+  }
+}
+
+// End Ast.java
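
For reference, Ast.toString renders a node via the unParse method above.
A parsed "d = DISTINCT e;" statement, for example, unparses to

    {op: DISTINCT, target: d, source: e}

and a PROGRAM node wraps its statements in {op: PROGRAM, stmts: [...]},
one statement per indented line.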

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/piglet/src/main/java/org/apache/calcite/piglet/Handler.java
----------------------------------------------------------------------
diff --git a/piglet/src/main/java/org/apache/calcite/piglet/Handler.java b/piglet/src/main/java/org/apache/calcite/piglet/Handler.java
new file mode 100644
index 0000000..bfe4595
--- /dev/null
+++ b/piglet/src/main/java/org/apache/calcite/piglet/Handler.java
@@ -0,0 +1,327 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.piglet;
+
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.calcite.rex.RexLiteral;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.fun.SqlStdOperatorTable;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.tools.PigRelBuilder;
+import org.apache.calcite.tools.RelBuilder;
+import org.apache.calcite.util.Pair;
+
+import com.google.common.collect.ImmutableList;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Walks over a Piglet AST and calls the corresponding methods in a
+ * {@link PigRelBuilder}.
+ */
+public class Handler {
+  private final PigRelBuilder builder;
+  private final Map<String, RelNode> map = new HashMap<>();
+
+  public Handler(PigRelBuilder builder) {
+    this.builder = builder;
+  }
+
+  /** Creates relational expressions for a given AST node. */
+  public Handler handle(Ast.Node node) {
+    final RelNode input;
+    final List<RexNode> rexNodes;
+    switch (node.op) {
+    case LOAD:
+      final Ast.LoadStmt load = (Ast.LoadStmt) node;
+      builder.scan((String) load.name.value);
+      register(load.target.value);
+      return this;
+    case VALUES:
+      final Ast.ValuesStmt values = (Ast.ValuesStmt) node;
+      final RelDataType rowType = toType(values.schema);
+      builder.values(tuples(values, rowType), rowType);
+      register(values.target.value);
+      return this;
+    case FOREACH:
+      final Ast.ForeachStmt foreach = (Ast.ForeachStmt) node;
+      builder.clear();
+      input = map.get(foreach.source.value);
+      builder.push(input);
+      rexNodes = new ArrayList<>();
+      for (Ast.Node exp : foreach.expList) {
+        rexNodes.add(toRex(exp));
+      }
+      builder.project(rexNodes);
+      register(foreach.target.value);
+      return this;
+    case FOREACH_NESTED:
+      final Ast.ForeachNestedStmt foreachNested = (Ast.ForeachNestedStmt) node;
+      builder.clear();
+      input = map.get(foreachNested.source.value);
+      builder.push(input);
+      System.out.println(input.getRowType());
+      for (RelDataTypeField field : input.getRowType().getFieldList()) {
+        switch (field.getType().getSqlTypeName()) {
+        case ARRAY:
+          System.out.println(field);
+        }
+      }
+      for (Ast.Stmt stmt : foreachNested.nestedStmtList) {
+        handle(stmt);
+      }
+      rexNodes = new ArrayList<>();
+      for (Ast.Node exp : foreachNested.expList) {
+        rexNodes.add(toRex(exp));
+      }
+      builder.project(rexNodes);
+      register(foreachNested.target.value);
+      return this;
+    case FILTER:
+      final Ast.FilterStmt filter = (Ast.FilterStmt) node;
+      builder.clear();
+      input = map.get(filter.source.value);
+      builder.push(input);
+      final RexNode rexNode = toRex(filter.condition);
+      builder.filter(rexNode);
+      register(filter.target.value);
+      return this;
+    case DISTINCT:
+      final Ast.DistinctStmt distinct = (Ast.DistinctStmt) node;
+      builder.clear();
+      input = map.get(distinct.source.value);
+      builder.push(input);
+      builder.distinct(null, -1);
+      register(distinct.target.value);
+      return this;
+    case ORDER:
+      final Ast.OrderStmt order = (Ast.OrderStmt) node;
+      builder.clear();
+      input = map.get(order.source.value);
+      builder.push(input);
+      final List<RexNode> nodes = new ArrayList<>();
+      for (Pair<Ast.Identifier, Ast.Direction> field : order.fields) {
+        toSortRex(nodes, field);
+      }
+      builder.sort(nodes);
+      register(order.target.value);
+      return this;
+    case LIMIT:
+      final Ast.LimitStmt limit = (Ast.LimitStmt) node;
+      builder.clear();
+      input = map.get(limit.source.value);
+      final int count = ((Number) limit.count.value).intValue();
+      builder.push(input);
+      builder.limit(0, count);
+      register(limit.target.value);
+      return this;
+    case GROUP:
+      final Ast.GroupStmt group = (Ast.GroupStmt) node;
+      builder.clear();
+      input = map.get(group.source.value);
+      builder.push(input).as(group.source.value);
+      final List<RelBuilder.GroupKey> groupKeys = new ArrayList<>();
+      final List<RexNode> keys = new ArrayList<>();
+      if (group.keys != null) {
+        for (Ast.Node key : group.keys) {
+          keys.add(toRex(key));
+        }
+      }
+      groupKeys.add(builder.groupKey(keys));
+      builder.group(PigRelBuilder.GroupOption.COLLECTED, null, -1, groupKeys);
+      register(group.target.value);
+      return this;
+    case PROGRAM:
+      final Ast.Program program = (Ast.Program) node;
+      for (Ast.Stmt stmt : program.stmtList) {
+        handle(stmt);
+      }
+      return this;
+    case DUMP:
+      final Ast.DumpStmt dump = (Ast.DumpStmt) node;
+      final RelNode relNode = map.get(dump.relation.value);
+      dump(relNode);
+      return this; // nothing to do; contains no algebra
+    default:
+      throw new AssertionError("unknown operation " + node.op);
+    }
+  }
+
+  /** Executes a relational expression and prints the output.
+   *
+   * <p>The default implementation does nothing.
+   *
+   * @param rel Relational expression
+   */
+  protected void dump(RelNode rel) {
+  }
+
+  private ImmutableList<ImmutableList<RexLiteral>>
+  tuples(Ast.ValuesStmt valuesStmt, RelDataType rowType) {
+    final ImmutableList.Builder<ImmutableList<RexLiteral>> listBuilder =
+        ImmutableList.builder();
+    for (List<Ast.Node> nodeList : valuesStmt.tupleList) {
+      listBuilder.add(tuple(nodeList, rowType));
+    }
+    return listBuilder.build();
+  }
+
+  private ImmutableList<RexLiteral> tuple(List<Ast.Node> nodeList,
+      RelDataType rowType) {
+    final ImmutableList.Builder<RexLiteral> listBuilder =
+        ImmutableList.builder();
+    for (Pair<Ast.Node, RelDataTypeField> pair
+        : Pair.zip(nodeList, rowType.getFieldList())) {
+      final Ast.Node node = pair.left;
+      if (node instanceof Ast.Literal) {
+        listBuilder.add(
+            (RexLiteral) builder.getRexBuilder().makeLiteral(
+                ((Ast.Literal) node).value, pair.right.getType(), false));
+      } else {
+        throw new IllegalArgumentException("not a literal: " + node);
+      }
+    }
+    return listBuilder.build();
+  }
+
+  private RelDataType toType(Ast.Schema schema) {
+    final RelDataTypeFactory.FieldInfoBuilder typeBuilder =
+        builder.getTypeFactory().builder();
+    for (Ast.FieldSchema fieldSchema : schema.fieldSchemaList) {
+      typeBuilder.add(fieldSchema.id.value, toType(fieldSchema.type));
+    }
+    return typeBuilder.build();
+  }
+
+  private RelDataType toType(Ast.Type type) {
+    final RelDataTypeFactory typeFactory = builder.getTypeFactory();
+    switch (type.name) {
+    case "int":
+      return typeFactory.createSqlType(SqlTypeName.INTEGER);
+    case "float":
+      return typeFactory.createSqlType(SqlTypeName.REAL);
+    default:
+      return typeFactory.createSqlType(SqlTypeName.VARCHAR);
+    }
+  }
+
+  private void toSortRex(List<RexNode> nodes,
+      Pair<Ast.Identifier, Ast.Direction> pair) {
+    if (pair.left.isStar()) {
+      for (RexNode node : builder.fields()) {
+        switch (pair.right) {
+        case DESC:
+          node = builder.desc(node);
+        }
+        nodes.add(node);
+      }
+    } else {
+      RexNode node = toRex(pair.left);
+      switch (pair.right) {
+      case DESC:
+        node = builder.desc(node);
+      }
+      nodes.add(node);
+    }
+  }
+
+  private RexNode toRex(Ast.Node exp) {
+    final Ast.Call call;
+    switch (exp.op) {
+    case LITERAL:
+      return builder.literal(((Ast.Literal) exp).value);
+    case IDENTIFIER:
+      final String value = ((Ast.Identifier) exp).value;
+      if (value.matches("^\\$[0-9]+")) {
+        int i = Integer.valueOf(value.substring(1));
+        return builder.field(i);
+      }
+      return builder.field(value);
+    case DOT:
+      call = (Ast.Call) exp;
+      final RexNode left = toRex(call.operands.get(0));
+      final Ast.Identifier right = (Ast.Identifier) call.operands.get(1);
+      return builder.dot(left, right.value);
+    case EQ:
+    case NE:
+    case GT:
+    case GTE:
+    case LT:
+    case LTE:
+    case AND:
+    case OR:
+    case NOT:
+    case PLUS:
+    case MINUS:
+      call = (Ast.Call) exp;
+      return builder.call(op(exp.op), toRex(call.operands));
+    default:
+      throw new AssertionError("unknown op " + exp.op);
+    }
+  }
+
+  private static SqlOperator op(Ast.Op op) {
+    switch (op) {
+    case EQ:
+      return SqlStdOperatorTable.EQUALS;
+    case NE:
+      return SqlStdOperatorTable.NOT_EQUALS;
+    case GT:
+      return SqlStdOperatorTable.GREATER_THAN;
+    case GTE:
+      return SqlStdOperatorTable.GREATER_THAN_OR_EQUAL;
+    case LT:
+      return SqlStdOperatorTable.LESS_THAN;
+    case LTE:
+      return SqlStdOperatorTable.LESS_THAN_OR_EQUAL;
+    case AND:
+      return SqlStdOperatorTable.AND;
+    case OR:
+      return SqlStdOperatorTable.OR;
+    case NOT:
+      return SqlStdOperatorTable.NOT;
+    case PLUS:
+      return SqlStdOperatorTable.PLUS;
+    case MINUS:
+      return SqlStdOperatorTable.MINUS;
+    default:
+      throw new AssertionError("unknown: " + op);
+    }
+  }
+
+  private ImmutableList<RexNode> toRex(Iterable<Ast.Node> operands) {
+    final ImmutableList.Builder<RexNode> builder = ImmutableList.builder();
+    for (Ast.Node operand : operands) {
+      builder.add(toRex(operand));
+    }
+    return builder.build();
+  }
+
+  /** Assigns the current relational expression to a given name. */
+  private void register(String name) {
+    map.put(name, builder.peek());
+  }
+}
+
+// End Handler.java
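
Putting the pieces together: the generated parser produces an Ast.Program,
and Handler walks it, driving a PigRelBuilder. A rough sketch (the
PigletParser constructor taking a java.io.Reader is the standard one that
JavaCC generates, and "config" is again an assumed FrameworkConfig over the
"scott" schema):

    final String script = "e = LOAD 'EMP';\n"
        + "d = DISTINCT e;\n"
        + "DUMP d;\n";
    // stmtListEof() may throw ParseException
    final Ast.Program program =
        new PigletParser(new StringReader(script)).stmtListEof();
    final PigRelBuilder builder = PigRelBuilder.create(config);
    new Handler(builder) {
      @Override protected void dump(RelNode rel) {
        // DUMP carries no algebra of its own; print the plan of its input
        System.out.println(RelOptUtil.toString(rel));
      }
    }.handle(program);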

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/piglet/src/main/java/org/apache/calcite/piglet/package-info.java
----------------------------------------------------------------------
diff --git a/piglet/src/main/java/org/apache/calcite/piglet/package-info.java b/piglet/src/main/java/org/apache/calcite/piglet/package-info.java
new file mode 100644
index 0000000..63d5365
--- /dev/null
+++ b/piglet/src/main/java/org/apache/calcite/piglet/package-info.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Piglet, a Pig-like language. */
+@PackageMarker
+package org.apache.calcite.piglet;
+
+import org.apache.calcite.avatica.util.PackageMarker;
+
+// End package-info.java

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/piglet/src/main/javacc/PigletParser.jj
----------------------------------------------------------------------
diff --git a/piglet/src/main/javacc/PigletParser.jj b/piglet/src/main/javacc/PigletParser.jj
new file mode 100644
index 0000000..8ea79ce
--- /dev/null
+++ b/piglet/src/main/javacc/PigletParser.jj
@@ -0,0 +1,1112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+options {
+  STATIC = false;
+  IGNORE_CASE = true;
+  UNICODE_INPUT = true;
+}
+
+PARSER_BEGIN(PigletParser)
+
+package org.apache.calcite.piglet.parser;
+
+import org.apache.calcite.avatica.util.Casing;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.parser.SqlParseException;
+import org.apache.calcite.sql.parser.SqlParserUtil;
+import org.apache.calcite.runtime.CalciteContextException;
+import org.apache.calcite.piglet.Ast.*;
+import org.apache.calcite.util.trace.CalciteTrace;
+import org.apache.calcite.util.Pair;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import static org.apache.calcite.util.Static.RESOURCE;
+
+/**
+ * Parser for Piglet, a Pig-like language, generated from PigletParser.jj by JavaCC.
+ */
+public class PigletParser
+{
+  private static final Logger LOGGER = CalciteTrace.getParserTracer();
+
+  public void setTabSize(int tabSize) {
+    jj_input_stream.setTabSize(tabSize);
+  }
+}
+
+PARSER_END(PigletParser)
+
+/* For Debug */
+JAVACODE
+void debug_message1()
+{
+  LOGGER.log(Level.INFO, getToken(0).image + " , " + getToken(1).image);
+}
+
+JAVACODE String unquotedIdentifier() {
+  return SqlParserUtil.strip(getToken(0).image, null, null, null,
+    Casing.UNCHANGED);
+}
+
+String nonReservedKeyWord() :
+{
+  String kw;
+}
+{
+  kw = commonNonReservedKeyWord() {
+    return kw;
+  }
+}
+
+/* Epsilon */
+JAVACODE
+void e() {}
+
+JAVACODE SqlParserPos pos() {
+  return new SqlParserPos(token.beginLine, token.beginColumn,
+    token.endLine, token.endColumn);
+}
+
+JAVACODE SqlParserPos pos2(SqlParserPos p) {
+  return pos().plus(p);
+}
+
+JAVACODE SqlParserPos pos3(Node n) {
+  return pos().plus(n.pos);
+}
+
+/**
+ * Converts a ParseException (local to this particular instantiation
+ * of the parser) into a SqlParseException (common to all parsers).
+ */
+JAVACODE SqlParseException convertException(Throwable ex) {
+  if (ex instanceof SqlParseException) {
+    return (SqlParseException) ex;
+  }
+  SqlParserPos pos = null;
+  int[][] expectedTokenSequences = null;
+  String[] tokenImage = null;
+  if (ex instanceof ParseException) {
+    ParseException pex = (ParseException) ex;
+    expectedTokenSequences = pex.expectedTokenSequences;
+    tokenImage = pex.tokenImage;
+    if (pex.currentToken != null) {
+      final Token token = pex.currentToken.next;
+      pos = new SqlParserPos(token.beginLine, token.beginColumn,
+          token.endLine, token.endColumn);
+    }
+  } else if (ex instanceof TokenMgrError) {
+    TokenMgrError tme = (TokenMgrError) ex;
+    expectedTokenSequences = null;
+    tokenImage = null;
+    // Example:
+    //    Lexical error at line 3, column 24.  Encountered "#" after "a".
+    final java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(
+        "(?s)Lexical error at line ([0-9]+), column ([0-9]+).*");
+    java.util.regex.Matcher matcher = pattern.matcher(ex.getMessage());
+    if (matcher.matches()) {
+      int line = Integer.parseInt(matcher.group(1));
+      int column = Integer.parseInt(matcher.group(2));
+      pos = new SqlParserPos(line, column, line, column);
+    }
+  } else if (ex instanceof CalciteContextException) {
+    // CalciteContextException is the standard wrapper for exceptions
+    // produced by the validator, but in the parser, the standard is
+    // SqlParseException; so, strip it away. In case you were wondering,
+    // the CalciteContextException appears because the parser
+    // occasionally calls into validator-style code such as
+    // SqlSpecialOperator.reduceExpr.
+    CalciteContextException ece =
+        (CalciteContextException) ex;
+    pos = new SqlParserPos(
+        ece.getPosLine(),
+        ece.getPosColumn(),
+        ece.getEndPosLine(),
+        ece.getEndPosColumn());
+    ex = ece.getCause();
+  }
+
+  return new SqlParseException(ex.getMessage(), pos, expectedTokenSequences,
+      tokenImage, ex);
+}
+
+/*****************************************
+ * Syntactical Descriptions              *
+ *****************************************/
+
+/**
+ * Parses a list of statements (LOAD, DUMP, etc.) followed by
+ * the end-of-file symbol.
+ */
+Program stmtListEof() :
+{
+  final List<Stmt> list = Lists.newArrayList();
+  Stmt s;
+}
+{
+  (
+    s = stmt() {
+      list.add(s);
+    }
+  )*
+  <EOF> {
+    SqlParserPos p = SqlParserPos.ZERO;
+    for (Stmt s2 : list) {
+      p = p.plus(s2.pos);
+    }
+    return new Program(p, list);
+  }
+}
+
+Stmt stmt() :
+{
+  final Identifier target;
+  final Stmt s;
+}
+{
+  (
+    target = simpleIdentifier() <EQ>
+    (
+      s = loadStmt(target)
+    |
+      s = valuesStmt(target)
+    |
+      s = distinctStmt(target)
+    |
+      s = limitStmt(target)
+    |
+      s = orderStmt(target)
+    |
+      s = foreachStmt(target)
+    |
+      s = filterStmt(target)
+    |
+      s = groupStmt(target)
+    )
+  |
+    s = describeStmt()
+  |
+    s = dumpStmt()
+  ) {
+    return s;
+  }
+}
+
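+/** Parses an assignment that may appear inside a nested FOREACH block:
+ * DISTINCT, LIMIT, ORDER or FILTER. */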
+Assignment1 nestedStmt() :
+{
+  final Identifier target;
+  final Assignment1 s;
+}
+{
+  target = simpleIdentifier() <EQ>
+  (
+    s = distinctStmt(target)
+  |
+    s = limitStmt(target)
+  |
+    s = orderStmt(target)
+  |
+    s = filterStmt(target)
+  /* or sample */
+  ) {
+    return s;
+  }
+}
+
+LoadStmt loadStmt(final Identifier target) :
+{
+  final Literal name;
+}
+{
+  <LOAD> name = stringLiteral() <SEMICOLON> {
+    return new LoadStmt(pos3(target), target, name);
+  }
+}
+
+ValuesStmt valuesStmt(final Identifier target) :
+{
+  final List<List<Node>> tupleList;
+  final Schema schema;
+}
+{
+  <VALUES> tupleList = tupleListMaybeEmpty()
+  <AS> schema = schema() <SEMICOLON> {
+    return new ValuesStmt(pos3(target), target, schema, tupleList);
+  }
+}
+
+/** Parses a tuple list.
+ * The list may be empty.
+ * Each tuple is a list of at least one expression.
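+ *
+ * <p>For example, {@code (1, 'a'), (2, 'b')} in a VALUES statement is a
+ * tuple list with two tuples. */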
+List<List<Node>> tupleListMaybeEmpty() :
+{
+  final ImmutableList.Builder<List<Node>> tupleList = ImmutableList.builder();
+  List<Node> tuple;
+}
+{
+  [
+    tuple = tuple() {
+      tupleList.add(tuple);
+    }
+    (
+      <COMMA> tuple = tuple() {
+        tupleList.add(tuple);
+      }
+    )*
+  ] {
+    return tupleList.build();
+  }
+}
+
+DescribeStmt describeStmt() :
+{
+  final SqlParserPos p;
+  final Identifier id;
+}
+{
+  <DESCRIBE> { p = pos(); } id = simpleIdentifier() <SEMICOLON> {
+    return new DescribeStmt(pos2(p), id);
+  }
+}
+
+DumpStmt dumpStmt() :
+{
+  final SqlParserPos p;
+  final Identifier id;
+}
+{
+  <DUMP> { p = pos(); } id = simpleIdentifier() <SEMICOLON> {
+    return new DumpStmt(pos2(p), id);
+  }
+}
+
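+/** Parses a FOREACH statement, either the simple form
+ * {@code B = FOREACH A GENERATE 1, name;} or the nested form, whose braces
+ * enclose a block of statements followed by GENERATE. */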
+Assignment foreachStmt(final Identifier target) :
+{
+  final Identifier id;
+  final List<Node> expList;
+  final List<Stmt> nestedStmtList;
+  final Schema schema = null; // TODO: parse the optional output schema
+}
+{
+  <FOREACH> id = simpleIdentifier()
+  (
+    LOOKAHEAD(1)
+    <GENERATE> expList = expCommaList() <SEMICOLON> {
+      return new ForeachStmt(pos3(target), target, id, expList, schema);
+    }
+  | <LBRACE> nestedStmtList = nestedStmtList()
+    <GENERATE> expList = expCommaList() <SEMICOLON> <RBRACE> {
+      return new ForeachNestedStmt(pos3(target), target, id, nestedStmtList,
+          expList, schema);
+    }
+  )
+}
+
+List<Stmt> nestedStmtList() :
+{
+  Assignment s;
+  final List<Stmt> list = Lists.newArrayList();
+}
+{
+  s = nestedStmt() {
+    list.add(s);
+  }
+  (
+    s = nestedStmt() {
+      list.add(s);
+    }
+  )* {
+    return list;
+  }
+}
+
+FilterStmt filterStmt(final Identifier target) :
+{
+  final Identifier id;
+  final Node condition;
+}
+{
+  <FILTER> id = simpleIdentifier()
+  <BY> condition = exp() <SEMICOLON> {
+    return new FilterStmt(pos3(target), target, id, condition);
+  }
+}
+
+DistinctStmt distinctStmt(final Identifier target) :
+{
+  final Identifier id;
+}
+{
+  <DISTINCT> id = simpleIdentifier() <SEMICOLON> {
+    return new DistinctStmt(pos3(target), target, id);
+  }
+}
+
+LimitStmt limitStmt(final Identifier target) :
+{
+  final Identifier id;
+  final NumericLiteral count;
+}
+{
+  <LIMIT> id = simpleIdentifier() count = numericLiteral() <SEMICOLON> {
+    return new LimitStmt(pos3(target), target, id, count);
+  }
+}
+
+OrderStmt orderStmt(final Identifier target) :
+{
+  final Identifier id;
+  final List<Pair<Identifier, Direction>> fields;
+}
+{
+  <ORDER> id = simpleIdentifier() <BY> fields = orderFieldCommaList()
+  <SEMICOLON> {
+    return new OrderStmt(pos3(target), target, id, fields);
+  }
+}
+
+List<Pair<Identifier, Direction>> orderFieldCommaList() :
+{
+  final List<Pair<Identifier, Direction>> list = Lists.newArrayList();
+  Pair<Identifier, Direction> field;
+}
+{
+  field = orderField() {
+    list.add(field);
+  }
+  (
+    <COMMA> field = orderField() {
+      list.add(field);
+    }
+  )* {
+    return list;
+  }
+}
+
+Pair<Identifier, Direction> orderField() :
+{
+  final Identifier id;
+  final Direction direction;
+}
+{
+  (
+    <STAR> {
+      id = new SpecialIdentifier(pos());
+    }
+  |
+    id = simpleIdentifier()
+  )
+  (
+    <ASC> {
+      direction = Direction.ASC;
+    }
+  |
+    <DESC> {
+      direction = Direction.DESC;
+    }
+  |
+    {
+      direction = Direction.NOT_SPECIFIED;
+    }
+  ) {
+    return Pair.of(id, direction);
+  }
+}
+
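+/** Parses a GROUP statement, for example {@code B = GROUP A BY DEPTNO;}
+ * or {@code H = GROUP G ALL;}. */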
+GroupStmt groupStmt(final Identifier target) :
+{
+  final Identifier id;
+  final List<Node> keys;
+  final Node exp;
+}
+{
+  <GROUP> id = simpleIdentifier()
+  (
+    <ALL> {
+      keys = null;
+    }
+  |
+    <BY>
+    (
+      keys = tuple()
+    |
+      exp = exp() {
+        keys = ImmutableList.of(exp);
+      }
+    )
+  )
+  <SEMICOLON> {
+    return new GroupStmt(pos3(target), target, id, keys);
+  }
+}
+
+/** Parses a schema.
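+ *
+ * <p>For example, {@code (x: int, y: string)}, as in
+ * {@code VALUES (1, 'a'), (2, 'b') AS (x: int, y: string)}. */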
+Schema schema() :
+{
+  final SqlParserPos pos;
+  final ImmutableList.Builder<FieldSchema> list = ImmutableList.builder();
+  FieldSchema fieldSchema;
+}
+{
+  <LPAREN> {
+    pos = pos();
+  }
+  fieldSchema = fieldSchema() {
+    list.add(fieldSchema);
+  }
+  (
+    <COMMA> fieldSchema = fieldSchema() {
+      list.add(fieldSchema);
+    }
+  )*
+  <RPAREN> {
+    return new Schema(pos2(pos), list.build());
+  }
+}
+
+FieldSchema fieldSchema() :
+{
+  final Identifier identifier;
+  final Type type;
+}
+{
+  identifier = simpleIdentifier() <COLON> type = type() {
+    return new FieldSchema(pos3(identifier), identifier, type);
+  }
+}
+
+/** Parses a Type. */
+Type type() :
+{
+  String s;
+}
+{
+  s = identifier() {
+    return new Type(pos(), s);
+  }
+}
+
+/** Parses an expression. */
+Node exp() :
+{
+  final Node e;
+}
+{
+  e = exp1() {
+    return e;
+  }
+}
+
+/** Parses an expression of precedence 1: OR. */
+Node exp1() :
+{
+  Node e;
+  Node f;
+}
+{
+  e = exp2()
+  (
+    <OR> f = exp2() {
+      e = new Call(pos3(e), Op.OR, e, f);
+    }
+  )* {
+    return e;
+  }
+}
+
+/** Parses an expression of precedence 2: AND. */
+Node exp2() :
+{
+  Node e;
+  Node f;
+}
+{
+  e = exp3()
+  (
+    <AND> f = exp3() {
+      e = new Call(pos3(e), Op.AND, e, f);
+    }
+  )* {
+    return e;
+  }
+}
+
+/** Parses an expression of precedence 3: NOT. */
+Node exp3() :
+{
+  final Node e;
+  final SqlParserPos p;
+}
+{
+  (
+    <NOT> {
+      p = pos();
+    }
+    e = exp3() {
+      return new Call(pos2(p), Op.NOT, e);
+    }
+  |
+    e = exp4() {
+      return e;
+    }
+  )
+}
+
+/** Parses an expression of precedence 4: relational operators (==, <, <=,
+ * >, >=); the != token is defined but not yet handled here. */
+Node exp4() :
+{
+  Node e;
+  Node f;
+}
+{
+  e = exp5()
+  (
+    <EQEQ> f = exp5() {
+      e = new Call(pos3(e), Op.EQ, e, f);
+    }
+  |
+    <GT> f = exp5() {
+      e = new Call(pos3(e), Op.GT, e, f);
+    }
+  |
+    <LT> f = exp5() {
+      e = new Call(pos3(e), Op.LT, e, f);
+    }
+  |
+    <GE> f = exp5() {
+      e = new Call(pos3(e), Op.GTE, e, f);
+    }
+  |
+    <LE> f = exp5() {
+      e = new Call(pos3(e), Op.LTE, e, f);
+    }
+  )* {
+    return e;
+  }
+}
+
+/** Parses an expression of precedence 5: +, -. */
+Node exp5() :
+{
+  Node e;
+  Node f;
+}
+{
+  e = exp10()
+  (
+    <PLUS> f = exp10() {
+      e = new Call(pos3(e), Op.PLUS, e, f);
+    }
+  |
+    <MINUS> f = exp10() {
+      e = new Call(pos3(e), Op.MINUS, e, f);
+    }
+  )* {
+    return e;
+  }
+}
+
+/** Parses an expression of precedence 10: the "." (dot) operator. */
+Node exp10() :
+{
+  Node e;
+  Node f;
+}
+{
+  e = atom()
+  (
+    <DOT>
+    f = atom() {
+      e = new Call(pos3(e), Op.DOT, e, f);
+    }
+  )* {
+    return e;
+  }
+}
+
+/** Parses an atomic expression, effectively an expression of infinite
+ * precedence. */
+Node atom() :
+{
+  final Node e;
+}
+{
+  (
+    e = literal()
+  |
+    e = simpleIdentifier()
+  |
+    <LPAREN> e = exp() <RPAREN>
+  ) {
+    return e;
+  }
+}
+
+/** A non-empty list of expressions. */
+List<Node> expCommaList() :
+{
+  final List<Node> list = Lists.newArrayList();
+  Node e;
+}
+{
+  e = exp() {
+    list.add(e);
+  }
+  (
+    <COMMA> e = exp() {
+      list.add(e);
+    }
+  )* {
+    return list;
+  }
+}
+
+/** Parses a tuple. A tuple is a list of one or more expressions surrounded by
+ * parentheses.
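+ *
+ * <p>For example, {@code (1, 'a')} in a VALUES statement, or
+ * {@code (e1, e2)} in a GROUP ... BY statement. */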
+List<Node> tuple() :
+{
+  final List<Node> expList;
+}
+{
+  <LPAREN> expList = expCommaList() <RPAREN> {
+    return expList;
+  }
+}
+
+/** Parses a literal expression. */
+Literal literal() :
+{
+  Literal e;
+}
+{
+  (
+    e = numericLiteral()
+  |
+    e = stringLiteral()
+  ) {
+    return e;
+  }
+}
+
+/** Parses an unsigned numeric literal. */
+NumericLiteral unsignedNumericLiteral() :
+{
+}
+{
+  (
+    <UNSIGNED_INTEGER_LITERAL> {
+      return Literal.createExactNumeric(token.image, pos());
+    }
+  |
+    <FLOAT_LITERAL> {
+      assert token.image.endsWith("F");
+      final String x = token.image.substring(0, token.image.length() - 1);
+      return Literal.createExactNumeric(x, pos());
+    }
+  )
+}
+
+/** Parses a numeric literal (which may be signed). */
+NumericLiteral numericLiteral() :
+{
+  NumericLiteral num;
+  SqlParserPos p;
+}
+{
+  <PLUS> num = unsignedNumericLiteral() {
+    return num;
+  }
+|
+  <MINUS> {
+    p = pos();
+  }
+  num = unsignedNumericLiteral() {
+    return num.negate(pos2(p));
+  }
+|
+  num = unsignedNumericLiteral() {
+    return num;
+  }
+}
+
+/** Parses a string literal. */
+Literal stringLiteral() :
+{
+  String s;
+}
+{
+  <QUOTED_STRING> {
+    s = SqlParserUtil.parseString(token.image);
+    return new Literal(pos(), s);
+  }
+}
+
+/** Parses a simple identifier as a string. */
+String identifier() :
+{
+  String id;
+}
+{
+  (
+    <IDENTIFIER> {
+      id = unquotedIdentifier();
+    }
+  | id = nonReservedKeyWord()
+  ) {
+    return id;
+  }
+}
+
+/**
+ * Parses a simple identifier as an Identifier.
+ */
+Identifier simpleIdentifier() :
+{
+  String s;
+}
+{
+  s = identifier() {
+    return new Identifier(pos(), s);
+  }
+}
+
+/* KEYWORDS:  anything in this list is a reserved word unless it appears
+   in the nonReservedKeyWord() production. */
+
+<DEFAULT, DQID, BTID> TOKEN :
+{
+  < ALL: "ALL" >
+| < AND: "AND" >
+| < ASC: "ASC" >
+| < AS: "AS" >
+| < BY: "BY" >
+| < DESC: "DESC" >
+| < DESCRIBE: "DESCRIBE" >
+| < DISTINCT: "DISTINCT" >
+| < DUMP: "DUMP" >
+| < FILTER: "FILTER" >
+| < FOREACH: "FOREACH" >
+| < GENERATE: "GENERATE" >
+| < GROUP: "GROUP" >
+| < LOAD: "LOAD" >
+| < LIMIT: "LIMIT" >
+| < NOT: "NOT" >
+| < ORDER: "ORDER" >
+| < OR: "OR" >
+| < VALUES: "VALUES" >
+}
+
+/** Parses a non-reserved keyword for use as an identifier. */
+String commonNonReservedKeyWord() :
+{
+}
+{
+  (
+    <ALL>
+  | <AND>
+  | <ASC>
+  | <BY>
+  | <DESC>
+  | <DESCRIBE>
+  | <DISTINCT>
+  | <DUMP>
+  | <FOREACH>
+  | <GENERATE>
+  | <GROUP>
+  | <LIMIT>
+  | <LOAD>
+  | <NOT>
+  | <OR>
+  | <ORDER>
+  | <VALUES>
+  ) {
+    return unquotedIdentifier();
+  }
+}
+
+/* LITERALS */
+
+<DEFAULT, DQID, BTID> TOKEN :
+{
+    < UNSIGNED_INTEGER_LITERAL: (["0"-"9"])+ >
+    |
+    < FLOAT_LITERAL: (["0"-"9"])+ "." (["0"-"9"])+ "F" >
+    |
+    < APPROX_NUMERIC_LITERAL:
+    (<UNSIGNED_INTEGER_LITERAL> | <DECIMAL_NUMERIC_LITERAL>) <EXPONENT> >
+    |
+    < DECIMAL_NUMERIC_LITERAL:
+    (["0"-"9"])+(".")?(["0"-"9"])*
+    | "."(["0"-"9"])+
+    >
+    |
+    < #EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
+    |
+    < #HEXDIGIT: ["0"-"9","a"-"f","A"-"F"] >
+    |
+    < #WHITESPACE:
+    [ " ","\t","\n","\r","\f" ]
+    >
+    |
+    /* To improve error reporting, we allow all kinds of characters,
+     * not just hexits, in a binary string literal. */
+    < BINARY_STRING_LITERAL: ["x","X"] <QUOTE> ( (~["'"]) | ("''"))* <QUOTE> >
+    |
+    < QUOTED_STRING: <QUOTE> ( (~["'"]) | ("''"))* <QUOTE> >
+    |
+    < PREFIXED_STRING_LITERAL: ("_" <CHARSETNAME> | "N") <QUOTED_STRING> >
+    |
+    < UNICODE_STRING_LITERAL: "U" "&" <QUOTED_STRING> >
+    |
+    < #CHARSETNAME: (["a"-"z","A"-"Z","0"-"9"])
+    (["a"-"z","A"-"Z","0"-"9",":",".","-","_"])*
+    >
+}
+
+<DEFAULT, DQID, BTID> TOKEN :
+{
+    < UNICODE_QUOTED_ESCAPE_CHAR:
+    <QUOTE>
+    (~["0"-"9","a"-"f","A"-"F","+","\""," ","\t","\n","\r","\f"])
+    <QUOTE>
+    >
+}
+
+/* SEPARATORS */
+
+<DEFAULT, DQID, BTID> TOKEN :
+{
+    < LPAREN: "(">
+    | < RPAREN: ")">
+    | < LBRACE_D: "{" (" ")* ["d","D"] >
+    | < LBRACE_T: "{" (" ")* ["t","T"] >
+    | < LBRACE_TS: "{" (" ")* ["t","T"] ["s","S"] >
+    | < LBRACE_FN: "{" (" ")* ["f","F"] ["n","N"] >
+    | < LBRACE: "{" >
+    | < RBRACE: "}" >
+    | < LBRACKET: "[" >
+    | < RBRACKET: "]" >
+    | < SEMICOLON: ";" >
+    | < DOT: "." >
+    | < COMMA: "," >
+}
+
+/* OPERATORS */
+
+<DEFAULT, DQID, BTID> TOKEN :
+{
+    < EQ: "=" >
+    | < EQEQ: "==" >
+    | < GT: ">" >
+    | < LT: "<" >
+    | < HOOK: "?" >
+    | < COLON: ":" >
+    | < LE: "<=" >
+    | < GE: ">=" >
+    | < NE: "!=" >
+    | < PLUS: "+" >
+    | < MINUS: "-" >
+    | < STAR: "*" >
+    | < SLASH: "/" >
+    | < CONCAT: "||" >
+    | < DOUBLE_PERIOD: ".." >
+    | < QUOTE: "'" >
+    | < DOUBLE_QUOTE: "\"" >
+}
+
+
+/*****************************************
+ * Lexical Descriptions                  *
+ *****************************************/
+
+TOKEN_MGR_DECLS : {
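+    // Comments push the current lexical state onto this stack and pop it
+    // when they end, returning the lexer to whichever 'normal state'
+    // (DEFAULT, DQID or BTID) was active before the comment.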
+    List<Integer> lexicalStateStack = Lists.newArrayList();
+
+    void pushState() {
+      lexicalStateStack.add(curLexState);
+    }
+
+    void popState() {
+      SwitchTo(lexicalStateStack.remove(lexicalStateStack.size() - 1));
+    }
+}
+
+/*
+Lexical states:
+
+DEFAULT: Identifiers are quoted in brackets, e.g. [My Identifier]
+DQID:    Identifiers are double-quoted, e.g. "My Identifier"
+BTID:    Identifiers are enclosed in back-ticks, e.g. `My Identifier`
+IN_SINGLE_LINE_COMMENT:
+IN_FORMAL_COMMENT:
+IN_MULTI_LINE_COMMENT:
+
+DEFAULT, DQID, BTID are the 3 'normal states'. Behavior is identical except
+for how quoted identifiers are recognized.
+
+After a comment has completed, the lexer returns to the previous state, one
+of the 'normal states'.
+*/
+
+/* WHITE SPACE */
+
+<DEFAULT, DQID, BTID> SKIP :
+{
+    " "
+    | "\t"
+    | "\n"
+    | "\r"
+    | "\f"
+}
+
+/* COMMENTS */
+
+<DEFAULT, DQID, BTID> MORE :
+{
+    <"/**" ~["/"]> { pushState(); } : IN_FORMAL_COMMENT
+}
+
+<DEFAULT, DQID, BTID> MORE :
+{
+    "//" { pushState(); } : IN_SINGLE_LINE_COMMENT
+    |
+    "--" { pushState(); } : IN_SINGLE_LINE_COMMENT
+    |
+    "/*" { pushState(); } : IN_MULTI_LINE_COMMENT
+}
+
+<IN_SINGLE_LINE_COMMENT>
+SPECIAL_TOKEN :
+{
+    <SINGLE_LINE_COMMENT: "\n" | "\r" | "\r\n" > { popState(); }
+}
+
+<IN_FORMAL_COMMENT>
+SPECIAL_TOKEN :
+{
+    <FORMAL_COMMENT: "*/" > { popState(); }
+}
+
+<IN_MULTI_LINE_COMMENT>
+SPECIAL_TOKEN :
+{
+    <MULTI_LINE_COMMENT: "*/" > { popState(); }
+}
+
+<IN_SINGLE_LINE_COMMENT,IN_FORMAL_COMMENT,IN_MULTI_LINE_COMMENT>
+MORE :
+{
+    < ~[] >
+}
+
+
+/* IDENTIFIERS */
+
+<DEFAULT> TOKEN :
+{
+    < BRACKET_QUOTED_IDENTIFIER:
+    "["
+    (   (~["]","\n","\r"])
+        | ("]]")
+        )+
+    "]"
+    >
+}
+
+<DQID> TOKEN :
+{
+    < QUOTED_IDENTIFIER:
+    "\""
+    (   (~["\"","\n","\r"])
+        | ("\"\"")
+        )+
+    "\""
+    >
+}
+
+<BTID>  TOKEN :
+{
+    < BACK_QUOTED_IDENTIFIER:
+    "`"
+    (   (~["`","\n","\r"])
+        | ("``")
+        )+
+    "`"
+    >
+}
+
+<DEFAULT, DQID, BTID> TOKEN :
+{
+    < COLLATION_ID:
+    (<LETTER>|<DIGIT>)+ (<LETTER>|<DIGIT>|":"|"."|"-"|"_")*
+    "$"
+    (<LETTER>|"_")+
+    ("$" (<LETTER>|<DIGIT>|"_")+)?
+    >
+    |
+    < IDENTIFIER: <LETTER> (<LETTER>|<DIGIT>)* >
+    |
+    < UNICODE_QUOTED_IDENTIFIER: "U" "&" <QUOTED_IDENTIFIER> >
+    |
+    < #LETTER:
+    [
+        "\u0024",
+        "\u0041"-"\u005a",
+        "\u005f",
+        "\u0061"-"\u007a",
+        "\u00c0"-"\u00d6",
+        "\u00d8"-"\u00f6",
+        "\u00f8"-"\u00ff",
+        "\u0100"-"\u1fff",
+        "\u3040"-"\u318f",
+        "\u3300"-"\u337f",
+        "\u3400"-"\u3d2d",
+        "\u4e00"-"\u9fff",
+        "\uf900"-"\ufaff"
+    ]
+    >
+    |
+    < #DIGIT:
+    [
+        "\u0030"-"\u0039",
+        "\u0660"-"\u0669",
+        "\u06f0"-"\u06f9",
+        "\u0966"-"\u096f",
+        "\u09e6"-"\u09ef",
+        "\u0a66"-"\u0a6f",
+        "\u0ae6"-"\u0aef",
+        "\u0b66"-"\u0b6f",
+        "\u0be7"-"\u0bef",
+        "\u0c66"-"\u0c6f",
+        "\u0ce6"-"\u0cef",
+        "\u0d66"-"\u0d6f",
+        "\u0e50"-"\u0e59",
+        "\u0ed0"-"\u0ed9",
+        "\u1040"-"\u1049"
+    ]
+    >
+}
+
+// End PigletParser.jj

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/piglet/src/test/java/org/apache/calcite/test/Fluent.java
----------------------------------------------------------------------
diff --git a/piglet/src/test/java/org/apache/calcite/test/Fluent.java b/piglet/src/test/java/org/apache/calcite/test/Fluent.java
new file mode 100644
index 0000000..6cfa410
--- /dev/null
+++ b/piglet/src/test/java/org/apache/calcite/test/Fluent.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.test;
+
+import org.apache.calcite.piglet.Ast;
+import org.apache.calcite.piglet.Handler;
+import org.apache.calcite.piglet.parser.ParseException;
+import org.apache.calcite.piglet.parser.PigletParser;
+import org.apache.calcite.plan.RelOptUtil;
+import org.apache.calcite.tools.PigRelBuilder;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Lists;
+
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+/** Fluent API to perform Piglet test actions.
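+ *
+ * <p>A typical test in {@link PigletTest} chains assertions, for example
+ * {@code pig(script).explainContains(plan).returns(output)}. */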
+class Fluent {
+  private final String pig;
+
+  public Fluent(String pig) {
+    this.pig = pig;
+  }
+
+  private Ast.Program parseProgram(String s) throws ParseException {
+    return new PigletParser(new StringReader(s)).stmtListEof();
+  }
+
+  public Fluent explainContains(String expected) throws ParseException {
+    final Ast.Program program = parseProgram(pig);
+    final PigRelBuilder builder =
+        PigRelBuilder.create(PigRelBuilderTest.config().build());
+    new Handler(builder).handle(program);
+    assertThat(RelOptUtil.toString(builder.peek()), is(expected));
+    return this;
+  }
+
+  public Fluent returns(final String out)
+      throws ParseException {
+    return returns(
+        new Function<String, Void>() {
+          public Void apply(String s) {
+            assertThat(s, is(out));
+            return null;
+          }
+        });
+  }
+
+  public Fluent returnsUnordered(String... lines) throws ParseException {
+    final List<String> expectedLines = Lists.newArrayList(lines);
+    Collections.sort(expectedLines);
+    return returns(
+        new Function<String, Void>() {
+          public Void apply(String s) {
+            final List<String> actualLines = new ArrayList<>();
+            for (;;) {
+              int i = s.indexOf('\n');
+              if (i < 0) {
+                if (!s.isEmpty()) {
+                  actualLines.add(s);
+                }
+                break;
+              } else {
+                actualLines.add(s.substring(0, i));
+                s = s.substring(i + 1);
+              }
+            }
+            Collections.sort(actualLines);
+            assertThat(actualLines, is(expectedLines));
+            return null;
+          }
+        });
+  }
+
+  public Fluent returns(Function<String, Void> checker) throws ParseException {
+    final Ast.Program program = parseProgram(pig);
+    final PigRelBuilder builder =
+        PigRelBuilder.create(PigRelBuilderTest.config().build());
+    final StringWriter sw = new StringWriter();
+    new CalciteHandler(builder, sw).handle(program);
+    checker.apply(sw.toString());
+    return this;
+  }
+
+  public Fluent parseContains(String expected) throws ParseException {
+    final Ast.Program program = parseProgram(pig);
+    assertThat(Ast.toString(program), is(expected));
+    return this;
+  }
+}
+
+// End Fluent.java

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/piglet/src/test/java/org/apache/calcite/test/PigletTest.java
----------------------------------------------------------------------
diff --git a/piglet/src/test/java/org/apache/calcite/test/PigletTest.java b/piglet/src/test/java/org/apache/calcite/test/PigletTest.java
new file mode 100644
index 0000000..b8d204b
--- /dev/null
+++ b/piglet/src/test/java/org/apache/calcite/test/PigletTest.java
@@ -0,0 +1,289 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.test;
+
+import org.apache.calcite.piglet.parser.ParseException;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+/** Unit tests for Piglet. */
+public class PigletTest {
+  private static Fluent pig(String pig) {
+    return new Fluent(pig);
+  }
+
+  @Test public void testParseLoad() throws ParseException {
+    final String s = "A = LOAD 'Emp';";
+    final String expected = "{op: PROGRAM, stmts: [\n"
+        + "  {op: LOAD, target: A, name: Emp}]}";
+    pig(s).parseContains(expected);
+  }
+
+  /** Tests parsing and un-parsing all kinds of operators. */
+  @Test public void testParse2() throws ParseException {
+    final String s = "A = LOAD 'Emp';\n"
+        + "DESCRIBE A;\n"
+        + "DUMP A;\n"
+        + "B = FOREACH A GENERATE 1, name;\n"
+        + "B1 = FOREACH A {\n"
+        + "  X = DISTINCT A;\n"
+        + "  Y = FILTER X BY foo;\n"
+        + "  Z = LIMIT Z 3;\n"
+        + "  GENERATE 1, name;\n"
+        + "}\n"
+        + "C = FILTER B BY name;\n"
+        + "D = DISTINCT C;\n"
+        + "E = ORDER D BY $1 DESC, $2 ASC, $3;\n"
+        + "F = ORDER E BY * DESC;\n"
+        + "G = LIMIT F -10;\n"
+        + "H = GROUP G ALL;\n"
+        + "I = GROUP H BY e;\n"
+        + "J = GROUP I BY (e1, e2);\n";
+    final String expected = "{op: PROGRAM, stmts: [\n"
+        + "  {op: LOAD, target: A, name: Emp},\n"
+        + "  {op: DESCRIBE, relation: A},\n"
+        + "  {op: DUMP, relation: A},\n"
+        + "  {op: FOREACH, target: B, source: A, expList: [\n"
+        + "    1,\n"
+        + "    name]},\n"
+        + "  {op: FOREACH, target: B1, source: A, nestedOps: [\n"
+        + "    {op: DISTINCT, target: X, source: A},\n"
+        + "    {op: FILTER, target: Y, source: X, condition: foo},\n"
+        + "    {op: LIMIT, target: Z, source: Z, count: 3}], expList: [\n"
+        + "    1,\n"
+        + "    name]},\n"
+        + "  {op: FILTER, target: C, source: B, condition: name},\n"
+        + "  {op: DISTINCT, target: D, source: C},\n"
+        + "  {op: ORDER, target: E, source: D},\n"
+        + "  {op: ORDER, target: F, source: E},\n"
+        + "  {op: LIMIT, target: G, source: F, count: -10},\n"
+        + "  {op: GROUP, target: H, source: G},\n"
+        + "  {op: GROUP, target: I, source: H, keys: [\n"
+        + "    e]},\n"
+        + "  {op: GROUP, target: J, source: I, keys: [\n"
+        + "    e1,\n"
+        + "    e2]}]}";
+    pig(s).parseContains(expected);
+  }
+
+  @Test public void testScan() throws ParseException {
+    final String s = "A = LOAD 'EMP';";
+    final String expected = "LogicalTableScan(table=[[scott, EMP]])\n";
+    pig(s).explainContains(expected);
+  }
+
+  @Test public void testDump() throws ParseException {
+    final String s = "A = LOAD 'DEPT';\n"
+        + "DUMP A;";
+    final String expected = "LogicalTableScan(table=[[scott, DEPT]])\n";
+    final String out = "(10,ACCOUNTING,NEW YORK)\n"
+        + "(20,RESEARCH,DALLAS)\n"
+        + "(30,SALES,CHICAGO)\n"
+        + "(40,OPERATIONS,BOSTON)\n";
+    pig(s).explainContains(expected).returns(out);
+  }
+
+  /** VALUES is an extension to Pig. You can achieve the same effect in standard
+   * Pig by creating a text file. */
+  @Test public void testDumpValues() throws ParseException {
+    final String s = "A = VALUES (1, 'a'), (2, 'b') AS (x: int, y: string);\n"
+        + "DUMP A;";
+    final String expected =
+        "LogicalValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n";
+    final String out = "(1,a)\n(2,b)\n";
+    pig(s).explainContains(expected).returns(out);
+  }
+
+  @Test public void testForeach() throws ParseException {
+    final String s = "A = LOAD 'DEPT';\n"
+        + "B = FOREACH A GENERATE DNAME, $2;";
+    final String expected = "LogicalProject(DNAME=[$1], LOC=[$2])\n"
+        + "  LogicalTableScan(table=[[scott, DEPT]])\n";
+    pig(s).explainContains(expected);
+  }
+
+  @Ignore // foreach nested not implemented yet
+  @Test public void testForeachNested() throws ParseException {
+    final String s = "A = LOAD 'EMP';\n"
+        + "B = GROUP A BY DEPTNO;\n"
+        + "C = FOREACH B {\n"
+        + "  D = ORDER A BY SAL DESC;\n"
+        + "  E = LIMIT D 3;\n"
+        + "  GENERATE E.DEPTNO, E.EMPNO;\n"
+        + "}";
+    final String expected = "LogicalProject(DNAME=[$1], LOC=[$2])\n"
+        + "  LogicalTableScan(table=[[scott, DEPT]])\n";
+    pig(s).explainContains(expected);
+  }
+
+  @Test public void testGroup() throws ParseException {
+    final String s = "A = LOAD 'EMP';\n"
+        + "B = GROUP A BY DEPTNO;";
+    final String expected = ""
+        + "LogicalAggregate(group=[{7}], A=[COLLECT($0, $1, $2, $3, $4, $5, $6, $7)])\n"
+        + "  LogicalTableScan(table=[[scott, EMP]])\n";
+    pig(s).explainContains(expected);
+  }
+
+  @Ignore("COLLECT not implemented")
+  @Test public void testGroupExample() throws ParseException {
+    final String pre = "A = VALUES ('John',18,4.0F),\n"
+        + "('Mary',19,3.8F),\n"
+        + "('Bill',20,3.9F),\n"
+        + "('Joe',18,3.8F) AS (name:chararray,age:int,gpa:float);\n";
+    final String b = pre
+        + "B = GROUP A BY age;\n"
+        + "DUMP B;\n";
+    pig(b).returnsUnordered(
+        "(18,{(John,18,4.0F),(Joe,18,3.8F)})",
+        "(19,{(Mary,19,3.8F)})",
+        "(20,{(Bill,20,3.9F)})");
+  }
+
+  @Test public void testDistinctExample() throws ParseException {
+    final String pre = "A = VALUES (8,3,4),\n"
+        + "(1,2,3),\n"
+        + "(4,3,3),\n"
+        + "(4,3,3),\n"
+        + "(1,2,3) AS (a1:int,a2:int,a3:int);\n";
+    final String x = pre
+        + "X = DISTINCT A;\n"
+        + "DUMP X;\n";
+    pig(x).returnsUnordered("(1,2,3)",
+        "(4,3,3)",
+        "(8,3,4)");
+  }
+
+  @Test public void testFilter() throws ParseException {
+    final String s = "A = LOAD 'DEPT';\n"
+        + "B = FILTER A BY DEPTNO;";
+    final String expected = "LogicalFilter(condition=[$0])\n"
+        + "  LogicalTableScan(table=[[scott, DEPT]])\n";
+    pig(s).explainContains(expected);
+  }
+
+  @Test public void testFilterExample() throws ParseException {
+    final String pre = "A = VALUES (1,2,3),\n"
+        + "(4,2,1),\n"
+        + "(8,3,4),\n"
+        + "(4,3,3),\n"
+        + "(7,2,5),\n"
+        + "(8,4,3) AS (f1:int,f2:int,f3:int);\n";
+
+    final String x = pre
+        + "X = FILTER A BY f3 == 3;\n"
+        + "DUMP X;\n";
+    final String expected = "(1,2,3)\n"
+        + "(4,3,3)\n"
+        + "(8,4,3)\n";
+    pig(x).returns(expected);
+
+    final String x2 = pre
+        + "X2 = FILTER A BY (f1 == 8) OR (NOT (f2+f3 > f1));\n"
+        + "DUMP X2;\n";
+    final String expected2 = "(4,2,1)\n"
+        + "(8,3,4)\n"
+        + "(7,2,5)\n"
+        + "(8,4,3)\n";
+    pig(x2).returns(expected2);
+  }
+
+  @Test public void testLimit() throws ParseException {
+    final String s = "A = LOAD 'DEPT';\n"
+        + "B = LIMIT A 3;";
+    final String expected = "LogicalSort(fetch=[3])\n"
+        + "  LogicalTableScan(table=[[scott, DEPT]])\n";
+    pig(s).explainContains(expected);
+  }
+
+  @Test public void testLimitExample() throws ParseException {
+    final String pre = "A = VALUES (1,2,3),\n"
+        + "(4,2,1),\n"
+        + "(8,3,4),\n"
+        + "(4,3,3),\n"
+        + "(7,2,5),\n"
+        + "(8,4,3) AS (f1:int,f2:int,f3:int);\n";
+
+    final String x = pre
+        + "X = LIMIT A 3;\n"
+        + "DUMP X;\n";
+    final String expected = "(1,2,3)\n"
+        + "(4,2,1)\n"
+        + "(8,3,4)\n";
+    pig(x).returns(expected);
+
+    final String x2 = pre
+        + "B = ORDER A BY f1 DESC, f2 ASC;\n"
+        + "X2 = LIMIT B 3;\n"
+        + "DUMP X2;\n";
+    final String expected2 = "(8,3,4)\n"
+        + "(8,4,3)\n"
+        + "(7,2,5)\n";
+    pig(x2).returns(expected2);
+  }
+
+  @Test public void testOrder() throws ParseException {
+    final String s = "A = LOAD 'DEPT';\n"
+        + "B = ORDER A BY DEPTNO DESC, DNAME;";
+    final String expected = ""
+        + "LogicalSort(sort0=[$0], sort1=[$1], dir0=[DESC], dir1=[ASC])\n"
+        + "  LogicalTableScan(table=[[scott, DEPT]])\n";
+    pig(s).explainContains(expected);
+  }
+
+  @Test public void testOrderStar() throws ParseException {
+    final String s = "A = LOAD 'DEPT';\n"
+        + "B = ORDER A BY * DESC;";
+    final String expected = ""
+        + "LogicalSort(sort0=[$0], sort1=[$1], sort2=[$2], dir0=[DESC], dir1=[DESC], dir2=[DESC])\n"
+        + "  LogicalTableScan(table=[[scott, DEPT]])\n";
+    pig(s).explainContains(expected);
+  }
+
+  @Test public void testOrderExample() throws ParseException {
+    final String pre = "A = VALUES (1,2,3),\n"
+        + "(4,2,1),\n"
+        + "(8,3,4),\n"
+        + "(4,3,3),\n"
+        + "(7,2,5),\n"
+        + "(8,4,3) AS (a1:int,a2:int,a3:int);\n";
+
+    final String x = pre
+        + "X = ORDER A BY a3 DESC;\n"
+        + "DUMP X;\n";
+    final String expected = "(7,2,5)\n"
+        + "(8,3,4)\n"
+        + "(1,2,3)\n"
+        + "(4,3,3)\n"
+        + "(8,4,3)\n"
+        + "(4,2,1)\n";
+    pig(x).returns(expected);
+  }
+
+  /** VALUES is an extension to Pig. You can achieve the same effect in standard
+   * Pig by creating a text file. */
+  @Test public void testValues() throws ParseException {
+    final String s = "A = VALUES (1, 'a'), (2, 'b') AS (x: int, y: string);\n"
+        + "DUMP A;";
+    final String expected =
+        "LogicalValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n";
+    pig(s).explainContains(expected);
+  }
+}
+
+// End PigletTest.java

http://git-wip-us.apache.org/repos/asf/incubator-calcite/blob/5cee2a1a/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 4830e62..23567f9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -71,6 +71,7 @@ limitations under the License.
     <module>example</module>
     <module>linq4j</module>
     <module>mongodb</module>
+    <module>piglet</module>
     <module>plus</module>
     <module>spark</module>
     <module>splunk</module>
@@ -563,7 +564,7 @@ limitations under the License.
           <links>
             <link>http://docs.oracle.com/javase/8/docs/api/</link>
           </links>
-          <excludePackageNames>org.apache.calcite.sql.parser.impl</excludePackageNames>
+          <excludePackageNames>org.apache.calcite.sql.parser.impl,org.apache.calcite.piglet.parser</excludePackageNames>
           <tags>
             <tag>
               <name>sql.92</name>