Posted to dev@drill.apache.org by ja...@apache.org on 2013/04/14 04:35:05 UTC

[1/9] Add flatten and join test executions. Abstract graph classes. Update storage engine definition to be a map. Move plan properties to use enum for plan type. Remove unused tests/resources. Update sql parser for change in storage engine definition.

Updated Branches:
  refs/heads/execwork [created] b53933f22
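
As context for the diffs that follow: the storage-engine change named in the subject turns the plan's storage section from an array of entries carrying their own "name" into a map keyed by engine name, and the head "type" from a free-form string into an enum constant. A condensed before/after, distilled from the test-plan diffs in this message (relaxed-JSON syntax as used in those files; "..." stands for the unchanged generator/version fields):

  Before:
    head:    { type: "apache_drill_logical_plan", ... },
    storage: [
      { type: "fs",        name: "fs1", root: "file:///" },
      { type: "classpath", name: "cp" }
    ]

  After:
    head:    { type: "APACHE_DRILL_LOGICAL", ... },
    storage: {
      fs1: { type: "fs", root: "file:///" },
      cp:  { type: "classpath" }
    }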


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/resources/logical_plan1.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/logical_plan1.json b/sandbox/prototype/common/src/test/resources/logical_plan1.json
deleted file mode 100644
index 2d1de3d..0000000
--- a/sandbox/prototype/common/src/test/resources/logical_plan1.json
+++ /dev/null
@@ -1,139 +0,0 @@
-{
-	head: {
-	    type: "apache_drill_logical_plan",
-		version: "1",
-		generator: {
-		  type: "manual",
-		  info: "na"
-		}
-		
-	},
-
-
-	sources: [
-	  {
-  		type: "text",
-  		name: "local-logs",
-	    files: ["local://logs/*.log"],
-	    compress: "gzip",
-	    line-delimiter: "\n",
-	    record-maker: {type: "first-row", delimiter: ","}
-	  },
-	  {
-	    type: "mongo",
-	  	name: "users",
-	    connection: "mongodb://blue:red@localhost/users"
-	  },
-	  {
-	    type: "mysql",
-	    name: "mysql",
-	    connection: "jdbc:mysql://localhost/main"
-	  }
-	  
-	],
-
-	
-	query: [
-
-	  {
-		  @id: "log",
-		  op: "sequence",
-		  do: [
-		 	  {
-				  op: "scan",
-				  source: "local-logs",
-				  name: "activity"
-			  },
-			  { 
-				  op: "transform",
-				  transforms: [
-				    {
-				    	name: "userId", 
-				    	expr: "regex('activity.cookie', \"persistent=([^;]*)\")"
-				    },
-				        
-				    {
-				    	name: "session", 
-				    	expr: "regex('activity.cookie', \"persistent=([^;]*)\")"
-				    }
-				  ]
-			  },
-			  {
-					op: "group",
-				    exprs: [ "sessionId", "session" ]
-			  },
-			  {
-			    op: "order",
-			    orders: [
-			      { direction: "desc", expr: "timestamp" }
-			    ]
-			  },
-			  { 
-			    op: "transform",
-			    transforms: [{name: "productId", expr: "session" }]
-			  },
-			  { 
-			    op: "filter",
-			    expr: "isNull(productId) && startsWith(path, \"/cart/add\")"
-			  },
-			  { op: "aggregate",
-				
-		      }
-			  { 
-			    op: "group",
-			    exprs: ["productId"]
-			  },
-			  { 
-			    op: "combine",
-			    name: "productId"
-		      }, 
-			  { 
-			    op: "nest",
-			    name: "sessions",
-			    exprs: ["productId"]
-			  },		     
-			  { 
-			    op: "nest",
-			    name: "user"
-			  },
-		  ]
-	  },
- 	  {
-		  @id: "users",
-		  op: "scan",
-		  source: "mongo",
-		  table: "users",
-		  name: "users"
- 	  },
-		  { 
- 		    @id: "transactions",
-		    op: "scan",
-		    source: "mysql",
-		    name: "transactions",
-		    table: "purchases"
-		  },	
-	 
-	  {
-	    @id: "usersAndTransactions",
-	    op: "join",
-	    left: "transactions",
-	    right: "users",
-	    conditions: [{left-expr: "userId", right-expr: "id", relationship: "equal"}]
-	  },
-	  { 
-		@id: "allData"
-	    op: "join",
-	    left: "usersAndTransactions",
-	    right: "log",
-	    conditions: [{left-expr: "userId", right-expr: "id", relationship: "equal"}]
-	  },
-	  {
-	    op: "project",
-	    input: "7",
-	    exprs: []
-	  }
-	]
-	
-	
-	
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/resources/simple_plan.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/simple_plan.json b/sandbox/prototype/common/src/test/resources/simple_plan.json
deleted file mode 100644
index 070290d..0000000
--- a/sandbox/prototype/common/src/test/resources/simple_plan.json
+++ /dev/null
@@ -1,134 +0,0 @@
-{
-  head:{
-    type:"apache_drill_logical_plan",
-    version:"1",
-    generator:{
-      type:"manual",
-      info:"na"
-    }
-  },
-  storage:[
-    {
-      type:"text",
-      name:"logs",
-	  file: "local://logs/*.log",
-	  compress:"gzip",
-	  line-delimiter:"\n",
-	  record-maker:{
-	    type:"first-row",
-	    delimiter:","
-      }
-    },
-    {
-      type:"mongo",
-      name:"users",
-      connection:"mongodb://blue:red@localhost/users"
-    },
-    {
-      type:"mysql",
-      name:"mysql",
-      connection:"jdbc:mysql://localhost/main"
-    }
-  ],
-  query:[
-    {
-      @id:"1",
-      op:"scan",
-      memo:"initial_scan",
-      storageengine:"local-logs",
-      selection: {}
-    },
-    {
-      @id:"2",
-      input:"1",
-      memo:"transform1",
-      op:"transform",
-      transforms:[
-        {
-          ref:"userId",
-          expr:"regex_like('activity.cookie', \"persistent=([^;]*)\")"
-        },
-        {
-          ref:"session",
-          expr:"regex_like('activity.cookie', \"session=([^;]*)\")"
-        }
-      ]
-    },
-    {
-      @id:"3",
-      input:"2",
-      memo:"transform2",
-      op:"transform",
-      transforms:[
-        {
-          ref:"userId",
-          expr:"regex_like('activity.cookie', \"persistent=([^;]*)\")"
-        },
-        {
-          ref:"session",
-          expr:"regex_like('activity.cookie', \"session=([^;]*)\")"
-        }
-      ]
-    },
-    {
-      @id:"7",
-      input:"3",
-      op:"sequence",
-      do:[
-        {
-          op:"transform",
-          memo:"seq_transform",
-          transforms:[
-            {
-              ref:"happy",
-              expr:"regex_like('ep2', \"dink\")"
-            }
-          ]
-        }
-        ,
-        {
-          op:"transform",
-          memo:"last_transform",
-          transforms:[
-            {
-              ref:"abc",
-              expr:"123"
-            }
-          ]
-        }
-      ]
-    },
-    {
-      @id:"10",
-      input:"3",
-      op:"transform",
-      memo:"t3",
-      transforms:[
-        {
-          ref:"happy",
-          expr:"regex_like('ep2', \"dink\")"
-        }
-      ]
-    },
-    {
-      @id:12,
-      op:"join",
-      type: "inner",
-      left:"7",
-      right:"10",
-      conditions: [{relationship:"==", left: "1", right: "1" }]
-    }
-    ,
-    {
-      input: 12,
-      op: "store",
-      memo: "output sink",
-      target: {
-        file: "console:///stdout"
-      }
-      
-    }
-
-    
-  ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/resources/storage_engine_plan.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/storage_engine_plan.json b/sandbox/prototype/common/src/test/resources/storage_engine_plan.json
index e02c481..efde80d 100644
--- a/sandbox/prototype/common/src/test/resources/storage_engine_plan.json
+++ b/sandbox/prototype/common/src/test/resources/storage_engine_plan.json
@@ -1,19 +1,18 @@
 {
   head:{
-    type:"apache_drill_logical_plan",
+    type:"APACHE_DRILL_LOGICAL",
     version:"1",
     generator:{
       type:"manual",
       info:"na"
     }
   },
-  storage:[
-    {
+  storage:{
+    mock-engine: {
       type:"mock",
-      name:"mock-engine",
 	  url: "http://www.apache.org/"
     }
-  ],
+  },
   query:[
     {
       @id:"1",

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/pom.xml
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/pom.xml b/sandbox/prototype/exec/ref/pom.xml
index 2f1a33f..b253f6b 100644
--- a/sandbox/prototype/exec/ref/pom.xml
+++ b/sandbox/prototype/exec/ref/pom.xml
@@ -24,7 +24,7 @@
 		<dependency>
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-core</artifactId>
-			<version>1.1.0</version>
+			<version>1.1.1</version>
 			<exclusions>
 				<exclusion>
 					<artifactId>jets3t</artifactId>
@@ -36,6 +36,7 @@
 				</exclusion>
 			</exclusions>
 		</dependency>
+		
 
 		<dependency>
 			<groupId>com.carrotsearch</groupId>
@@ -43,6 +44,11 @@
 			<version>0.4.2</version>
 		</dependency>
 
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-examples</artifactId>
+			<version>1.1.1</version>
+		</dependency>
 	</dependencies>
 
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rops/OrderROP.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rops/OrderROP.java b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rops/OrderROP.java
index 969df71..a692dbf 100644
--- a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rops/OrderROP.java
+++ b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rops/OrderROP.java
@@ -4,9 +4,9 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.drill.common.defs.OrderDef;
+import org.apache.drill.common.defs.OrderDef.Direction;
 import org.apache.drill.common.logical.data.Order;
-import org.apache.drill.common.logical.data.Order.Direction;
-import org.apache.drill.common.logical.data.Order.Ordering;
 import org.apache.drill.exec.ref.RecordIterator;
 import org.apache.drill.exec.ref.RecordPointer;
 import org.apache.drill.exec.ref.eval.EvaluatorFactory;
@@ -33,7 +33,7 @@ public class OrderROP extends AbstractBlockingOperator<Order> {
 
   @Override
   protected void setupEvals(EvaluatorFactory builder) {
-    Ordering[] orderings = config.getOrderings();
+    OrderDef[] orderings = config.getOrderings();
     withinConstrained = config.getWithin() != null;
     if (withinConstrained) {
       withinExtra = 1;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ClasspathRSE.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ClasspathRSE.java b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ClasspathRSE.java
index d382e38..aa8186d 100644
--- a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ClasspathRSE.java
+++ b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ClasspathRSE.java
@@ -48,10 +48,6 @@ public class ClasspathRSE extends RSEBase {
   
   @JsonTypeName("classpath")
   public static class ClasspathRSEConfig extends StorageEngineConfigBase {
-    @JsonCreator
-    public ClasspathRSEConfig(@JsonProperty("name") String name) {
-      super(name);
-    }
   }
   
   public static class ClasspathInputConfig implements ReadEntry{
@@ -66,7 +62,7 @@ public class ClasspathRSE extends RSEBase {
 
   @Override
   public Collection<ReadEntry> getReadEntries(Scan scan) throws IOException {
-    ClasspathInputConfig c = scan.getSelection().getWith(ClasspathInputConfig.class);
+    ClasspathInputConfig c = scan.getSelection().getWith(dConfig, ClasspathInputConfig.class);
     c.rootPath = scan.getOutputReference();
     return Collections.singleton((ReadEntry) c);
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ConsoleRSE.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ConsoleRSE.java b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ConsoleRSE.java
index 76061ac..1570ea9 100644
--- a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ConsoleRSE.java
+++ b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/ConsoleRSE.java
@@ -24,19 +24,19 @@ import org.apache.drill.common.logical.StorageEngineConfigBase;
 import org.apache.drill.common.logical.data.Store;
 import org.apache.drill.exec.ref.rops.DataWriter.ConverterType;
 
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 
 public class ConsoleRSE extends RSEBase {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ConsoleRSE.class);
-
+  
+  private final DrillConfig dConfig;
+  
   public static enum Pipe {
     STD_OUT, STD_ERR
   };
 
-  public ConsoleRSE(ConsoleRSEConfig engineConfig, DrillConfig config){
-    
+  public ConsoleRSE(ConsoleRSEConfig engineConfig, DrillConfig dConfig){
+    this.dConfig = dConfig;
   }
   
   public static class ConsoleOutputConfig {
@@ -44,21 +44,15 @@ public class ConsoleRSE extends RSEBase {
     public ConverterType type = ConverterType.JSON;
   }
   
-  @JsonTypeName("console")
-  public static class ConsoleRSEConfig extends StorageEngineConfigBase {
-
-    @JsonCreator
-    public ConsoleRSEConfig(@JsonProperty("name") String name) {
-      super(name);
-    }
-  }
+  @JsonTypeName("console") public static class ConsoleRSEConfig extends StorageEngineConfigBase {}
+  
   public boolean supportsWrite() {
     return true;
   }
 
   @Override
   public RecordRecorder getWriter(Store store) {
-    ConsoleOutputConfig config = store.getTarget().getWith(ConsoleOutputConfig.class);
+    ConsoleOutputConfig config = store.getTarget().getWith(dConfig, ConsoleOutputConfig.class);
     OutputStream out = config.pipe == Pipe.STD_OUT ? System.out : System.err;
     return new OutputStreamWriter(out, config.type, false);
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/FileSystemRSE.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/FileSystemRSE.java b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/FileSystemRSE.java
index 06a31f5..522191b 100644
--- a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/FileSystemRSE.java
+++ b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/FileSystemRSE.java
@@ -67,9 +67,9 @@ public class FileSystemRSE extends RSEBase {
   @JsonTypeName("fs")
   public static class FileSystemRSEConfig extends StorageEngineConfigBase {
     private String root;
+
     @JsonCreator
-    public FileSystemRSEConfig(@JsonProperty("name") String name, @JsonProperty("root") String root) {
-      super(name);
+    public FileSystemRSEConfig(@JsonProperty("root") String root) {
       this.root = root;
     }
   }
@@ -112,7 +112,7 @@ public class FileSystemRSE extends RSEBase {
 
   @Override
   public RecordRecorder getWriter(Store store) throws IOException {
-    FileSystemOutputConfig config = store.getTarget().getWith(FileSystemOutputConfig.class);
+    FileSystemOutputConfig config = store.getTarget().getWith(dConfig, FileSystemOutputConfig.class);
     OutputStream out = fs.create(new Path(basePath, config.file));
     return new OutputStreamWriter(out, config.type, true);
   }
@@ -120,7 +120,7 @@ public class FileSystemRSE extends RSEBase {
   @Override
   public Collection<ReadEntry> getReadEntries(Scan scan) throws IOException {
     Set<ReadEntry> s = new HashSet<ReadEntry>();
-    for(FileSpec f : scan.getSelection().getWith(FileSystemInputConfig.class).files){
+    for(FileSpec f : scan.getSelection().getWith(dConfig, FileSystemInputConfig.class).files){
       s.add(new FSEntry(f, scan.getOutputReference()));
     }
     return s;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/QueueRSE.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/QueueRSE.java b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/QueueRSE.java
index 623e752..9a0a132 100644
--- a/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/QueueRSE.java
+++ b/sandbox/prototype/exec/ref/src/main/java/org/apache/drill/exec/ref/rse/QueueRSE.java
@@ -31,8 +31,6 @@ import org.apache.drill.exec.ref.RecordPointer;
 import org.apache.drill.exec.ref.RunOutcome.OutcomeType;
 import org.apache.drill.exec.ref.exceptions.SetupException;
 
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 
 public class QueueRSE extends RSEBase {
@@ -50,13 +48,7 @@ public class QueueRSE extends RSEBase {
     return sinkQueues.get(number);
   }
   
-  @JsonTypeName("queue")
-  public static class QueueRSEConfig extends StorageEngineConfigBase {
-    @JsonCreator
-    public QueueRSEConfig(@JsonProperty("name") String name) {
-      super(name);
-    }
-  }
+  @JsonTypeName("queue") public static class QueueRSEConfig extends StorageEngineConfigBase {}
   
   public static class QueueOutputInfo{
     public int number;
@@ -69,7 +61,7 @@ public class QueueRSE extends RSEBase {
   
   @Override
   public RecordRecorder getWriter(Store store) throws IOException {
-    QueueOutputInfo config = store.getTarget().getWith(QueueOutputInfo.class);
+    QueueOutputInfo config = store.getTarget().getWith(dConfig, QueueOutputInfo.class);
     Queue<Object> q = dConfig.getQueue(config.number);
     return new QueueRecordRecorder(q);
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/test/java/org/apache/drill/exec/ref/RunSimplePlan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/test/java/org/apache/drill/exec/ref/RunSimplePlan.java b/sandbox/prototype/exec/ref/src/test/java/org/apache/drill/exec/ref/RunSimplePlan.java
index e15c568..110e655 100644
--- a/sandbox/prototype/exec/ref/src/test/java/org/apache/drill/exec/ref/RunSimplePlan.java
+++ b/sandbox/prototype/exec/ref/src/test/java/org/apache/drill/exec/ref/RunSimplePlan.java
@@ -46,4 +46,28 @@ public class RunSimplePlan{
     assertEquals(outcomes.size(), 1);
     assertEquals(outcomes.iterator().next().records, 2);
   }
+  
+  @Test
+  public void joinPlan() throws Exception{
+    DrillConfig config = DrillConfig.create();
+    LogicalPlan plan = LogicalPlan.parse(config, Files.toString(FileUtils.getResourceAsFile("/simple_join.json"), Charsets.UTF_8));
+    IteratorRegistry ir = new IteratorRegistry();
+    ReferenceInterpreter i = new ReferenceInterpreter(plan, ir, new BasicEvaluatorFactory(ir), new RSERegistry(config));
+    i.setup();
+    Collection<RunOutcome> outcomes = i.run();
+    assertEquals(outcomes.size(), 1);
+    assertEquals(outcomes.iterator().next().outcome, RunOutcome.OutcomeType.SUCCESS);
+  }
+  
+  @Test
+  public void flattenPlan() throws Exception{
+    DrillConfig config = DrillConfig.create();
+    LogicalPlan plan = LogicalPlan.parse(config, Files.toString(FileUtils.getResourceAsFile("/simple_plan_flattened.json"), Charsets.UTF_8));
+    IteratorRegistry ir = new IteratorRegistry();
+    ReferenceInterpreter i = new ReferenceInterpreter(plan, ir, new BasicEvaluatorFactory(ir), new RSERegistry(config));
+    i.setup();
+    Collection<RunOutcome> outcomes = i.run();
+    assertEquals(outcomes.size(), 1);
+    assertEquals(outcomes.iterator().next().outcome, RunOutcome.OutcomeType.SUCCESS);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/test/resources/simple_join.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/test/resources/simple_join.json b/sandbox/prototype/exec/ref/src/test/resources/simple_join.json
index 37e2a61..ba078d5 100644
--- a/sandbox/prototype/exec/ref/src/test/resources/simple_join.json
+++ b/sandbox/prototype/exec/ref/src/test/resources/simple_join.json
@@ -1,27 +1,17 @@
 {
    head: {
-      type: "apache_drill_logical_plan",
+      type: "APACHE_DRILL_LOGICAL",
       version: "1",
       generator: {
          type: "manual",
          info: "na"
       }
    },
-   storage:[
-       {
-         type:"console",
-         name:"console"
-       },
-       {
-         type:"fs",
-         name:"fs1",
-         root:"file:///"
-       },
-       {
-         type:"classpath",
-         name:"cp"
-       }
-   ],
+   storage: {
+       console: {type:"console"},
+       fs1: {type:"fs", root:"file:///"},
+	   cp: {type:"classpath"}
+   },
    query: [
       {
          @id: 1,
@@ -61,7 +51,7 @@
       },
       {
          input: 3,
-         op: "write",
+         op: "store",
          memo: "output sink",
          storageengine: "console",
          target: {pipe: "STD_OUT"}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/test/resources/simple_plan.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/test/resources/simple_plan.json b/sandbox/prototype/exec/ref/src/test/resources/simple_plan.json
index 34297b4..0f38476 100644
--- a/sandbox/prototype/exec/ref/src/test/resources/simple_plan.json
+++ b/sandbox/prototype/exec/ref/src/test/resources/simple_plan.json
@@ -1,27 +1,17 @@
 {
   head:{
-    type:"apache_drill_logical_plan",
+    type:"APACHE_DRILL_LOGICAL",
     version:"1",
     generator:{
       type:"manual",
       info:"na"
     }
   },
-  storage:[
-    {
-      type:"console",
-      name:"console"
-    },
-    {
-      type:"fs",
-      name:"fs1",
-      root:"file:///"
-    },
-    {
-      type:"classpath",
-      name:"cp"
-    }
-  ],
+  storage:{
+    console: {type: "console"},
+    fs1: {type: "fs", root:"file:///"},
+    cp: {type: "classpath"}
+  },
   query:[
     {
       op:"sequence",
@@ -64,7 +54,7 @@
 	    {
 	      op: "order",
 	      orderings: [
-	        {order: "desc", expr: "donuts.ppu" }
+	        {order: "DESC", expr: "donuts.ppu" }
 	      ]
 	    }, 
 	    {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/exec/ref/src/test/resources/simple_plan_flattened.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/src/test/resources/simple_plan_flattened.json b/sandbox/prototype/exec/ref/src/test/resources/simple_plan_flattened.json
index 7e38b4e..1601b4a 100644
--- a/sandbox/prototype/exec/ref/src/test/resources/simple_plan_flattened.json
+++ b/sandbox/prototype/exec/ref/src/test/resources/simple_plan_flattened.json
@@ -1,21 +1,17 @@
 {
   head:{
-    type:"apache_drill_logical_plan",
+    type:"APACHE_DRILL_LOGICAL",
     version:"1",
     generator:{
       type:"manual",
       info:"na"
     }
   },
-  sources:[
-    {
-      type:"json",
-      name:"donuts-json",
-      files:[
-        "src/test/resources/donuts.json"
-      ]
-    }
-  ],
+  storage:{
+    console: {type:"console"},
+    fs1: {type:"fs", root:"file:///"},
+	cp: {type:"classpath"}  
+  },
   query:[
     {
       op:"sequence",
@@ -24,8 +20,11 @@
 	      op: "scan",
 	      memo: "initial_scan",
 	      ref: "donuts",
-	      source: "donuts-json",
-	      selection: {data: "activity"}
+	      storageengine: "cp",
+	      selection: {
+	        path: "/donuts.json",
+	        type: "JSON"
+	      }
 	    },
 	    {
 	      op: "transform",
@@ -44,9 +43,10 @@
           drop: 1
         },
 	    {
-	      op: "write",
+	      op: "store",
 	      memo: "output sink",
-	      file: "console:///stdout"
+	      storageengine: "console",
+          target: {pipe: "STD_OUT"}
 	    }
       ]
     }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java
index 0a0594c..ceb2027 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/jdbc/Driver.java
@@ -73,7 +73,7 @@ public class Driver extends UnregisteredDriver {
           MapSchema.create(connection, rootSchema, schemaName);
 
       connection.setSchema(schemaName);
-      final ClasspathRSEConfig rseConfig = new ClasspathRSEConfig("donuts-json");
+      final ClasspathRSEConfig rseConfig = new ClasspathRSEConfig();
       final ClasspathInputConfig inputConfig = new ClasspathInputConfig();
       inputConfig.path = "/donuts.json";
       inputConfig.type = ConverterType.JSON; 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java
index 445b118..29c4d12 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillImplementor.java
@@ -43,7 +43,7 @@ public class DrillImplementor {
   public DrillImplementor() {
     final ObjectNode headNode = mapper.createObjectNode();
     rootNode.put("head", headNode);
-    headNode.put("type", "apache_drill_logical_plan");
+    headNode.put("type", "APACHE_DRILL_LOGICAL");
     headNode.put("version", "1");
 
     final ObjectNode generatorNode = mapper.createObjectNode();
@@ -53,21 +53,19 @@ public class DrillImplementor {
 
     // TODO: populate sources based on the sources of scans that occur in
     // the query
-    final ArrayNode sourcesNode = mapper.createArrayNode();
+    final ObjectNode sourcesNode = mapper.createObjectNode();
     rootNode.put("storage", sourcesNode);
     
     // input file source
     {
       final ObjectNode sourceNode = mapper.createObjectNode();
-      sourceNode.put("name", "donuts-json");
       sourceNode.put("type", "classpath");
-      sourcesNode.add(sourceNode);
+      sourcesNode.put("donuts-json", sourceNode);
     }
     {
       final ObjectNode sourceNode = mapper.createObjectNode();
-      sourceNode.put("name", "queue");
       sourceNode.put("type", "queue");
-      sourcesNode.add(sourceNode);
+      sourcesNode.put("queue", sourceNode);
     }
     
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java
index c18baaf..2e2849a 100644
--- a/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java
+++ b/sandbox/prototype/sqlparser/src/main/java/org/apache/drill/optiq/DrillScan.java
@@ -38,7 +38,8 @@ public class DrillScan extends TableAccessRelBase implements DrillRel {
     node.put("op", "scan");
     node.put("memo", "initial_scan");
     node.put("ref", "donuts");
-    node.put("storageengine", drillTable.storageEngineConfig.getName());
+    final ObjectNode engines = implementor.mapper.createObjectNode();
+    node.put("storageengine", "donuts-json");
     node.put("selection", implementor.mapper.convertValue(drillTable.selection, JsonNode.class));
     implementor.add(node);
   }


[7/9] basic framework for physical plan. abstraction of graph classes.

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/DeadBuf.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/DeadBuf.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/DeadBuf.java
new file mode 100644
index 0000000..dafb68c
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/DeadBuf.java
@@ -0,0 +1,848 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.GatheringByteChannel;
+import java.nio.channels.ScatteringByteChannel;
+import java.nio.charset.Charset;
+
+import io.netty.buffer.BufType;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.ByteBufIndexFinder;
+
+public class DeadBuf implements ByteBuf {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DeadBuf.class);
+  
+  private static final String ERROR_MESSAGE = "Attempted to access a DeadBuf.  This would happen if you attempted to interact with a buffer that has been moved or not yet initialized.";
+  
+  public static final DeadBuf DEAD_BUFFER = new DeadBuf();
+
+  private DeadBuf(){}
+  
+  @Override
+  public BufType type() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public boolean isReadable(int size) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public boolean isWritable(int size) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int refCnt() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public boolean release() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public boolean release(int decrement) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int capacity() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf capacity(int newCapacity) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int maxCapacity() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBufAllocator alloc() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteOrder order() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf order(ByteOrder endianness) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf unwrap() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public boolean isDirect() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int readerIndex() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf readerIndex(int readerIndex) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int writerIndex() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf writerIndex(int writerIndex) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setIndex(int readerIndex, int writerIndex) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int readableBytes() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int writableBytes() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int maxWritableBytes() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public boolean isReadable() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  @Deprecated
+  public boolean readable() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public boolean isWritable() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  @Deprecated
+  public boolean writable() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf clear() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf markReaderIndex() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf resetReaderIndex() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf markWriterIndex() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf resetWriterIndex() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf discardReadBytes() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf discardSomeReadBytes() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf ensureWritable(int minWritableBytes) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  @Deprecated
+  public ByteBuf ensureWritableBytes(int minWritableBytes) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int ensureWritable(int minWritableBytes, boolean force) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public boolean getBoolean(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public byte getByte(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public short getUnsignedByte(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public short getShort(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int getUnsignedShort(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int getMedium(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int getUnsignedMedium(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int getInt(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public long getUnsignedInt(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public long getLong(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public char getChar(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public float getFloat(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public double getDouble(int index) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf getBytes(int index, ByteBuf dst) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf getBytes(int index, ByteBuf dst, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf getBytes(int index, byte[] dst) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf getBytes(int index, ByteBuffer dst) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setBoolean(int index, boolean value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setByte(int index, int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setShort(int index, int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setMedium(int index, int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setInt(int index, int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setLong(int index, long value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setChar(int index, int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setFloat(int index, float value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setDouble(int index, double value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setBytes(int index, ByteBuf src) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setBytes(int index, ByteBuf src, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setBytes(int index, byte[] src) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setBytes(int index, ByteBuffer src) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int setBytes(int index, InputStream in, int length) throws IOException {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf setZero(int index, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public boolean readBoolean() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public byte readByte() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public short readUnsignedByte() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public short readShort() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int readUnsignedShort() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int readMedium() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int readUnsignedMedium() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int readInt() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public long readUnsignedInt() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public long readLong() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public char readChar() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public float readFloat() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public double readDouble() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readBytes(int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readSlice(int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readBytes(ByteBuf dst) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readBytes(ByteBuf dst, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readBytes(byte[] dst) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readBytes(byte[] dst, int dstIndex, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readBytes(ByteBuffer dst) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf readBytes(OutputStream out, int length) throws IOException {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int readBytes(GatheringByteChannel out, int length) throws IOException {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf skipBytes(int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeBoolean(boolean value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeByte(int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeShort(int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeMedium(int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeInt(int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeLong(long value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeChar(int value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeFloat(float value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeDouble(double value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeBytes(ByteBuf src) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeBytes(ByteBuf src, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeBytes(byte[] src) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeBytes(byte[] src, int srcIndex, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeBytes(ByteBuffer src) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int writeBytes(InputStream in, int length) throws IOException {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int writeBytes(ScatteringByteChannel in, int length) throws IOException {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf writeZero(int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int indexOf(int fromIndex, int toIndex, byte value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int indexOf(int fromIndex, int toIndex, ByteBufIndexFinder indexFinder) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int bytesBefore(byte value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int bytesBefore(ByteBufIndexFinder indexFinder) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int bytesBefore(int length, byte value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int bytesBefore(int length, ByteBufIndexFinder indexFinder) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int bytesBefore(int index, int length, byte value) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int bytesBefore(int index, int length, ByteBufIndexFinder indexFinder) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf copy() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf copy(int index, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf slice() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf slice(int index, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf duplicate() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int nioBufferCount() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuffer nioBuffer() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuffer nioBuffer(int index, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuffer[] nioBuffers() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuffer[] nioBuffers(int index, int length) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public boolean hasArray() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public byte[] array() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public int arrayOffset() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public boolean hasMemoryAddress() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public long memoryAddress() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public String toString(Charset charset) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public String toString(int index, int length, Charset charset) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+
+  }
+
+  @Override
+  public ByteBuf suspendIntermediaryDeallocations() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf resumeIntermediaryDeallocations() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public int compareTo(ByteBuf buffer) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf retain(int increment) {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+
+  @Override
+  public ByteBuf retain() {
+    throw new UnsupportedOperationException(ERROR_MESSAGE);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/InvalidValueAccessor.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/InvalidValueAccessor.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/InvalidValueAccessor.java
new file mode 100644
index 0000000..403c7a3
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/InvalidValueAccessor.java
@@ -0,0 +1,46 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record;
+
+import org.apache.drill.exec.exception.ExecutionSetupException;
+
+public class InvalidValueAccessor extends ExecutionSetupException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(InvalidValueAccessor.class);
+
+  public InvalidValueAccessor() {
+    super();
+  }
+
+  public InvalidValueAccessor(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+
+  public InvalidValueAccessor(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public InvalidValueAccessor(String message) {
+    super(message);
+  }
+
+  public InvalidValueAccessor(Throwable cause) {
+    super(cause);
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
new file mode 100644
index 0000000..3cadf89
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
@@ -0,0 +1,79 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record;
+
+import org.apache.drill.common.expression.types.DataType;
+import org.apache.drill.common.physical.RecordField.ValueMode;
+import org.apache.drill.exec.exception.SchemaChangeException;
+
+public class MaterializedField implements Comparable<MaterializedField>{
+  private int fieldId;
+  private DataType type;
+  private boolean nullable;
+  private ValueMode mode;
+  private Class<?> valueClass;
+  
+  public MaterializedField(int fieldId, DataType type, boolean nullable, ValueMode mode, Class<?> valueClass) {
+    super();
+    this.fieldId = fieldId;
+    this.type = type;
+    this.nullable = nullable;
+    this.mode = mode;
+    this.valueClass = valueClass;
+  }
+
+  public int getFieldId() {
+    return fieldId;
+  }
+
+  public DataType getType() {
+    return type;
+  }
+
+  public boolean isNullable() {
+    return nullable;
+  }
+
+  public ValueMode getMode() {
+    return mode;
+  }
+
+  public Class<?> getValueClass() {
+    return valueClass;
+  }
+
+  private void check(String name, Object val1, Object expected) throws SchemaChangeException{
+    if(expected.equals(val1)) return;
+    throw new SchemaChangeException("Expected and actual field definitions don't match. Actual %s: %s, expected %s: %s", name, val1, name, expected);
+  }
+  
+  public void checkMaterialization(MaterializedField expected) throws SchemaChangeException{
+    if(this.type != expected.type && expected.type != DataType.LATEBIND) throw new SchemaChangeException("Expected and actual field definitions don't match. Actual DataType: %s, expected DataType: %s", this.type, expected.type);
+    if(expected.valueClass != null) check("valueClass", this.valueClass, expected.valueClass);
+    check("fieldId", this.fieldId, expected.fieldId);
+    check("nullability", this.nullable, expected.nullable);
+    check("valueMode", this.mode, expected.mode);
+  }
+
+  @Override
+  public int compareTo(MaterializedField o) {
+    return Integer.compare(this.fieldId, o.fieldId);
+  }
+  
+  
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java
new file mode 100644
index 0000000..eca62bb
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java
@@ -0,0 +1,85 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record;
+
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.record.vector.ValueVector;
+
+/**
+ * A record batch contains a set of field values for a particular range of records. In the case of a record batch
+ * composed of ValueVectors, ideally a batch fits within L2 cache (~256k per core). The set of value vectors does not
+ * change unless the next() IterOutcome is a *_NEW_SCHEMA type.
+ */
+public interface RecordBatch {
+
+  /**
+   * Describes the outcome of a RecordBatch being incremented forward.
+   */
+  public static enum IterOutcome {
+    NONE, // No more records were found.
+    OK, // A new range of records has been provided.
+    OK_NEW_SCHEMA, // A new range of records has been provided along with a changed schema.
+    STOP // Informs parent nodes that the query has terminated. In this case, a consumer can consult its QueryContext
+         // to understand the current state of things.
+  }
+
+  /**
+   * Access the FragmentContext of the current query fragment. Useful for reporting failure information or other query
+   * level information.
+   * 
+   * @return the FragmentContext of the fragment that this batch is operating within.
+   */
+  public FragmentContext getContext();
+
+  /**
+   * Provide the schema of the current RecordBatch. This changes if and only if a *_NEW_SCHEMA IterOutcome is provided.
+   * 
+   * @return the BatchSchema describing the current set of value vectors.
+   */
+  public BatchSchema getSchema();
+
+  /**
+   * Provide the number of records that are within this record batch.
+   * 
+   * @return the number of records in the current batch.
+   */
+  public int getRecordCount();
+
+  /**
+   * Inform child nodes that this query should be terminated. Child nodes should utilize the QueryContext to determine
+   * what has happened.
+   */
+  public void kill();
+
+
+  public abstract <T extends ValueVector<T>> T getValueVector(int fieldId, Class<T> clazz) throws InvalidValueAccessor;
+
+//  public abstract void getDictReader(int fieldId, Class<?> clazz) throws InvalidValueAccessor;
+//
+//  public abstract void getRleReader(int fieldId, Class<?> clazz) throws InvalidValueAccessor;
+
+  /**
+   * Update the data in each Field reading interface for the next range of records. Once a RecordBatch returns
+   * IterOutcome.NONE, the consumer should not call next() again. Behavior at that point is undefined and likely to
+   * throw an exception.
+   * 
+   * @return An IterOutcome describing the result of the iteration.
+   */
+  public IterOutcome next();
+
+}
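
For reference, the interface above implies a simple consumption loop in a downstream operator.
The sketch below is illustrative only; the "incoming" variable and the field id passed to
getValueVector() are assumptions, not part of this patch:

    RecordBatch incoming = ...; // provided by an upstream operator (placeholder)
    while (true) {
      RecordBatch.IterOutcome outcome = incoming.next();
      if (outcome == RecordBatch.IterOutcome.NONE || outcome == RecordBatch.IterOutcome.STOP) break;
      if (outcome == RecordBatch.IterOutcome.OK_NEW_SCHEMA) {
        BatchSchema schema = incoming.getSchema(); // re-resolve field ids and vectors here
      }
      int recordCount = incoming.getRecordCount();
      // e.g. Int32Vector ints = incoming.getValueVector(0, Int32Vector.class); // field id 0 assumed
    }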

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordMaker.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordMaker.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordMaker.java
new file mode 100644
index 0000000..9bc6e5f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordMaker.java
@@ -0,0 +1,22 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record;
+
+public class RecordMaker {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RecordMaker.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/AbstractFixedValueVector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/AbstractFixedValueVector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/AbstractFixedValueVector.java
new file mode 100644
index 0000000..735493d
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/AbstractFixedValueVector.java
@@ -0,0 +1,60 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import io.netty.buffer.ByteBuf;
+
+import org.apache.drill.exec.BufferAllocator;
+
+/**
+ * Abstract class that fixed value vectors are derived from.
+ */
+abstract class AbstractFixedValueVector<T extends AbstractFixedValueVector<T>> extends BaseValueVector<T> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractFixedValueVector.class);
+
+  private final int widthInBits;
+
+  protected int longWords = 0;
+
+  public AbstractFixedValueVector(int fieldId, BufferAllocator allocator, int widthInBits) {
+    super(fieldId, allocator);
+    this.widthInBits = widthInBits;
+  }
+  
+  @Override
+  protected int getAllocationSize(int valueCount) {
+    return (int) Math.ceil(valueCount*widthInBits*1.0/8);
+  }
+  
+  @Override
+  protected void childResetAllocation(int valueCount, ByteBuf buf) {
+    this.longWords = valueCount/8;
+  }
+
+  @Override
+  protected void childCloneMetadata(T other) {
+    other.longWords = this.longWords;
+  }
+
+  @Override
+  protected void childClear() {
+    longWords = 0;
+  }
+
+}
+
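
For a quick sense of the getAllocationSize() arithmetic above (valueCount * widthInBits / 8,
rounded up), using the bit widths of the concrete vectors later in this patch:

    int bitBytes  = (int) Math.ceil(4096 * 1 * 1.0 / 8);   //   512 bytes for a 1-bit wide vector (BitVector)
    int byteBytes = (int) Math.ceil(4096 * 8 * 1.0 / 8);   //  4096 bytes for an 8-bit wide vector (ByteVector)
    int intBytes  = (int) Math.ceil(4096 * 32 * 1.0 / 8);  // 16384 bytes for a 32-bit wide vector (Int32Vector)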

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/AnyVector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/AnyVector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/AnyVector.java
new file mode 100644
index 0000000..6becfcd
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/AnyVector.java
@@ -0,0 +1,30 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+/**
+ * A Vector that holds each value along with its type information. In the case of complex types, an any vector will
+ * inline the complex type within the value space.
+ */
+public class AnyVector {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AnyVector.class);
+
+  
+   
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BaseValueVector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BaseValueVector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BaseValueVector.java
new file mode 100644
index 0000000..33a81e5
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BaseValueVector.java
@@ -0,0 +1,104 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import io.netty.buffer.ByteBuf;
+
+import org.apache.drill.exec.BufferAllocator;
+import org.apache.drill.exec.record.DeadBuf;
+
+public abstract class BaseValueVector<T extends BaseValueVector<T>> implements ValueVector<T>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BaseValueVector.class);
+  
+  protected final BufferAllocator allocator;
+  protected ByteBuf data = DeadBuf.DEAD_BUFFER;
+  protected int valueCount = 0;
+  protected final int fieldId;
+  
+  public BaseValueVector(int fieldId, BufferAllocator allocator) {
+    this.allocator = allocator;
+    this.fieldId = fieldId;
+  }
+
+  public final void allocateNew(int valueCount){
+    int allocationSize = getAllocationSize(valueCount);
+    resetAllocation(valueCount, allocator.buffer(allocationSize));
+  }
+
+  protected abstract int getAllocationSize(int valueCount);
+  protected abstract void childResetAllocation(int valueCount, ByteBuf buf);
+  protected abstract void childCloneMetadata(T other);
+  protected abstract void childClear();
+  
+  protected final void resetAllocation(int valueCount, ByteBuf buf){
+    clear();
+    this.valueCount = valueCount;
+    this.data = buf;
+    childResetAllocation(valueCount, buf);
+  }
+  
+  public final void cloneMetadata(T other){
+    other.valueCount = this.valueCount;
+  }
+  
+  @Override
+  public final void cloneInto(T vector) {
+    vector.allocateNew(valueCount);
+    data.writeBytes(vector.data);
+    cloneMetadata(vector);
+    childResetAllocation(valueCount, vector.data);
+  }
+  
+  @Override
+  public final void transferTo(T vector) {
+    vector.data = this.data;
+    cloneMetadata(vector);
+    childResetAllocation(valueCount, data);
+    clear();
+  }
+
+  protected final void clear(){
+    if(this.data != DeadBuf.DEAD_BUFFER){
+      this.data.release();
+      this.data = DeadBuf.DEAD_BUFFER;
+      this.valueCount = 0;
+    }
+    childClear();
+  }
+  
+  /**
+   * Give the number of values currently allocated in this value vector.
+   * 
+   * @return the allocated value count.
+   */
+  public int size() {
+    return valueCount;
+  }
+  
+  @Override
+  public void close() {
+    clear();
+  }
+
+  @Override
+  public ByteBuf getBuffer() {
+    return data;
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BitUtil.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BitUtil.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BitUtil.java
new file mode 100644
index 0000000..4a3ae2f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BitUtil.java
@@ -0,0 +1,108 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;  // from org.apache.solr.util rev 555343
+
+import io.netty.buffer.ByteBuf;
+
+
+/**  A variety of high efficiency bit twiddling routines.
+ * @lucene.internal
+ */
+public final class BitUtil {
+
+  private BitUtil() {} // no instance
+
+  // The pop methods used to rely on bit-manipulation tricks for speed but it
+  // turns out that it is faster to use the Long.bitCount method (which is an
+  // intrinsic since Java 6u18) in a naive loop, see LUCENE-2221
+
+  /** Returns the number of set bits in an array of longs. */
+  public static long pop_array(ByteBuf arr, int wordOffset, int numWords) {
+    long popCount = 0;
+    for (int i = wordOffset, end = wordOffset + numWords; i < end; i+=8) {
+      popCount += Long.bitCount(arr.getLong(i));
+    }
+    return popCount;
+  }
+
+  /** Returns the popcount or cardinality of the two sets after an intersection.
+   *  Neither array is modified. */
+  public static long pop_intersect(ByteBuf arr1, ByteBuf arr2, int wordOffset, int numWords) {
+    long popCount = 0;
+    for (int i = wordOffset, end = wordOffset + numWords; i < end; i+=8) {
+      popCount += Long.bitCount(arr1.getLong(i) & arr2.getLong(i));
+    }
+    return popCount;
+  }
+
+   /** Returns the popcount or cardinality of the union of two sets.
+    *  Neither array is modified. */
+   public static long pop_union(ByteBuf arr1, ByteBuf arr2, int wordOffset, int numWords) {
+     long popCount = 0;
+     for (int i = wordOffset, end = wordOffset + numWords; i < end; i+=8) {
+       popCount += Long.bitCount(arr1.getLong(i) | arr2.getLong(i));
+     }
+     return popCount;
+   }
+
+  /** Returns the popcount or cardinality of A & ~B.
+   *  Neither array is modified. */
+  public static long pop_andnot(ByteBuf arr1, ByteBuf arr2, int wordOffset, int numWords) {
+    long popCount = 0;
+    for (int i = wordOffset, end = wordOffset + numWords; i < end; i+=8) {
+      popCount += Long.bitCount(arr1.getLong(i) & ~arr2.getLong(i));
+    }
+    return popCount;
+  }
+
+  /** Returns the popcount or cardinality of A ^ B
+    * Neither array is modified. */
+  public static long pop_xor(ByteBuf arr1, ByteBuf arr2, int wordOffset, int numWords) {
+    long popCount = 0;
+    for (int i = wordOffset, end = wordOffset + numWords; i < end; i+=8) {
+      popCount += Long.bitCount(arr1.getLong(i) ^ arr2.getLong(i));
+    }
+    return popCount;
+  }
+
+  /** returns the next highest power of two, or the current value if it's already a power of two or zero*/
+  public static int nextHighestPowerOfTwo(int v) {
+    v--;
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v++;
+    return v;
+  }
+
+  /** returns the next highest power of two, or the current value if it's already a power of two or zero*/
+   public static long nextHighestPowerOfTwo(long v) {
+    v--;
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v |= v >> 32;
+    v++;
+    return v;
+  }
+
+}
\ No newline at end of file
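
For illustration, the power-of-two helpers above behave as follows:

    BitUtil.nextHighestPowerOfTwo(1000);         // 1024
    BitUtil.nextHighestPowerOfTwo(1024);         // 1024 (already a power of two)
    BitUtil.nextHighestPowerOfTwo(5000000000L);  // 8589934592L, i.e. 2^33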

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BitVector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BitVector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BitVector.java
new file mode 100644
index 0000000..c157b12
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BitVector.java
@@ -0,0 +1,118 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import org.apache.drill.common.expression.types.DataType;
+import org.apache.drill.common.physical.RecordField.ValueMode;
+import org.apache.drill.exec.BufferAllocator;
+import org.apache.drill.exec.record.MaterializedField;
+
+/**
+ * Describes a vector which holds a number of true/false values.
+ */
+public class BitVector extends AbstractFixedValueVector<BitVector> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BitVector.class);
+
+  private final MaterializedField field;
+  
+  public BitVector(int fieldId, BufferAllocator allocator) {
+    super(fieldId, allocator, 1);
+    this.field = new MaterializedField(fieldId, DataType.BOOLEAN, false, ValueMode.VECTOR, this.getClass());
+  }
+
+  @Override
+  public MaterializedField getField() {
+    return field;
+  }
+  
+  /** Returns true or false for the specified bit index.
+   * The index should be less than the vector's value count.
+   */
+  public boolean get(int index) {
+    assert index >= 0 && index < this.valueCount;
+    int i = index >> 3;               // div 8
+    // signed shift will keep a negative index and force an
+    // array-index-out-of-bounds-exception, removing the need for an explicit check.
+    int bit = index & 0x3f;           // mod 64
+    long bitmask = 1L << bit;
+    return (data.getLong(i) & bitmask) != 0;
+  }
+  
+  /** Sets the bit at the specified index.
+   * The index should be less than the vector's value count.
+   */
+   public void set(int index) {
+     assert index >= 0 && index < this.valueCount;
+     int wordNum = index >> 3;   
+     int bit = index & 0x3f;
+     long bitmask = 1L << bit;
+     data.setLong(wordNum, data.getLong(wordNum) | bitmask);
+   }
+   
+   public void clear(int index) {
+     assert index >= 0 && index < this.valueCount;
+     int wordNum = index >> 3;
+     int bit = index & 0x03f;
+     long bitmask = 1L << bit;
+     data.setLong(wordNum, data.getLong(wordNum) & ~bitmask);
+   }
+   
+   
+   /** Clears a range of bits.  Clearing past the end does not change the size of the set.
+   *
+   * @param startIndex lower index
+   * @param endIndex one-past the last bit to clear
+   */
+  private void clear(int startIndex, int endIndex) {
+    if (endIndex <= startIndex) return;
+
+    int startWord = (startIndex>>3);
+    if (startWord >= this.longWords) return;
+
+    // since endIndex is one past the end, this is index of the last
+    // word to be changed.
+    int endWord   = ((endIndex-1)>>3);
+
+    long startmask = -1L << startIndex;
+    long endmask = -1L >>> -endIndex;  // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+
+    // invert masks since we are clearing
+    startmask = ~startmask;
+    endmask = ~endmask;
+
+    if (startWord == endWord) {
+      data.setLong(startWord,  data.getLong(startWord) & (startmask | endmask));
+      return;
+    }
+
+    data.setLong(startWord,  data.getLong(startWord) & startmask);
+
+    int middle = Math.min(this.longWords, endWord);
+    for(int i =startWord+1; i < middle; i += 8){
+      data.setLong(i, 0L);
+    }
+    if (endWord < this.longWords) {
+      data.setLong(endWord,  data.getLong(endWord) & endmask);
+    }
+  }
+  
+  public void setAllFalse(){
+    clear(0, valueCount);
+  }
+
+}
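
A minimal usage sketch, assuming a BufferAllocator instance is already available (how one is
obtained is outside this patch) and keeping in mind that the bit addressing above is still
being reworked:

    BitVector flags = new BitVector(0, allocator); // field id 0 is an arbitrary example
    flags.allocateNew(1024);                       // room for 1024 true/false values
    flags.setAllFalse();
    flags.set(5);
    boolean isSet = flags.get(5);
    flags.close();                                 // releases the underlying buffer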

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BufBitSet.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BufBitSet.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BufBitSet.java
new file mode 100644
index 0000000..574389f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/BufBitSet.java
@@ -0,0 +1,847 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.record.vector; // from org.apache.solr.util rev 555343
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+
+/**
+ * HEAVY WIP: ONLY PARTIALLY TRANSFERRED TO BUFFER METHODS. STILL NEEDS BIT SHIFT FIXES, GETLONG AND SETLONG updates to
+ * fix index position AND OTHER THINGS.
+ * 
+ * An "open" BitSet implementation that allows direct access to the array of words storing the bits.
+ * <p/>
+ * Unlike java.util.BitSet, the fact that bits are packed into an array of longs is part of the interface. This allows
+ * efficient implementation of other algorithms by someone other than the author. It also allows one to efficiently
+ * implement alternate serialization or interchange formats.
+ * <p/>
+ * <code>BufBitSet</code> is faster than <code>java.util.BitSet</code> in most operations and *much* faster at
+ * calculating cardinality of sets and results of set operations. It can also handle sets of larger cardinality (up to
+ * 64 * 2**32-1)
+ * <p/>
+ * The goals of <code>BufBitSet</code> are the fastest implementation possible, and maximum code reuse. Extra safety
+ * and encapsulation may always be built on top, but if that's built in, the cost can never be removed (and hence people
+ * re-implement their own version in order to get better performance). If you want a "safe", totally encapsulated (and
+ * slower and limited) BitSet class, use <code>java.util.BitSet</code>.
+ * <p/>
+ */
+
+public class BufBitSet {
+  private ByteBufAllocator allocator;
+  private ByteBuf buf;
+  // protected long[] bits;
+  protected int wlen; // number of words (elements) used in the array
+
+  // Used only for assert:
+  private long numBits;
+
+  // /** Constructs an BufBitSet large enough to hold <code>numBits</code>.
+  // */
+  // public BufBitSet(long numBits) {
+  // this.numBits = numBits;
+  // wlen = buf.capacity();
+  // }
+  //
+  // public BufBitSet() {
+  // this(64);
+  // }
+
+  public BufBitSet(long numBits, ByteBufAllocator allocator) {
+    this.allocator = allocator;
+    this.numBits = numBits;
+    int words = bits2words(numBits);
+    this.wlen = words;
+    buf = allocator.buffer(wlen);
+  }
+
+  private BufBitSet(ByteBufAllocator allocator, ByteBuf buf) {
+    this.allocator = allocator;
+    this.numBits = buf.capacity() * 8;
+    int words = buf.capacity();
+    this.wlen = words;
+    this.buf = buf;
+  }
+
+  /** Returns the current capacity in bits (1 greater than the index of the last bit) */
+  public long capacity() {
+    return buf.capacity() << 6;
+  }
+
+  /**
+   * Returns the current capacity of this set. Included for compatibility. This is *not* equal to {@link #cardinality}
+   */
+  public long size() {
+    return capacity();
+  }
+
+  public int length() {
+    return buf.capacity() << 6;
+  }
+
+  /** Returns true if there are no set bits */
+  public boolean isEmpty() {
+    return cardinality() == 0;
+  }
+
+  // /** Expert: returns the long[] storing the bits */
+  // public long[] getBits() { return bits; }
+  //
+  // /** Expert: sets a new long[] to use as the bit storage */
+  // public void setBits(long[] bits) { this.bits = bits; }
+
+  /** Expert: gets the number of longs in the array that are in use */
+  public int getNumWords() {
+    return wlen;
+  }
+
+  /** Expert: sets the number of longs in the array that are in use */
+  public void setNumWords(int nWords) {
+    this.wlen = nWords;
+  }
+
+  /** Returns true or false for the specified bit index. */
+  public boolean get(int index) {
+    int i = index >> 6; // div 64
+    // signed shift will keep a negative index and force an
+    // array-index-out-of-bounds-exception, removing the need for an explicit check.
+    if (i >= buf.capacity()) return false;
+
+    int bit = index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    return (buf.getLong(i) & bitmask) != 0;
+  }
+
+  /**
+   * Returns true or false for the specified bit index. The index should be less than the BufBitSet size
+   */
+  public boolean fastGet(int index) {
+    assert index >= 0 && index < numBits;
+    int i = index >> 6; // div 64
+    // signed shift will keep a negative index and force an
+    // array-index-out-of-bounds-exception, removing the need for an explicit check.
+    int bit = index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    return (buf.getLong(i) & bitmask) != 0;
+  }
+
+  /**
+   * Returns true or false for the specified bit index
+   */
+  public boolean get(long index) {
+    int i = (int) (index >> 6); // div 64
+    if (i >= buf.capacity()) return false;
+    int bit = (int) index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    return (buf.getLong(i) & bitmask) != 0;
+  }
+
+  /**
+   * Returns true or false for the specified bit index. The index should be less than the BufBitSet size.
+   */
+  public boolean fastGet(long index) {
+    assert index >= 0 && index < numBits;
+    int i = (int) (index >> 6); // div 64
+    int bit = (int) index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    return (buf.getLong(i) & bitmask) != 0;
+  }
+
+  /*
+   * // alternate implementation of get() public boolean get1(int index) { int i = index >> 6; // div 64 int bit = index
+   * & 0x3f; // mod 64 return ((buf.getLong(i)>>>bit) & 0x01) != 0; // this does a long shift and a bittest (on x86) vs
+   * // a long shift, and a long AND, (the test for zero is prob a no-op) // testing on a P4 indicates this is slower
+   * than (buf.getLong(i) & bitmask) != 0; }
+   */
+
+  /**
+   * returns 1 if the bit is set, 0 if not. The index should be less than the BufBitSet size
+   */
+  public int getBit(int index) {
+    assert index >= 0 && index < numBits;
+    int i = index >> 6; // div 64
+    int bit = index & 0x3f; // mod 64
+    return ((int) (buf.getLong(i) >>> bit)) & 0x01;
+  }
+
+  /*
+   * public boolean get2(int index) { int word = index >> 6; // div 64 int bit = index & 0x0000003f; // mod 64 return
+   * (buf.getLong(word) << bit) < 0; // hmmm, this would work if bit order were reversed // we could right shift and
+   * check for parity bit, if it was available to us. }
+   */
+
+  /** sets a bit, expanding the set size if necessary */
+  public void set(long index) {
+    int wordNum = expandingWordNum(index);
+    int bit = (int) index & 0x3f;
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, buf.getLong(wordNum) | bitmask);
+  }
+
+  /**
+   * Sets the bit at the specified index. The index should be less than the BufBitSet size.
+   */
+  public void fastSet(int index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = index >> 6; // div 64
+    int bit = index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, buf.getLong(wordNum) | bitmask);
+  }
+
+  /**
+   * Sets the bit at the specified index. The index should be less than the BufBitSet size.
+   */
+  public void fastSet(long index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = (int) (index >> 6);
+    int bit = (int) index & 0x3f;
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, buf.getLong(wordNum) | bitmask);
+  }
+
+  /**
+   * Sets a range of bits, expanding the set size if necessary
+   * 
+   * @param startIndex
+   *          lower index
+   * @param endIndex
+   *          one-past the last bit to set
+   */
+  public void set(long startIndex, long endIndex) {
+    if (endIndex <= startIndex) return;
+
+    int startWord = (int) (startIndex >> 6);
+
+    // since endIndex is one past the end, this is index of the last
+    // word to be changed.
+    int endWord = expandingWordNum(endIndex - 1);
+
+    long startmask = -1L << startIndex;
+    long endmask = -1L >>> -endIndex; // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+
+    if (startWord == endWord) {
+      buf.setLong(startWord, buf.getLong(startWord) | (startmask & endmask));
+      return;
+    }
+    buf.setLong(startWord, buf.getLong(startWord) | startmask);
+
+    fill(buf, startWord + 1, endWord, -1L);
+    buf.setLong(endWord, buf.getLong(endWord) | endmask);
+  }
+
+  private void fill(ByteBuf buf, int start, int end, long val) {
+    for (int i = start; i < end; i++) {
+      buf.setLong(i, val);
+    }
+  }
+
+  private final void setLongWord(int pos, long value) {
+    buf.setLong(pos * 8, value);
+  }
+
+  private final long getLongWord(int pos) {
+    return buf.getLong(pos * 8);
+  }
+
+  protected int expandingWordNum(long index) {
+    int wordNum = (int) (index >> 6);
+    if (wordNum >= wlen) {
+      ensureCapacity(index + 1);
+      wlen = wordNum + 1;
+    }
+    assert (numBits = Math.max(numBits, index + 1)) >= 0;
+    return wordNum;
+  }
+
+  /**
+   * clears a bit. The index should be less than the BufBitSet size.
+   */
+  public void fastClear(int index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = index >> 6;
+    int bit = index & 0x03f;
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, buf.getLong(wordNum) & ~bitmask);
+    // hmmm, it takes one more instruction to clear than it does to set... any
+    // way to work around this? If there were only 63 bits per word, we could
+    // use a right shift of 10111111...111 in binary to position the 0 in the
+    // correct place (using sign extension).
+    // Could also use Long.rotateRight() or rotateLeft() *if* they were converted
+    // by the JVM into a native instruction.
+    // buf.getLong(word) &= Long.rotateLeft(0xfffffffe,bit);
+  }
+
+  /**
+   * clears a bit. The index should be less than the BufBitSet size.
+   */
+  public void fastClear(long index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = (int) (index >> 6); // div 64
+    int bit = (int) index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, buf.getLong(wordNum) & ~bitmask);
+  }
+
+  /** clears a bit, allowing access beyond the current set size without changing the size. */
+  public void clear(long index) {
+    int wordNum = (int) (index >> 6); // div 64
+    if (wordNum >= wlen) return;
+    int bit = (int) index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, buf.getLong(wordNum) & ~bitmask);
+  }
+
+  /**
+   * Clears a range of bits. Clearing past the end does not change the size of the set.
+   * 
+   * @param startIndex
+   *          lower index
+   * @param endIndex
+   *          one-past the last bit to clear
+   */
+  public void clear(int startIndex, int endIndex) {
+    if (endIndex <= startIndex) return;
+
+    int startWord = (startIndex >> 6);
+    if (startWord >= wlen) return;
+
+    // since endIndex is one past the end, this is index of the last
+    // word to be changed.
+    int endWord = ((endIndex - 1) >> 6);
+
+    long startmask = -1L << startIndex;
+    long endmask = -1L >>> -endIndex; // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+
+    // invert masks since we are clearing
+    startmask = ~startmask;
+    endmask = ~endmask;
+
+    if (startWord == endWord) {
+      buf.setLong(startWord, buf.getLong(startWord) & (startmask | endmask));
+      return;
+    }
+
+    buf.setLong(startWord, buf.getLong(startWord) & startmask);
+
+    int middle = Math.min(wlen, endWord);
+    fill(buf, startWord + 1, middle, 0L);
+    if (endWord < wlen) {
+      buf.setLong(endWord, buf.getLong(endWord) & endmask);
+    }
+  }
+
+  /**
+   * Clears a range of bits. Clearing past the end does not change the size of the set.
+   * 
+   * @param startIndex
+   *          lower index
+   * @param endIndex
+   *          one-past the last bit to clear
+   */
+  public void clear(long startIndex, long endIndex) {
+    if (endIndex <= startIndex) return;
+
+    int startWord = (int) (startIndex >> 6);
+    if (startWord >= wlen) return;
+
+    // since endIndex is one past the end, this is index of the last
+    // word to be changed.
+    int endWord = (int) ((endIndex - 1) >> 6);
+
+    long startmask = -1L << startIndex;
+    long endmask = -1L >>> -endIndex; // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+
+    // invert masks since we are clearing
+    startmask = ~startmask;
+    endmask = ~endmask;
+
+    if (startWord == endWord) {
+      buf.setLong(startWord, buf.getLong(startWord) & (startmask | endmask));
+      return;
+    }
+
+    buf.setLong(startWord, buf.getLong(startWord) & startmask);
+
+    int middle = Math.min(wlen, endWord);
+    fill(buf, startWord + 1, middle, 0L);
+    if (endWord < wlen) {
+      buf.setLong(endWord, buf.getLong(endWord) & endmask);
+    }
+  }
+
+  /**
+   * Sets a bit and returns the previous value. The index should be less than the BufBitSet size.
+   */
+  public boolean getAndSet(int index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = index >> 6; // div 64
+    int bit = index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    long longVal = buf.getLong(wordNum);
+    boolean val = (longVal & bitmask) != 0;
+    buf.setLong(wordNum, longVal | bitmask);
+    return val;
+  }
+
+  /**
+   * flips a bit. The index should be less than the BufBitSet size.
+   */
+  public void fastFlip(int index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = index >> 6; // div 64
+    int bit = index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, (buf.getLong(wordNum) ^ bitmask));
+  }
+
+  /**
+   * flips a bit. The index should be less than the BufBitSet size.
+   */
+  public void fastFlip(long index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = (int) (index >> 6); // div 64
+    int bit = (int) index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, (buf.getLong(wordNum) ^ bitmask));
+  }
+
+  /** flips a bit, expanding the set size if necessary */
+  public void flip(long index) {
+    int wordNum = expandingWordNum(index);
+    int bit = (int) index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    buf.setLong(wordNum, (buf.getLong(wordNum) ^ bitmask));
+  }
+
+  /**
+   * flips a bit and returns the resulting bit value. The index should be less than the BufBitSet size.
+   */
+  public boolean flipAndGet(int index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = index >> 6; // div 64
+    int bit = index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    long longVal = buf.getLong(wordNum);
+    buf.setLong(wordNum, longVal ^ bitmask);
+    return (longVal & bitmask) != 0;
+  }
+
+  /**
+   * flips a bit and returns the resulting bit value. The index should be less than the BufBitSet size.
+   */
+  public boolean flipAndGet(long index) {
+    assert index >= 0 && index < numBits;
+    int wordNum = (int) (index >> 6); // div 64
+    int bit = (int) index & 0x3f; // mod 64
+    long bitmask = 1L << bit;
+    long longVal = buf.getLong(wordNum);
+    buf.setLong(wordNum, longVal ^ bitmask);
+    return (longVal & bitmask) != 0;
+  }
+
+  /**
+   * Flips a range of bits, expanding the set size if necessary
+   * 
+   * @param startIndex
+   *          lower index
+   * @param endIndex
+   *          one-past the last bit to flip
+   */
+  public void flip(long startIndex, long endIndex) {
+    if (endIndex <= startIndex) return;
+    int startWord = (int) (startIndex >> 6);
+
+    // since endIndex is one past the end, this is index of the last
+    // word to be changed.
+    int endWord = expandingWordNum(endIndex - 1);
+
+    /***
+     * Grrr, java shifting wraps around so -1L>>>64 == -1 for that reason, make sure not to use endmask if the bits to
+     * flip will be zero in the last word (redefine endWord to be the last changed...) long startmask = -1L <<
+     * (startIndex & 0x3f); // example: 11111...111000 long endmask = -1L >>> (64-(endIndex & 0x3f)); // example:
+     * 00111...111111
+     ***/
+
+    long startmask = -1L << startIndex;
+    long endmask = -1L >>> -endIndex; // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+
+    if (startWord == endWord) {
+      buf.setLong(startWord, buf.getLong(startWord) ^ (startmask & endmask));
+      return;
+    }
+
+    buf.setLong(startWord, buf.getLong(startWord) ^ startmask);
+
+    for (int i = startWord + 1; i < endWord; i++) {
+      buf.setLong(i, ~buf.getLong(i));
+    }
+
+    buf.setLong(endWord, buf.getLong(endWord) ^ endmask);
+  }
+
+  /*
+   * public static int pop(long v0, long v1, long v2, long v3) { // derived from pop_array by setting last four elems to
+   * 0. // exchanges one pop() call for 10 elementary operations // saving about 7 instructions... is there a better
+   * way? long twosA=v0 & v1; long ones=v0^v1;
+   * 
+   * long u2=ones^v2; long twosB =(ones&v2)|(u2&v3); ones=u2^v3;
+   * 
+   * long fours=(twosA&twosB); long twos=twosA^twosB;
+   * 
+   * return (pop(fours)<<2) + (pop(twos)<<1) + pop(ones);
+   * 
+   * }
+   */
+
+  /** @return the number of set bits */
+  public long cardinality() {
+    return BitUtil.pop_array(buf, 0, wlen);
+  }
+
+  /**
+   * Returns the popcount or cardinality of the intersection of the two sets. Neither set is modified.
+   */
+  public static long intersectionCount(BufBitSet a, BufBitSet b) {
+    return BitUtil.pop_intersect(a.buf, b.buf, 0, Math.min(a.wlen, b.wlen));
+  }
+
+  /**
+   * Returns the popcount or cardinality of the union of the two sets. Neither set is modified.
+   */
+  public static long unionCount(BufBitSet a, BufBitSet b) {
+    long tot = BitUtil.pop_union(a.buf, b.buf, 0, Math.min(a.wlen, b.wlen));
+    if (a.wlen < b.wlen) {
+      tot += BitUtil.pop_array(b.buf, a.wlen, b.wlen - a.wlen);
+    } else if (a.wlen > b.wlen) {
+      tot += BitUtil.pop_array(a.buf, b.wlen, a.wlen - b.wlen);
+    }
+    return tot;
+  }
+
+  /**
+   * Returns the popcount or cardinality of "a and not b" or "intersection(a, not(b))". Neither set is modified.
+   */
+  public static long andNotCount(BufBitSet a, BufBitSet b) {
+    long tot = BitUtil.pop_andnot(a.buf, b.buf, 0, Math.min(a.wlen, b.wlen));
+    if (a.wlen > b.wlen) {
+      tot += BitUtil.pop_array(a.buf, b.wlen, a.wlen - b.wlen);
+    }
+    return tot;
+  }
+
+  /**
+   * Returns the popcount or cardinality of the exclusive-or of the two sets. Neither set is modified.
+   */
+  public static long xorCount(BufBitSet a, BufBitSet b) {
+    long tot = BitUtil.pop_xor(a.buf, b.buf, 0, Math.min(a.wlen, b.wlen));
+    if (a.wlen < b.wlen) {
+      tot += BitUtil.pop_array(b.buf, a.wlen, b.wlen - a.wlen);
+    } else if (a.wlen > b.wlen) {
+      tot += BitUtil.pop_array(a.buf, b.wlen, a.wlen - b.wlen);
+    }
+    return tot;
+  }
+
+  /**
+   * Returns the index of the first set bit starting at the index specified. -1 is returned if there are no more set
+   * bits.
+   */
+  public int nextSetBit(int index) {
+    int i = index >> 6;
+    if (i >= wlen) return -1;
+    int subIndex = index & 0x3f; // index within the word
+    long word = buf.getLong(i) >> subIndex; // skip all the bits to the right of index
+
+    if (word != 0) {
+      return (i << 6) + subIndex + Long.numberOfTrailingZeros(word);
+    }
+
+    while (++i < wlen) {
+      word = buf.getLong(i);
+      if (word != 0) return (i << 6) + Long.numberOfTrailingZeros(word);
+    }
+
+    return -1;
+  }
+
+  /**
+   * Returns the index of the first set bit starting at the index specified. -1 is returned if there are no more set
+   * bits.
+   */
+  public long nextSetBit(long index) {
+    int i = (int) (index >>> 6);
+    if (i >= wlen) return -1;
+    int subIndex = (int) index & 0x3f; // index within the word
+    long word = buf.getLong(i) >>> subIndex; // skip all the bits to the right of index
+
+    if (word != 0) {
+      return (((long) i) << 6) + (subIndex + Long.numberOfTrailingZeros(word));
+    }
+
+    while (++i < wlen) {
+      word = buf.getLong(i);
+      if (word != 0) return (((long) i) << 6) + Long.numberOfTrailingZeros(word);
+    }
+
+    return -1;
+  }
+
+  /**
+   * Returns the index of the first set bit starting downwards at the index specified. -1 is returned if there are no
+   * more set bits.
+   */
+  public int prevSetBit(int index) {
+    int i = index >> 6;
+    final int subIndex;
+    long word;
+    if (i >= wlen) {
+      i = wlen - 1;
+      if (i < 0) return -1;
+      subIndex = 63; // last possible bit
+      word = buf.getLong(i);
+    } else {
+      if (i < 0) return -1;
+      subIndex = index & 0x3f; // index within the word
+      word = (buf.getLong(i) << (63 - subIndex)); // skip all the bits to the left of index
+    }
+
+    if (word != 0) {
+      return (i << 6) + subIndex - Long.numberOfLeadingZeros(word); // See LUCENE-3197
+    }
+
+    while (--i >= 0) {
+      word = buf.getLong(i);
+      if (word != 0) {
+        return (i << 6) + 63 - Long.numberOfLeadingZeros(word);
+      }
+    }
+
+    return -1;
+  }
+
+  /**
+   * Returns the index of the first set bit starting downwards at the index specified. -1 is returned if there are no
+   * more set bits.
+   */
+  public long prevSetBit(long index) {
+    int i = (int) (index >> 6);
+    final int subIndex;
+    long word;
+    if (i >= wlen) {
+      i = wlen - 1;
+      if (i < 0) return -1;
+      subIndex = 63; // last possible bit
+      word = buf.getLong(i);
+    } else {
+      if (i < 0) return -1;
+      subIndex = (int) index & 0x3f; // index within the word
+      word = (buf.getLong(i) << (63 - subIndex)); // skip all the bits to the left of index
+    }
+
+    if (word != 0) {
+      return (((long) i) << 6) + subIndex - Long.numberOfLeadingZeros(word); // See LUCENE-3197
+    }
+
+    while (--i >= 0) {
+      word = buf.getLong(i);
+      if (word != 0) {
+        return (((long) i) << 6) + 63 - Long.numberOfLeadingZeros(word);
+      }
+    }
+
+    return -1;
+  }
+
+  BufBitSet cloneTest() {
+    BufBitSet obs = new BufBitSet(allocator, buf.copy());
+    return obs;
+  }
+
+  /** this = this AND other */
+  public void intersect(BufBitSet other) {
+    int newLen = Math.min(this.wlen, other.wlen);
+    ByteBuf thisArr = this.buf;
+    ByteBuf otherArr = other.buf;
+    // testing against zero can be more efficient
+    int pos = newLen;
+    while (--pos >= 0) {
+      thisArr.setLong(pos, thisArr.getLong(pos) & otherArr.getLong(pos));
+    }
+    if (this.wlen > newLen) {
+      // fill zeros from the new shorter length to the old length
+      fill(buf, newLen, this.wlen, 0);
+    }
+    this.wlen = newLen;
+  }
+
+  /** this = this OR other */
+  public void union(BufBitSet other) {
+    int newLen = Math.max(wlen, other.wlen);
+    ensureCapacityWords(newLen);
+    assert (numBits = Math.max(other.numBits, numBits)) >= 0;
+
+    ByteBuf thisArr = this.buf;
+    ByteBuf otherArr = other.buf;
+
+    int pos = Math.min(wlen, other.wlen);
+    while (--pos >= 0) {
+      thisArr.setLong(pos, thisArr.getLong(pos) | otherArr.getLong(pos));
+    }
+    if (this.wlen < newLen) {
+      // arraycopy does not apply to ByteBufs; copy the remaining words from other
+      for (int i = this.wlen; i < newLen; i++) thisArr.setLong(i, otherArr.getLong(i));
+    }
+    this.wlen = newLen;
+  }
+
+  /** Remove all elements set in other. this = this AND_NOT other */
+  public void remove(BufBitSet other) {
+    int idx = Math.min(wlen, other.wlen);
+    ByteBuf thisArr = this.buf;
+    ByteBuf otherArr = other.buf;
+    while (--idx >= 0) {
+      thisArr.setLong(idx, thisArr.getLong(idx) & ~otherArr.getLong(idx));
+    }
+  }
+
+  /** this = this XOR other */
+  public void xor(BufBitSet other) {
+    int newLen = Math.max(wlen, other.wlen);
+    ensureCapacityWords(newLen);
+    assert (numBits = Math.max(other.numBits, numBits)) >= 0;
+
+    ByteBuf thisArr = this.buf;
+    ByteBuf otherArr = other.buf;
+    int pos = Math.min(wlen, other.wlen);
+    while (--pos >= 0) {
+      thisArr.setLong(pos, thisArr.getLong(pos) ^ otherArr.getLong(pos));
+    }
+    if (this.wlen < newLen) {
+      // copy the remaining words from other into this set
+      for (int i = this.wlen; i < newLen; i++) thisArr.setLong(i, otherArr.getLong(i));
+    }
+    this.wlen = newLen;
+
+  }
+
+  // some BitSet compatibility methods
+
+  // ** see {@link intersect} */
+  public void and(BufBitSet other) {
+    intersect(other);
+  }
+
+  // ** see {@link union} */
+  public void or(BufBitSet other) {
+    union(other);
+  }
+
+  // ** see {@link andNot} */
+  public void andNot(BufBitSet other) {
+    remove(other);
+  }
+
+  /** returns true if the sets have any elements in common */
+  public boolean intersects(BufBitSet other) {
+    int pos = Math.min(this.wlen, other.wlen);
+    ByteBuf thisArr = this.buf;
+    ByteBuf otherArr = other.buf;
+    while (--pos >= 0) {
+      if ((thisArr.getLong(pos) & otherArr.getLong(pos)) != 0) return true;
+    }
+    return false;
+  }
+
+  public void ensureCapacityWords(int numWords) {
+    if (buf.capacity() < numWords) {
+      ByteBuf newBuf = allocator.buffer(numWords * 8);
+      buf.writeBytes(newBuf);
+      buf.release();
+      buf = newBuf;
+      this.numBits = numWords * 64;
+    }
+  }
+
+  /**
+   * Ensure that the long[] is big enough to hold numBits, expanding it if necessary. getNumWords() is unchanged by this
+   * call.
+   */
+  public void ensureCapacity(long numBits) {
+    ensureCapacityWords(bits2words(numBits));
+  }
+
+  /**
+   * Lowers numWords, the number of words in use, by checking for trailing zero words.
+   */
+  public void trimTrailingZeros() {
+    int idx = wlen - 1;
+    while (idx >= 0 && buf.getLong(idx) == 0)
+      idx--;
+    wlen = idx + 1;
+  }
+
+  /** returns the number of 64 bit words it would take to hold numBits */
+  public static int bits2words(long numBits) {
+    return (int) (((numBits - 1) >>> 6) + 1);
+  }
+
+  /** returns true if both sets have the same bits set */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (!(o instanceof BufBitSet)) return false;
+    BufBitSet a;
+    BufBitSet b = (BufBitSet) o;
+    // make a the larger set.
+    if (b.wlen > this.wlen) {
+      a = b;
+      b = this;
+    } else {
+      a = this;
+    }
+
+    // check for any set bits out of the range of b
+    for (int i = a.wlen - 1; i >= b.wlen; i--) {
+      if (a.buf.getLong(i) != 0) return false;
+    }
+
+    for (int i = b.wlen - 1; i >= 0; i--) {
+      if (a.buf.getLong(i) != b.buf.getLong(i)) return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    // Start with a zero hash and use a mix that results in zero if the input is zero.
+    // This effectively truncates trailing zeros without an explicit check.
+    long h = 0;
+    for (int i = buf.capacity(); --i >= 0;) {
+      h ^= buf.getLong(i);
+      h = (h << 1) | (h >>> 63); // rotate left
+    }
+    // fold leftmost bits into right and add a constant to prevent
+    // empty sets from returning 0, which is too common.
+    return (int) ((h >> 32) ^ h) + 0x98761234;
+  }
+
+  public void release() {
+    this.buf.release();
+  }
+}
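
Given the WIP warning in the class comment, the following is only a sketch of the intended API,
using Netty's default unpooled allocator (io.netty.buffer.UnpooledByteBufAllocator):

    BufBitSet bits = new BufBitSet(4096, UnpooledByteBufAllocator.DEFAULT);
    bits.set(3, 10);                        // set bits [3, 10)
    long cardinality = bits.cardinality();
    int firstSet = bits.nextSetBit(0);
    bits.release();                         // return the backing buffer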

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/ByteVector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/ByteVector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/ByteVector.java
new file mode 100644
index 0000000..258f1cc
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/ByteVector.java
@@ -0,0 +1,48 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import org.apache.drill.common.expression.types.DataType;
+import org.apache.drill.common.physical.RecordField.ValueMode;
+import org.apache.drill.exec.BufferAllocator;
+import org.apache.drill.exec.record.MaterializedField;
+
+
+public class ByteVector extends AbstractFixedValueVector<ByteVector>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ByteVector.class);
+
+  private final MaterializedField field;
+
+  public ByteVector(int fieldId, BufferAllocator allocator) {
+    super(fieldId, allocator, 8);
+    this.field = new MaterializedField(fieldId, DataType.SIGNED_BYTE, false, ValueMode.VECTOR, this.getClass());
+  }
+
+  @Override
+  public MaterializedField getField() {
+    return field;
+  }
+
+  public void setByte(int index, byte b){
+    data.setByte(index, b);
+  }
+
+  public byte getByte(int index){
+    return data.getByte(index);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/Int32Vector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/Int32Vector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/Int32Vector.java
new file mode 100644
index 0000000..d8add04
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/Int32Vector.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import org.apache.drill.common.expression.types.DataType;
+import org.apache.drill.common.physical.RecordField.ValueMode;
+import org.apache.drill.exec.BufferAllocator;
+import org.apache.drill.exec.record.MaterializedField;
+
+public class Int32Vector extends AbstractFixedValueVector<Int32Vector>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Int32Vector.class);
+  
+  private final MaterializedField field;
+
+  public Int32Vector(int fieldId, BufferAllocator allocator) {
+    super(fieldId, allocator, 32);
+    this.field = new MaterializedField(fieldId, DataType.INT32, false, ValueMode.VECTOR, this.getClass());
+  }
+
+  @Override
+  public MaterializedField getField() {
+    return field;
+  }
+
+  public final void set(int index, int value){
+    index*=4;
+    data.setInt(index, value);
+  }
+  
+  public final int get(int index){
+    index*=4;
+    return data.getInt(index);
+  }
+  
+  
+  
+}
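
As with BitVector, a minimal usage sketch assuming an available BufferAllocator; ByteVector is
used analogously via setByte()/getByte():

    Int32Vector ints = new Int32Vector(2, allocator); // field id 2 is an arbitrary example
    ints.allocateNew(4096);                           // 4096 values, 32 bits each
    ints.set(0, 42);                                  // set()/get() translate the value index to a byte offset
    int v = ints.get(0);
    ints.close();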

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/NullableValueVector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/NullableValueVector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/NullableValueVector.java
new file mode 100644
index 0000000..b9bad6e
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/NullableValueVector.java
@@ -0,0 +1,70 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import io.netty.buffer.ByteBuf;
+
+import org.apache.drill.exec.BufferAllocator;
+
+/**
+ * Abstract base class for value vectors that support null values.
+ */
+abstract class NullableValueVector<T extends NullableValueVector<T, E>, E extends BaseValueVector<E>> extends BaseValueVector<T> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(NullableValueVector.class);
+
+  protected BitVector bits;
+  protected E value;
+
+  public NullableValueVector(int fieldId, BufferAllocator allocator) {
+    super(fieldId, allocator);
+    bits = new BitVector(fieldId, allocator);
+    value = getNewValueVector(fieldId, allocator);
+  }
+  
+  protected abstract E getNewValueVector(int fieldId, BufferAllocator allocator);
+
+  @Override
+  protected int getAllocationSize(int valueCount) {
+    return bits.getAllocationSize(valueCount) + value.getAllocationSize(valueCount);
+  }
+  
+  
+  @Override
+  protected void childResetAllocation(int valueCount, ByteBuf buf) {
+    super.resetAllocation(valueCount, buf);
+    int firstSize = bits.getAllocationSize(valueCount);
+    value.resetAllocation(valueCount, buf.slice(firstSize, value.getAllocationSize(valueCount)));
+    bits.resetAllocation(valueCount, buf.slice(0, firstSize));
+    bits.setAllFalse();
+  }
+
+  @Override
+  protected void childCloneMetadata(T other) {
+    bits.cloneMetadata(other.bits);
+    value.cloneMetadata(other.value);
+  }
+
+  @Override
+  protected void childClear() {
+    bits.clear();
+    value.clear();
+  }
+
+  
+}
+
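
NullableValueVector pairs a bit vector (one definition bit per slot) with an inner value vector, both carved out of a single allocation. A small self-contained sketch of the same layout, using a java.util.BitSet and a plain int[] rather than Drill classes (the class name is hypothetical):

import java.util.BitSet;

public class NullableIntSketch {
  private final BitSet defined;   // bit i == true -> value i is non-null
  private final int[] values;

  public NullableIntSketch(int valueCount) {
    defined = new BitSet(valueCount);
    values = new int[valueCount];
  }

  public void set(int index, int value) { defined.set(index); values[index] = value; }
  public void setNull(int index)        { defined.clear(index); }
  public boolean isNull(int index)      { return !defined.get(index); }

  public int get(int index) {
    if (isNull(index)) throw new IllegalStateException("null value at " + index);
    return values[index];
  }
}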


[6/9] basic framework for physical plan. abstraction of graph classes.

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/ValueVector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/ValueVector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/ValueVector.java
new file mode 100644
index 0000000..97a9b3b
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/ValueVector.java
@@ -0,0 +1,81 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import io.netty.buffer.ByteBuf;
+
+import java.io.Closeable;
+
+import org.apache.drill.exec.record.MaterializedField;
+
+/**
+ * A vector of values.  Acts as a containing instance that may rotate its internal buffer depending on what it needs to hold.  Should be language agnostic so that it can be passed between Java and JNI without modification.
+ */
+public interface ValueVector<T extends ValueVector<T>> extends Closeable {
+
+  /**
+   * Copies the data from this vector into the given vector.
+   * 
+   * @param vector
+   */
+  public abstract void cloneInto(T vector);
+
+  /**
+   * Allocate a new memory space for this vector.
+   * 
+   * @param valueCount
+   *          The number of possible values which should be contained in this vector.
+   */
+  public abstract void allocateNew(int valueCount);
+
+  /**
+   * Zero-copy move of data from this vector to the target vector. Accessing this vector after the transfer, without
+   * first re-populating it with a new buffer, will cause problems.
+   * 
+   * @param vector
+   */
+  public abstract void transferTo(T vector);
+
+  /**
+   * Return the underlying buffer. Note that this doesn't affect the reference count of the buffer, so it should only
+   * be used for access within the current context. Also note that this buffer changes regularly, so external classes
+   * shouldn't hold a reference to it.
+   * 
+   * @return The underlying ByteBuf.
+   */
+  public abstract ByteBuf getBuffer();
+
+  /**
+   * Returns the number of values contained within this vector.
+   * @return Vector size
+   */
+  public abstract int size();
+
+  /**
+   * Release supporting resources.
+   */
+  public abstract void close();
+
+  /**
+   * Get information about how this field is materialized.
+   * 
+   * @return The MaterializedField that describes this vector's field.
+   */
+  public abstract MaterializedField getField();
+
+}
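
The interface above amounts to an allocate / use / transfer-or-close lifecycle over a single Netty buffer. The stand-alone sketch below (a hypothetical class, not part of Drill) follows that contract for fixed-width ints: allocateNew sizes the buffer, transferBuffer hands ownership away zero-copy in the spirit of transferTo, and close releases it.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class IntVectorSketch implements AutoCloseable {
  private ByteBuf data = Unpooled.EMPTY_BUFFER;
  private int valueCount;

  public void allocateNew(int valueCount) {
    close();                                      // drop any previous buffer first
    this.valueCount = valueCount;
    this.data = Unpooled.buffer(valueCount * 4);  // 4 bytes per int
  }

  public void set(int index, int value) { data.setInt(index * 4, value); }
  public int get(int index)             { return data.getInt(index * 4); }
  public int size()                     { return valueCount; }

  // Zero-copy hand-off in the spirit of transferTo(): the receiver takes the buffer and
  // this vector must not be used again until re-allocated.
  public ByteBuf transferBuffer() {
    ByteBuf b = data;
    data = Unpooled.EMPTY_BUFFER;
    valueCount = 0;
    return b;
  }

  @Override
  public void close() {
    if (data != Unpooled.EMPTY_BUFFER) {
      data.release();
      data = Unpooled.EMPTY_BUFFER;
    }
  }
}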

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/VariableVector.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/VariableVector.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/VariableVector.java
new file mode 100644
index 0000000..bae45dc
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/vector/VariableVector.java
@@ -0,0 +1,78 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import io.netty.buffer.ByteBuf;
+
+import org.apache.drill.exec.BufferAllocator;
+import org.apache.drill.exec.record.DeadBuf;
+
+/** 
+ * A vector of variable length bytes.  Constructed as a vector of lengths or positions and a vector of values.  Random access is only possible if the variable vector stores positions as opposed to lengths.
+ */
+public abstract class VariableVector<T extends VariableVector<T, E>, E extends BaseValueVector<E>> extends BaseValueVector<T>{
+
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(VariableVector.class);
+  
+  protected E lengthVector;
+  private ByteBuf values = DeadBuf.DEAD_BUFFER;
+  protected int expectedValueLength;
+  private final boolean hasPositions;
+  
+  public VariableVector(int fieldId, BufferAllocator allocator, boolean hasPositions) {
+    super(fieldId, allocator);
+    this.lengthVector = getNewLengthVector(fieldId, allocator);
+    this.hasPositions = hasPositions;
+  }
+  
+  protected abstract E getNewLengthVector(int fieldId, BufferAllocator allocator);
+  
+  @Override
+  protected int getAllocationSize(int valueCount) {
+    return lengthVector.getAllocationSize(valueCount) + (expectedValueLength * valueCount);
+  }
+  
+  @Override
+  protected void childResetAllocation(int valueCount, ByteBuf buf) {
+    int firstSize = lengthVector.getAllocationSize(valueCount);
+    lengthVector.resetAllocation(valueCount, buf.slice(0, firstSize));
+    values = buf.slice(firstSize, expectedValueLength * valueCount);
+  }
+
+  @Override
+  protected void childCloneMetadata(T other) {
+    lengthVector.cloneMetadata(other.lengthVector);
+    other.expectedValueLength = expectedValueLength;
+  }
+
+  @Override
+  protected void childClear() {
+    lengthVector.clear();
+    if(values != DeadBuf.DEAD_BUFFER){
+      values.release();
+      values = DeadBuf.DEAD_BUFFER;
+    }
+  }  
+  
+  public boolean hasPositions(){
+    return hasPositions;
+  }
+  
+  
+  
+}
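
As the class comment notes, random access into variable-width data needs positions (offsets) rather than lengths: with an offsets array the i-th value is the byte range offsets[i]..offsets[i+1], while with lengths alone a reader has to sum everything before it. A Drill-independent sketch of the offsets layout (hypothetical class):

import java.nio.charset.StandardCharsets;

public class VarBytesSketch {
  private final byte[] blob;
  private final int[] offsets;  // length = valueCount + 1

  public VarBytesSketch(String... values) {
    offsets = new int[values.length + 1];
    int total = 0;
    for (int i = 0; i < values.length; i++) {
      offsets[i] = total;
      total += values[i].getBytes(StandardCharsets.UTF_8).length;
    }
    offsets[values.length] = total;
    blob = new byte[total];
    for (int i = 0; i < values.length; i++) {
      byte[] b = values[i].getBytes(StandardCharsets.UTF_8);
      System.arraycopy(b, 0, blob, offsets[i], b.length);
    }
  }

  // O(1) random access: slice the blob between two adjacent offsets.
  public String get(int i) {
    return new String(blob, offsets[i], offsets[i + 1] - offsets[i], StandardCharsets.UTF_8);
  }
}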

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicClient.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
new file mode 100644
index 0000000..aa42fc1
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
@@ -0,0 +1,81 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.bootstrap.Bootstrap;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.channel.socket.nio.NioSocketChannel;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+
+public abstract class BasicClient<T extends Enum<T>> extends RpcBus<T> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BasicClient.class);
+
+  private Bootstrap b;
+  private volatile boolean connect = false;
+
+  public BasicClient(ByteBufAllocator alloc, EventLoopGroup eventLoopGroup) {
+    b = new Bootstrap() //
+        .group(eventLoopGroup) //
+        .channel(NioSocketChannel.class) //
+        .option(ChannelOption.ALLOCATOR, alloc) //
+        .option(ChannelOption.SO_RCVBUF, 1 << 17) //
+        .option(ChannelOption.SO_SNDBUF, 1 << 17) //
+        .handler(new ChannelInitializer<SocketChannel>() {
+          
+          @Override
+          protected void initChannel(SocketChannel ch) throws Exception {
+            ch.closeFuture().addListener(getCloseHandler(ch));
+            
+            ch.pipeline().addLast( //
+                new ZeroCopyProtobufLengthDecoder(), //
+                new RpcDecoder(), //
+                new RpcEncoder(), //
+                new InboundHandler(ch), //
+                new RpcExceptionHandler() //
+                );
+            channel = ch;
+            connect = true;
+          }
+        }) //
+        
+        ;
+  }
+
+  @Override
+  public boolean isClient() {
+    return true;
+  }
+  
+  public ChannelFuture connectAsClient(String host, int port) throws InterruptedException {
+    ChannelFuture f = b.connect(host, port).sync();
+    connect = !connect;
+    return f;
+  }
+
+  public void close() {
+    logger.debug("Closing client");
+    b.shutdown();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
new file mode 100644
index 0000000..acf1822
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
@@ -0,0 +1,106 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.bootstrap.ServerBootstrap;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelInboundMessageHandlerAdapter;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.channel.socket.nio.NioServerSocketChannel;
+import io.netty.handler.logging.LogLevel;
+import io.netty.handler.logging.LoggingHandler;
+import io.netty.util.concurrent.GenericFutureListener;
+
+import java.io.IOException;
+
+import org.apache.drill.exec.exception.DrillbitStartupException;
+
+/**
+ * A server is bound to a port and is responsible for responding to various types of requests. In some cases, the inbound
+ * requests will generate more than one outbound request.
+ */
+public abstract class BasicServer<T extends Enum<T>> extends RpcBus<T>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BasicServer.class);
+
+  private ServerBootstrap b;
+  private volatile boolean connect = false;
+
+  public BasicServer(ByteBufAllocator alloc, EventLoopGroup eventLoopGroup) {
+
+    b = new ServerBootstrap() //
+        .channel(NioServerSocketChannel.class) //
+        .option(ChannelOption.SO_BACKLOG, 100) //
+        .option(ChannelOption.SO_RCVBUF, 1 << 17) //
+        .option(ChannelOption.SO_SNDBUF, 1 << 17) //
+        .group(eventLoopGroup) //
+        .childOption(ChannelOption.ALLOCATOR, alloc) //
+        .handler(new LoggingHandler(LogLevel.INFO)) //
+        .childHandler(new ChannelInitializer<SocketChannel>() {
+          @Override
+          protected void initChannel(SocketChannel ch) throws Exception {
+            
+            ch.closeFuture().addListener(getCloseHandler(ch));
+
+            ch.pipeline().addLast( //
+                new ZeroCopyProtobufLengthDecoder(), //
+                new RpcDecoder(), //
+                new RpcEncoder(), //
+                new InboundHandler(ch), //
+                new RpcExceptionHandler() //
+                );            
+            channel = ch;
+            connect = true;
+          }
+        });
+  }
+ 
+  @Override
+  public boolean isClient() {
+    return false;
+  }
+
+
+  public int bind(final int initialPort) throws InterruptedException, DrillbitStartupException{
+    boolean ok = false;
+    int port = initialPort;
+    for(; port < Character.MAX_VALUE; port++){
+      if(b.bind(port).sync().isSuccess()){
+        ok = true;
+        break;
+      }
+    }
+    if(!ok){
+      throw new DrillbitStartupException(String.format("Unable to find available port for Drillbit server starting at port %d.", initialPort));
+    }
+    
+    connect = !connect;
+    return port;    
+  }
+
+  @Override
+  public void close() throws IOException {
+    if(b != null) b.shutdown();
+  }
+  
+  
+
+}
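
The bind() method above scans upward from initialPort until some port accepts the binding. The same strategy sketched with a plain java.net.ServerSocket, so it can be tried outside Netty (hypothetical helper, not part of Drill):

import java.io.IOException;
import java.net.ServerSocket;

public class PortScanner {
  public static int bindFirstAvailable(int initialPort) throws IOException {
    for (int port = initialPort; port <= 65535; port++) {
      try {
        ServerSocket ss = new ServerSocket(port);
        ss.close();        // in real code, keep the socket open and hand it to the server
        return port;
      } catch (IOException bindFailed) {
        // port in use, try the next one
      }
    }
    throw new IOException("No available port at or above " + initialPort);
  }
}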

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ChannelClosedException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ChannelClosedException.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ChannelClosedException.java
new file mode 100644
index 0000000..e80292f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ChannelClosedException.java
@@ -0,0 +1,39 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+public class ChannelClosedException extends RpcException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ChannelClosedException.class);
+
+  public ChannelClosedException() {
+    super();
+  }
+
+  public ChannelClosedException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public ChannelClosedException(String message) {
+    super(message);
+  }
+
+  public ChannelClosedException(Throwable cause) {
+    super(cause);
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java
new file mode 100644
index 0000000..a924359
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java
@@ -0,0 +1,87 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcFailure;
+
+import com.google.common.util.concurrent.MoreExecutors;
+
+/**
+ * Manages the creation of rpc futures for a particular socket.
+ */
+public class CoordinationQueue{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CoordinationQueue.class);
+  
+  private final PositiveAtomicInteger circularInt = new PositiveAtomicInteger();
+  private final Map<Integer, DrillRpcFuture<?>> map;
+  
+  
+  public CoordinationQueue(int segmentSize, int segmentCount){
+    map = new ConcurrentHashMap<Integer, DrillRpcFuture<?>>(segmentSize, 0.75f, segmentCount);
+  }
+  
+  void channelClosed(Exception ex){
+    for(DrillRpcFuture<?> f : map.values()){
+      f.setException(ex);
+    }
+  }
+  
+  public <V> DrillRpcFuture<V> getNewFuture(Class<V> clazz){
+    int i = circularInt.getNext();
+    DrillRpcFuture<V> future = DrillRpcFuture.getNewFuture(i, clazz);
+//    logger.debug("Writing to map coord {}, future {}", i, future);
+    Object old = map.put(i,  future);
+    if(old != null) throw new IllegalStateException("You attempted to reuse a coordination id when the previous coordination id has not been removed.  This is likely an rpc future callback memory leak.");
+    return future;
+  }
+
+  private DrillRpcFuture<?> removeFromMap(int coordinationId){
+    DrillRpcFuture<?> rpc = map.remove(coordinationId);
+    if(rpc == null){
+      logger.error("Rpc is null.");
+      throw new IllegalStateException("Attempting to retrieve an rpc that wasn't first stored in the rpc coordination queue.  This would most likely happen if your opposite endpoint sent multiple messages on the same coordination id.");
+    }
+    return rpc;
+  }
+  
+  public <V> DrillRpcFuture<V> getFuture(int coordinationId, Class<V> clazz){
+//    logger.debug("Getting future for coordinationId {} and class {}", coordinationId, clazz);
+    DrillRpcFuture<?> rpc = removeFromMap(coordinationId);
+//    logger.debug("Got rpc from map {}", rpc);
+    
+    if(rpc.clazz != clazz){
+      logger.error("Rpc class is not expected class {}", rpc.clazz, clazz);
+      throw new IllegalStateException("You attempted to request a future for a coordination id that has a different value class than was used when you initially created the coordination id.  This shouldn't happen.");
+    }
+    
+    @SuppressWarnings("unchecked")
+    DrillRpcFuture<V> crpc = (DrillRpcFuture<V>) rpc; 
+    
+//    logger.debug("Returning casted future");
+    return crpc;
+  }
+  
+  public void updateFailedFuture(int coordinationId, RpcFailure failure){
+//    logger.debug("Updating failed future.");
+    DrillRpcFuture<?> rpc = removeFromMap(coordinationId);
+    rpc.setException(new RemoteRpcException(failure));
+  }
+}
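
CoordinationQueue is a textbook request/response correlation map: every outbound request takes a fresh coordination id and a pending future, and the matching inbound response (or failure) removes and completes that future. A compact sketch of the same pattern using only JDK types, with CompletableFuture standing in for DrillRpcFuture (class name hypothetical):

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

public class CorrelationMap<V> {
  private final AtomicInteger ids = new AtomicInteger();
  private final Map<Integer, CompletableFuture<V>> pending = new ConcurrentHashMap<Integer, CompletableFuture<V>>();

  // Called by the sender before writing a request to the wire.
  public int register(CompletableFuture<V> future) {
    int id = ids.incrementAndGet();
    if (pending.put(id, future) != null) throw new IllegalStateException("coordination id reused: " + id);
    return id;
  }

  // Called by the inbound handler when the matching response or failure arrives.
  public void complete(int id, V value)  { remove(id).complete(value); }
  public void fail(int id, Throwable t)  { remove(id).completeExceptionally(t); }

  private CompletableFuture<V> remove(int id) {
    CompletableFuture<V> f = pending.remove(id);
    if (f == null) throw new IllegalStateException("no pending request for coordination id " + id);
    return f;
  }
}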

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFuture.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFuture.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFuture.java
new file mode 100644
index 0000000..5a2fd93
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFuture.java
@@ -0,0 +1,92 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import java.util.concurrent.ExecutionException;
+
+import com.google.common.util.concurrent.AbstractCheckedFuture;
+import com.google.common.util.concurrent.AbstractFuture;
+import com.google.common.util.concurrent.ListenableFuture;
+
+public class DrillRpcFuture<V> extends AbstractCheckedFuture<V, RpcException> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillRpcFuture.class);
+
+  final int coordinationId;
+  final Class<V> clazz;
+
+  public DrillRpcFuture(ListenableFuture<V> delegate, int coordinationId, Class<V> clazz) {
+    super(delegate);
+    this.coordinationId = coordinationId;
+    this.clazz = clazz;
+  }
+
+  /**
+   * Drill doesn't currently support rpc cancellations since nearly all requests should be treated as asynchronous.
+   * Business-level cancellation is managed via a separate call (e.g. canceling a query). Calling this method
+   * will result in an UnsupportedOperationException.
+   */
+  @Override
+  public boolean cancel(boolean mayInterruptIfRunning) {
+    throw new UnsupportedOperationException(
+        "Drill doesn't currently support rpc cancellations. See javadocs for more detail.");
+  }
+
+  @Override
+  protected RpcException mapException(Exception ex) {
+    if (ex instanceof RpcException)  return (RpcException) ex;
+    
+    if (ex instanceof ExecutionException) {
+      Throwable e2 = ex.getCause();
+      
+      if (e2 instanceof RpcException) {
+        return (RpcException) e2;
+      }
+    }
+    return new RpcException(ex);
+
+  }
+
+  @SuppressWarnings("unchecked")
+  void setValue(Object value) {
+    assert clazz.isAssignableFrom(value.getClass());
+    ((InnerFuture<V>) super.delegate()).setValue((V) value);
+  }
+
+  boolean setException(Throwable t) {
+    return ((InnerFuture<V>) super.delegate()).setException(t);
+  }
+
+  public static class InnerFuture<T> extends AbstractFuture<T> {
+    // we rewrite these so that the parent can see them
+
+    void setValue(T value) {
+      super.set(value);
+    }
+
+    protected boolean setException(Throwable t) {
+      return super.setException(t);
+    }
+  }
+
+  public static <V> DrillRpcFuture<V> getNewFuture(int coordinationId, Class<V> clazz) {
+    InnerFuture<V> f = new InnerFuture<V>();
+    return new DrillRpcFuture<V>(f, coordinationId, clazz);
+  }
+
+
+}
\ No newline at end of file
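
DrillRpcFuture wraps a Guava AbstractFuture so the sender can block on checkedGet() and receive an RpcException instead of a raw ExecutionException. A usage sketch, placed in org.apache.drill.exec.rpc because setValue is package-private (the demo class itself is hypothetical):

package org.apache.drill.exec.rpc;

public class DrillRpcFutureExample {
  public static void main(String[] args) throws RpcException {
    final DrillRpcFuture<String> f = DrillRpcFuture.getNewFuture(1, String.class);

    // Simulate the inbound handler delivering the decoded response body.
    new Thread(new Runnable() {
      public void run() { f.setValue("pong"); }
    }).start();

    // The caller blocks here; failures surface as RpcException rather than ExecutionException.
    System.out.println(f.checkedGet());
  }
}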

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java
new file mode 100644
index 0000000..ab977db
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java
@@ -0,0 +1,50 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.buffer.ByteBuf;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcMode;
+
+public class InboundRpcMessage extends RpcMessage{
+  public ByteBuf pBody;
+  
+  public InboundRpcMessage(RpcMode mode, int rpcType, int coordinationId, ByteBuf pBody, ByteBuf dBody) {
+    super(mode, rpcType, coordinationId, dBody);
+    this.pBody = pBody;
+  }
+  
+  public int getBodySize(){
+    int len = pBody.capacity();
+    if(dBody != null) len += dBody.capacity();
+    return len;
+  }
+  
+  void release(){
+    pBody.release();
+    super.release();
+  }
+
+  @Override
+  public String toString() {
+    return "InboundRpcMessage [pBody=" + pBody + ", mode=" + mode + ", rpcType=" + rpcType + ", coordinationId="
+        + coordinationId + ", dBody=" + dBody + "]";
+  }
+  
+  
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/NamedThreadFactory.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/NamedThreadFactory.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/NamedThreadFactory.java
new file mode 100644
index 0000000..0df7719
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/NamedThreadFactory.java
@@ -0,0 +1,48 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class NamedThreadFactory implements ThreadFactory {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(NamedThreadFactory.class);
+  private final AtomicInteger nextId = new AtomicInteger();
+  private final String prefix;
+
+  public NamedThreadFactory(String prefix) {
+    this.prefix = prefix;
+  }
+
+  @Override
+  public Thread newThread(Runnable r) {
+    Thread t = new Thread(r, prefix + nextId.incrementAndGet());
+    try {
+      if (t.isDaemon()) {
+        t.setDaemon(true);
+      }
+      if (t.getPriority() != Thread.MAX_PRIORITY) {
+        t.setPriority(Thread.MAX_PRIORITY);
+      }
+    } catch (Exception ignored) {
+      // Doesn't matter even if failed to set.
+    }
+    return t;
+  }
+
+}
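
A short usage sketch: handing the factory to a JDK executor so every worker thread carries a recognizable prefix in thread dumps (the prefix and class name below are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.drill.exec.rpc.NamedThreadFactory;

public class NamedThreadFactoryExample {
  public static void main(String[] args) {
    ExecutorService pool = Executors.newCachedThreadPool(new NamedThreadFactory("drill-rpc-"));
    pool.submit(new Runnable() {
      public void run() {
        System.out.println(Thread.currentThread().getName());  // e.g. drill-rpc-1
      }
    });
    pool.shutdown();
  }
}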

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java
new file mode 100644
index 0000000..bb7644e
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java
@@ -0,0 +1,50 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.buffer.ByteBuf;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcMode;
+
+import com.google.protobuf.MessageLite;
+
+class OutboundRpcMessage extends RpcMessage{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OutboundRpcMessage.class);
+
+  final MessageLite pBody;
+  
+  public OutboundRpcMessage(RpcMode mode, Enum<?> rpcType, int coordinationId, MessageLite pBody, ByteBuf dBody) {
+    super(mode, rpcType.ordinal(), coordinationId, dBody);
+    this.pBody = pBody;
+  }
+  
+  public int getBodySize(){
+    int len = pBody.getSerializedSize();
+    len += RpcEncoder.getRawVarintSize(len);
+    if(dBody != null) len += dBody.capacity();
+    return len;
+  }
+
+  @Override
+  public String toString() {
+    return "OutboundRpcMessage [pBody=" + pBody + ", mode=" + mode + ", rpcType=" + rpcType + ", coordinationId="
+        + coordinationId + ", dBody=" + dBody + "]";
+  }
+
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/PositiveAtomicInteger.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/PositiveAtomicInteger.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/PositiveAtomicInteger.java
new file mode 100644
index 0000000..7408516
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/PositiveAtomicInteger.java
@@ -0,0 +1,39 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+/*
+ * An atomic integer that only ever returns values between 0 and Integer.MAX_VALUE and then starts over.  Should never have a negative overflow.
+ */
+public class PositiveAtomicInteger {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PositiveAtomicInteger.class);
+  
+  private final AtomicInteger internal = new AtomicInteger(Integer.MIN_VALUE);
+  
+  public int getNext(){
+    int i = internal.addAndGet(1);
+    if(i < 0){
+      return i + (-Integer.MIN_VALUE);
+    }else{
+      return i;
+    }
+  }
+  
+}
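
The counter starts at Integer.MIN_VALUE and getNext() shifts negative results into the non-negative range: in two's-complement arithmetic -Integer.MIN_VALUE overflows back to Integer.MIN_VALUE, so i + (-Integer.MIN_VALUE) is effectively i + 2^31 modulo 2^32. A small worked check of that mapping (hypothetical demo class):

public class PositiveMappingDemo {
  // Same expression used in getNext() above.
  static int map(int i) { return i < 0 ? i + (-Integer.MIN_VALUE) : i; }

  public static void main(String[] args) {
    System.out.println(map(Integer.MIN_VALUE));      // 0
    System.out.println(map(Integer.MIN_VALUE + 1));  // 1  (first value the counter hands out)
    System.out.println(map(-1));                     // 2147483647, after which the counter wraps to 0
    System.out.println(map(5));                      // positive values pass through unchanged
  }
}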

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RemoteRpcException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RemoteRpcException.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RemoteRpcException.java
new file mode 100644
index 0000000..2a535a7
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RemoteRpcException.java
@@ -0,0 +1,38 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcFailure;
+
+public class RemoteRpcException extends RpcException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RemoteRpcException.class);
+  
+  private final RpcFailure failure;
+
+  public RemoteRpcException(RpcFailure failure) {
+    super(String.format("Failure while executing rpc.  Remote failure message: [%s].  Error Code: [%d].  Remote Error Id: [%d]", failure.getShortError(), failure.getErrorId(), failure.getErrorCode()));
+    this.failure = failure;
+  }
+
+  public RpcFailure getFailure() {
+    return failure;
+  }
+  
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/Response.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/Response.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/Response.java
new file mode 100644
index 0000000..8a2f48d
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/Response.java
@@ -0,0 +1,41 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.buffer.ByteBuf;
+
+import com.google.protobuf.MessageLite;
+
+public class Response {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Response.class);
+  
+  public Enum<?> rpcType;
+  public MessageLite pBody;
+  public ByteBuf dBody;
+  
+  public Response(Enum<?> rpcType, MessageLite pBody, ByteBuf dBody) {
+    super();
+    this.rpcType = rpcType;
+    this.pBody = pBody;
+    this.dBody = dBody;
+  }
+  
+  
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
new file mode 100644
index 0000000..760bd30
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
@@ -0,0 +1,172 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufInputStream;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundMessageHandlerAdapter;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.util.concurrent.GenericFutureListener;
+
+import java.io.Closeable;
+import java.util.concurrent.CancellationException;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcFailure;
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcMode;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.MessageLite;
+import com.google.protobuf.Parser;
+
+/**
+ * The Rpc Bus deals with incoming and outgoing communication and is used on both the server and the client side of a system.
+ * @param <T>
+ */
+public abstract class RpcBus<T extends Enum<T>> implements Closeable{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RpcBus.class);
+  
+  private CoordinationQueue queue = new CoordinationQueue(16, 16);
+  protected Channel channel;
+
+  protected abstract MessageLite getResponseDefaultInstance(int rpcType) throws RpcException;
+  protected abstract Response handle(SocketChannel channel, int RpcType, ByteBuf pBody, ByteBuf dBody) throws RpcException;
+  public abstract boolean isClient(); 
+
+  
+  protected <SEND extends MessageLite, RECEIVE extends MessageLite> DrillRpcFuture<RECEIVE> send(T rpcType,
+      SEND protobufBody, Class<RECEIVE> clazz, ByteBuf dataBody) throws RpcException {
+    ByteBuf pBuffer = null;
+    boolean completed = false;
+
+    try {
+//      logger.debug("Sending message");
+      Preconditions.checkNotNull(protobufBody);
+      DrillRpcFuture<RECEIVE> rpcFuture = queue.getNewFuture(clazz);
+      OutboundRpcMessage m = new OutboundRpcMessage(RpcMode.REQUEST, rpcType, rpcFuture.coordinationId, protobufBody, dataBody);
+      ChannelFuture channelFuture = channel.write(m);
+      channelFuture.addListener(new Listener(rpcFuture.coordinationId, clazz));
+      completed = true;
+      return rpcFuture;
+    } finally {
+      if (!completed) {
+        if (pBuffer != null) pBuffer.release();
+        if (dataBody != null) dataBody.release();
+      }
+    }
+  }
+
+  
+  public class ChannelClosedHandler implements GenericFutureListener<ChannelFuture>{
+    @Override
+    public void operationComplete(ChannelFuture future) throws Exception {
+      logger.info("Channel closed between local {} and remote {}", future.channel().localAddress(), future.channel().remoteAddress());
+      queue.channelClosed(new ChannelClosedException());
+    }
+  }
+  
+  protected GenericFutureListener<ChannelFuture> getCloseHandler(SocketChannel ch){
+    return new ChannelClosedHandler();
+  }
+  
+  protected class InboundHandler extends ChannelInboundMessageHandlerAdapter<InboundRpcMessage> {
+
+    private final SocketChannel channel;
+    
+    
+    public InboundHandler(SocketChannel channel) {
+      super();
+      this.channel = channel;
+    }
+
+
+    @Override
+    public void messageReceived(ChannelHandlerContext ctx, InboundRpcMessage msg) throws Exception {
+      if(!ctx.channel().isOpen()) return;
+
+      if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Received message {}", msg);
+      switch(msg.mode){
+      case REQUEST:
+        // handle message and ack.
+        Response r = handle(channel, msg.rpcType, msg.pBody, msg.dBody);
+        OutboundRpcMessage outMessage = new OutboundRpcMessage(RpcMode.RESPONSE, r.rpcType, msg.coordinationId, r.pBody, r.dBody);
+        if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Adding message to outbound buffer. {}", outMessage);
+        ctx.write(outMessage);
+        break;
+        
+      case RESPONSE:
+        MessageLite m = getResponseDefaultInstance(msg.rpcType);
+        DrillRpcFuture<?> rpcFuture = queue.getFuture(msg.coordinationId, m.getClass());
+        Parser<?> parser = m.getParserForType();
+        Object value = parser.parseFrom(new ByteBufInputStream(msg.pBody, msg.pBody.readableBytes()));
+        rpcFuture.setValue(value);
+        if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Updated rpc future {} with value {}", rpcFuture, value);
+        break;
+        
+      case RESPONSE_FAILURE:
+        RpcFailure failure = RpcFailure.parseFrom(new ByteBufInputStream(msg.pBody, msg.pBody.readableBytes()));
+        queue.updateFailedFuture(msg.coordinationId, failure);
+        if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Updated rpc future with coordinationId {} with failure ", msg.coordinationId, failure);
+        break;
+        
+      default:
+        throw new UnsupportedOperationException(); 
+      }
+    }
+
+  }
+
+  private class Listener implements GenericFutureListener<ChannelFuture> {
+
+    private int coordinationId;
+    private Class<?> clazz;
+
+    public Listener(int coordinationId, Class<?> clazz) {
+      this.coordinationId = coordinationId;
+      this.clazz = clazz;
+    }
+
+    @Override
+    public void operationComplete(ChannelFuture channelFuture) throws Exception {
+//      logger.debug("Completed channel write.");
+      
+      if (channelFuture.isCancelled()) {
+        DrillRpcFuture<?> rpcFuture = queue.getFuture(coordinationId, clazz);
+        rpcFuture.setException(new CancellationException("Socket operation was canceled."));
+      } else if (!channelFuture.isSuccess()) {
+        try {
+          channelFuture.get();
+          throw new IllegalStateException(
+              "Future was described as completed and not succesful but did not throw an exception.");
+        } catch (Exception e) {
+          DrillRpcFuture<?> rpcFuture = queue.getFuture(coordinationId, clazz);
+          rpcFuture.setException(e);
+        }
+      } else {
+        // send was successful. No need to modify DrillRpcFuture.
+        return;
+      }
+    }
+
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConstants.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConstants.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConstants.java
new file mode 100644
index 0000000..7753e07
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConstants.java
@@ -0,0 +1,26 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+public class RpcConstants {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RpcConstants.class);
+  
+  private RpcConstants(){}
+  
+  public static final boolean EXTRA_DEBUGGING = false;
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java
new file mode 100644
index 0000000..134e54b
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java
@@ -0,0 +1,142 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufInputStream;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.CorruptedFrameException;
+import io.netty.handler.codec.MessageToMessageDecoder;
+
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcHeader;
+
+/**
+ * Converts a previously length adjusted buffer into an RpcMessage.
+ */
+class RpcDecoder extends MessageToMessageDecoder<ByteBuf> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RpcDecoder.class);
+  
+  private final AtomicLong messageCounter = new AtomicLong();
+  
+  @Override
+  protected InboundRpcMessage decode(ChannelHandlerContext ctx, ByteBuf buffer) throws Exception {
+    if(!ctx.channel().isOpen()){
+      return null;
+    }
+    
+    if (RpcConstants.EXTRA_DEBUGGING) logger.debug("Inbound rpc message received.");
+
+    // now, we know the entire message is in the buffer and the buffer is constrained to this message. Additionally,
+    // this process should avoid reading beyond the end of this buffer so we inform the ByteBufInputStream to throw an
+    // exception if we go beyond readable bytes (as opposed to blocking).
+    final ByteBufInputStream is = new ByteBufInputStream(buffer, buffer.readableBytes());
+
+    // read the rpc header, saved in delimited format.
+    checkTag(is, RpcEncoder.HEADER_TAG);
+    final RpcHeader header = RpcHeader.parseDelimitedFrom(is);
+    if (RpcConstants.EXTRA_DEBUGGING) logger.debug("Read header. {}", header);
+
+    if(RpcConstants.EXTRA_DEBUGGING) logger.debug(" post header read index {}", buffer.readerIndex());
+    
+    // read the protobuf body into a buffer.
+    checkTag(is, RpcEncoder.PROTOBUF_BODY_TAG);
+    final int pBodyLength = readRawVarint32(is);
+    final ByteBuf pBody = buffer.slice(buffer.readerIndex(), pBodyLength);
+    buffer.skipBytes(pBodyLength);
+    buffer.retain();
+    if (RpcConstants.EXTRA_DEBUGGING) logger.debug("Read protobuf body of length {} into buffer {}.", pBodyLength, pBody);
+
+    if(RpcConstants.EXTRA_DEBUGGING) logger.debug("post protobufbody read index {}", buffer.readerIndex());
+    
+    ByteBuf dBody = null;
+    int dBodyLength = 0;
+
+    // read the data body.
+    if (buffer.readableBytes() > 0) {
+      
+      if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Reading raw body, buffer has {} bytes available, is available {}.", buffer.readableBytes(), is.available());
+      checkTag(is, RpcEncoder.RAW_BODY_TAG);
+      if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Reading length.");
+      dBodyLength = readRawVarint32(is);
+      if(buffer.readableBytes() != dBodyLength) throw new CorruptedFrameException(String.format("Expected to receive a raw body of %d bytes but received a buffer with %d bytes.", dBodyLength, buffer.readableBytes()));
+      dBody = buffer.slice();
+      buffer.retain();
+      if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Read raw body of {}", dBody);
+      
+    }else{
+      if(RpcConstants.EXTRA_DEBUGGING) logger.debug("No need to read raw body, no readable bytes left.");
+    }
+
+
+    // return the rpc message.
+    InboundRpcMessage m = new InboundRpcMessage(header.getMode(), header.getRpcType(), header.getCoordinationId(),
+        pBody, dBody);
+
+    // move the reader index forward so the next rpc call won't try to work with it.
+    buffer.skipBytes(dBodyLength);
+    messageCounter.incrementAndGet();
+    if (RpcConstants.EXTRA_DEBUGGING) logger.trace("Inbound Rpc Message Decoded {}.", m);
+    return m;
+
+  }
+
+  private void checkTag(ByteBufInputStream is, int expectedTag) throws IOException {
+    int actualTag = readRawVarint32(is);
+    if (actualTag != expectedTag){
+      throw new CorruptedFrameException(String.format("Expected to read a tag of %d but actually received a value of %d.  Happened after reading %d message.", expectedTag, actualTag, messageCounter.get()));
+    }
+  }
+
+  // Taken from CodedInputStream and modified to read from a ByteBufInputStream.
+  public static int readRawVarint32(ByteBufInputStream is) throws IOException {
+    byte tmp = is.readByte();
+    if (tmp >= 0) {
+      return tmp;
+    }
+    int result = tmp & 0x7f;
+    if ((tmp = is.readByte()) >= 0) {
+      result |= tmp << 7;
+    } else {
+      result |= (tmp & 0x7f) << 7;
+      if ((tmp = is.readByte()) >= 0) {
+        result |= tmp << 14;
+      } else {
+        result |= (tmp & 0x7f) << 14;
+        if ((tmp = is.readByte()) >= 0) {
+          result |= tmp << 21;
+        } else {
+          result |= (tmp & 0x7f) << 21;
+          result |= (tmp = is.readByte()) << 28;
+          if (tmp < 0) {
+            // Discard upper 32 bits.
+            for (int i = 0; i < 5; i++) {
+              if (is.readByte() >= 0) {
+                return result;
+              }
+            }
+            throw new CorruptedFrameException("Encountered a malformed varint.");
+          }
+        }
+      }
+    }
+    return result;
+  }
+}
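
For context on the framing RpcDecoder walks through, the following standalone sketch (not part of the patch) reads a protobuf-style varint from a plain stream using the same algorithm as readRawVarint32 above; the byte values are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class VarintReadSketch {

  // Same wire format as readRawVarint32 above: seven payload bits per byte,
  // least significant group first, high bit set on every byte except the last.
  static int readVarint32(DataInputStream in) throws IOException {
    int result = 0;
    for (int shift = 0; shift < 35; shift += 7) {   // at most five bytes for a 32-bit value
      byte b = in.readByte();
      result |= (b & 0x7f) << shift;
      if (b >= 0) return result;                    // high bit clear means last byte
    }
    throw new IOException("Malformed varint.");
  }

  public static void main(String[] args) throws IOException {
    byte[] encoded = {(byte) 0xAC, 0x02};           // 300 encoded as a two-byte varint
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(encoded));
    System.out.println(readVarint32(in));           // prints 300
  }
}

The decoder above applies the same idea against a ByteBufInputStream so that it can then slice the protobuf and raw bodies out of the network buffer without copying them.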

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java
new file mode 100644
index 0000000..8d3d97c
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java
@@ -0,0 +1,127 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufOutputStream;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelOutboundMessageHandlerAdapter;
+
+import java.io.OutputStream;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.CompleteRpcMessage;
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcHeader;
+
+import com.google.protobuf.CodedOutputStream;
+import com.google.protobuf.WireFormat;
+
+/**
+ * Converts an OutboundRpcMessage into wire format.
+ */
+class RpcEncoder extends ChannelOutboundMessageHandlerAdapter<OutboundRpcMessage>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RpcEncoder.class);
+  
+  static final int HEADER_TAG = makeTag(CompleteRpcMessage.HEADER_FIELD_NUMBER, WireFormat.WIRETYPE_LENGTH_DELIMITED);
+  static final int PROTOBUF_BODY_TAG = makeTag(CompleteRpcMessage.PROTOBUF_BODY_FIELD_NUMBER, WireFormat.WIRETYPE_LENGTH_DELIMITED);
+  static final int RAW_BODY_TAG = makeTag(CompleteRpcMessage.RAW_BODY_FIELD_NUMBER, WireFormat.WIRETYPE_LENGTH_DELIMITED);
+  static final int HEADER_TAG_LENGTH = getRawVarintSize(HEADER_TAG);
+  static final int PROTOBUF_BODY_TAG_LENGTH = getRawVarintSize(PROTOBUF_BODY_TAG);
+  static final int RAW_BODY_TAG_LENGTH = getRawVarintSize(RAW_BODY_TAG);
+  
+  
+  @Override
+  public void flush(ChannelHandlerContext ctx, OutboundRpcMessage msg) throws Exception {
+    if(!ctx.channel().isOpen()){
+      return;
+    }
+    
+    try{
+      if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Encoding outbound message {}", msg);
+      // first we build the RpcHeader 
+      RpcHeader header = RpcHeader.newBuilder() //
+          .setMode(msg.mode) //
+          .setCoordinationId(msg.coordinationId) //
+          .setRpcType(msg.rpcType).build();
+      
+      // figure out the full length
+      int headerLength = header.getSerializedSize();
+      int protoBodyLength = msg.pBody.getSerializedSize();
+      int rawBodyLength = msg.dBody == null ? 0 : msg.dBody.readableBytes();
+      int fullLength = //
+          HEADER_TAG_LENGTH + getRawVarintSize(headerLength) + headerLength +   //
+          PROTOBUF_BODY_TAG_LENGTH + getRawVarintSize(protoBodyLength) + protoBodyLength; //
+      
+      if(rawBodyLength > 0){
+        fullLength += (RAW_BODY_TAG_LENGTH + getRawVarintSize(rawBodyLength) + rawBodyLength);
+      }
+
+      // set up buffers.
+      ByteBuf buf = ctx.nextOutboundByteBuffer();
+      OutputStream os = new ByteBufOutputStream(buf);
+      CodedOutputStream cos = CodedOutputStream.newInstance(os);
+
+      // write full length first (this is length delimited stream).
+      cos.writeRawVarint32(fullLength);
+      
+      // write header
+      cos.writeRawVarint32(HEADER_TAG);
+      cos.writeRawVarint32(headerLength);
+      header.writeTo(cos);
+
+      // write protobuf body length and body
+      cos.writeRawVarint32(PROTOBUF_BODY_TAG);
+      cos.writeRawVarint32(protoBodyLength);
+      msg.pBody.writeTo(cos);
+
+      // if exists, write data body and tag.
+      if(msg.dBody != null && msg.dBody.readableBytes() > 0){
+        cos.writeRawVarint32(RAW_BODY_TAG);
+        cos.writeRawVarint32(rawBodyLength);
+        cos.flush(); // flush so that the dBody bytes land after the protobuf body if cos is buffering.
+        buf.writeBytes(msg.dBody);
+      }else{
+        cos.flush();
+      }
+      if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Wrote message with length header of {} bytes and body of {} bytes.", getRawVarintSize(fullLength), fullLength);
+      if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Sent message.  Ending writer index was {}.", buf.writerIndex());
+    
+    }finally{
+      // make sure to release the RpcMessage's underlying byte buffers.
+      msg.release();
+    }
+  }
+  
+  /** Makes a tag value given a field number and wire type, copied from WireFormat since it isn't public.  */
+  static int makeTag(final int fieldNumber, final int wireType) {
+    return (fieldNumber << 3) | wireType;
+  }
+  
+  public static int getRawVarintSize(int value) {
+    int count = 0;
+    while (true) {
+      if ((value & ~0x7F) == 0) {
+        count++;
+        return count;
+      } else {
+        count++;
+        value >>>= 7;
+      }
+    }
+  }
+  
+}
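
To make the frame layout the encoder writes easier to follow, here is a self-contained sketch (not part of the patch) that recomputes the full frame length from illustrative tag and body sizes; the field numbers are assumptions, not the real CompleteRpcMessage field numbers.

public class FrameSizeSketch {

  static final int WIRETYPE_LENGTH_DELIMITED = 2; // protobuf wire type for length-delimited fields

  // Same as RpcEncoder.makeTag: field number in the upper bits, wire type in the lower three.
  static int makeTag(int fieldNumber, int wireType) {
    return (fieldNumber << 3) | wireType;
  }

  // Same result as RpcEncoder.getRawVarintSize: bytes needed to varint-encode value.
  static int varintSize(int value) {
    int count = 1;
    while ((value & ~0x7F) != 0) {
      count++;
      value >>>= 7;
    }
    return count;
  }

  public static void main(String[] args) {
    // Field numbers below are illustrative only.
    int headerTag = makeTag(1, WIRETYPE_LENGTH_DELIMITED);
    int pBodyTag = makeTag(2, WIRETYPE_LENGTH_DELIMITED);
    int headerLength = 12;   // pretend serialized RpcHeader size
    int pBodyLength = 300;   // pretend protobuf body size

    int fullLength = varintSize(headerTag) + varintSize(headerLength) + headerLength
        + varintSize(pBodyTag) + varintSize(pBodyLength) + pBodyLength;

    // On the wire: varint(fullLength), then tag/length/bytes for the header, the
    // protobuf body and (optionally) the raw data body.
    System.out.println("frame body length = " + fullLength); // 317 for these illustrative sizes
  }
}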

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java
new file mode 100644
index 0000000..ca66481
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import org.apache.drill.common.exceptions.DrillIOException;
+
+/**
+ * Parent class for all rpc exceptions.
+ */
+public class RpcException extends DrillIOException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RpcException.class);
+
+  public RpcException() {
+    super();
+  }
+
+  public RpcException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public RpcException(String message) {
+    super(message);
+  }
+
+  public RpcException(Throwable cause) {
+    super(cause);
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcExceptionHandler.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcExceptionHandler.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcExceptionHandler.java
new file mode 100644
index 0000000..ef1b88f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcExceptionHandler.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+
+public class RpcExceptionHandler implements ChannelHandler{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RpcExceptionHandler.class);
+  
+  public RpcExceptionHandler(){
+  }
+  
+  @Override
+  public void beforeAdd(ChannelHandlerContext ctx) throws Exception {
+  }
+
+  @Override
+  public void afterAdd(ChannelHandlerContext ctx) throws Exception {
+  }
+
+  @Override
+  public void beforeRemove(ChannelHandlerContext ctx) throws Exception {
+  }
+
+  @Override
+  public void afterRemove(ChannelHandlerContext ctx) throws Exception {
+  }
+
+  @Override
+  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
+    if(!ctx.channel().isOpen()) return;
+    logger.info("Exception in pipeline.  Closing channel between local " + ctx.channel().localAddress() + " and remote " + ctx.channel().remoteAddress(), cause);
+    ctx.close();
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcMessage.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcMessage.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcMessage.java
new file mode 100644
index 0000000..fd1938d
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcMessage.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.buffer.ByteBuf;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.RpcMode;
+
+public abstract class RpcMessage {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RpcMessage.class);
+  
+  public RpcMode mode;
+  public int rpcType;
+  public int coordinationId;
+  public ByteBuf dBody;
+  
+  public RpcMessage(RpcMode mode, int rpcType, int coordinationId, ByteBuf dBody) {
+    this.mode = mode;
+    this.rpcType = rpcType;
+    this.coordinationId = coordinationId;
+    this.dBody = dBody;
+  }
+  
+  public abstract int getBodySize();
+
+  void release(){
+    if(dBody != null) dBody.release();
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ZeroCopyProtobufLengthDecoder.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ZeroCopyProtobufLengthDecoder.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ZeroCopyProtobufLengthDecoder.java
new file mode 100644
index 0000000..462bc52
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ZeroCopyProtobufLengthDecoder.java
@@ -0,0 +1,80 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.ByteToMessageDecoder;
+import io.netty.handler.codec.CorruptedFrameException;
+
+import com.google.protobuf.CodedInputStream;
+
+/**
+ * Modified version of ProtobufVarint32FrameDecoder that avoids a ByteBuf copy.
+ */
+public class ZeroCopyProtobufLengthDecoder extends ByteToMessageDecoder {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ZeroCopyProtobufLengthDecoder.class);
+
+  @Override
+  protected ByteBuf decode(ChannelHandlerContext ctx, ByteBuf in) throws Exception {
+    if(!ctx.channel().isOpen()){
+      logger.info("Channel is closed, discarding remaining {} byte(s) in buffer.", in.readableBytes());
+      in.skipBytes(in.readableBytes());
+      return null;
+    }
+    
+    in.markReaderIndex();
+    final byte[] buf = new byte[5];
+    for (int i = 0; i < buf.length; i ++) {
+        if (!in.isReadable()) {
+            in.resetReaderIndex();
+            return null;
+        }
+
+        buf[i] = in.readByte();
+        if (buf[i] >= 0) {
+          
+            int length = CodedInputStream.newInstance(buf, 0, i + 1).readRawVarint32();
+            
+            if (length < 0) {
+                throw new CorruptedFrameException("negative length: " + length);
+            }
+            if (length == 0){
+                throw new CorruptedFrameException("Received a message of length 0.");
+            }
+
+            if (in.readableBytes() < length) {
+                in.resetReaderIndex();
+                return null;
+            } else {
+                ByteBuf out = in.slice(in.readerIndex(), length);
+                in.retain();
+                in.skipBytes(length);
+                if(RpcConstants.EXTRA_DEBUGGING) logger.debug(String.format("ReaderIndex is %d after length header of %d bytes and frame body of length %d bytes.", in.readerIndex(), i+1, length));
+                return out;
+            }
+        }
+    }
+
+    // Couldn't find the byte whose MSB is off.
+    throw new CorruptedFrameException("length wider than 32-bit");
+    
+  }
+
+
+}
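
The zero-copy part comes from slicing the frame out of the inbound buffer rather than copying it. The following minimal Netty sketch (independent of the patch) shows that a slice is just a view onto the parent buffer's memory.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class SliceSketch {
  public static void main(String[] args) {
    // A pretend inbound buffer: one length byte (3) followed by a three-byte frame.
    ByteBuf in = Unpooled.wrappedBuffer(new byte[]{3, 10, 20, 30});
    in.readByte();                                 // consume the length prefix (a single byte here)
    ByteBuf frame = in.slice(in.readerIndex(), 3); // view onto the same memory, no copy
    in.skipBytes(3);                               // advance past the frame in the parent buffer
    System.out.println(frame.getByte(0));          // prints 10
  }
}

The decoder above additionally calls retain() on the parent buffer because a slice shares the parent's reference count and must stay valid after the pipeline releases the inbound buffer.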

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitClient.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitClient.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitClient.java
new file mode 100644
index 0000000..b16c6cb
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitClient.java
@@ -0,0 +1,62 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.bit;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.util.concurrent.GenericFutureListener;
+
+import org.apache.drill.exec.proto.ExecProtos.RpcType;
+import org.apache.drill.exec.rpc.BasicClient;
+import org.apache.drill.exec.rpc.Response;
+import org.apache.drill.exec.rpc.RpcException;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import com.google.protobuf.MessageLite;
+
+public class BitClient  extends BasicClient<RpcType>{
+  
+  private final DrillbitContext context;
+  private final BitComHandler handler;
+  
+  public BitClient(BitComHandler handler, ByteBufAllocator alloc, EventLoopGroup eventLoopGroup, DrillbitContext context) {
+    super(alloc, eventLoopGroup);
+    this.context = context;
+    this.handler = handler;
+  }
+  
+  @Override
+  protected MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
+    return handler.getResponseDefaultInstance(rpcType);
+  }
+
+  @Override
+  protected Response handle(SocketChannel ch, int rpcType, ByteBuf pBody, ByteBuf dBody) throws RpcException {
+    return handler.handle(context, rpcType, pBody, dBody);
+  }
+
+  @Override
+  protected GenericFutureListener<ChannelFuture> getCloseHandler(SocketChannel ch) {
+    return super.getCloseHandler(ch);
+  }
+
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BitClient.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitCom.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitCom.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitCom.java
new file mode 100644
index 0000000..5c1bf21
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitCom.java
@@ -0,0 +1,69 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.bit;
+
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.util.concurrent.GenericFutureListener;
+
+import java.io.Closeable;
+
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
+import org.apache.drill.exec.proto.ExecProtos.FragmentStatus;
+import org.apache.drill.exec.proto.ExecProtos.PlanFragment;
+import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.rpc.DrillRpcFuture;
+import org.apache.drill.exec.rpc.RpcBus;
+
+/**
+ * Service that allows one Drillbit to communicate with another. Internally manages whether each particular bit is a server
+ * or a client depending on who initially made the connection. If no connection exists, BitCom is
+ * responsible for making a connection.  BitCom should automatically route local Bit communication directly rather than connecting to itself.
+ */
+public interface BitCom extends Closeable{
+
+  /**
+   * Send a record batch to another node.  
+   * @param node The node id to send the record batch to.
+   * @param batch The record batch to send.
+   * @return A Future<Ack> object that can be used to determine the outcome of sending.
+   */
+  public abstract DrillRpcFuture<Ack> sendRecordBatch(FragmentContext context, DrillbitEndpoint node, RecordBatch batch);
+
+  /**
+   * Send a query PlanFragment to another bit.   
+   * @param context The fragment context of the sender.
+   * @param node The Drillbit endpoint that should execute the fragment.
+   * @param fragment The plan fragment to send.
+   * @return A future that provides the handle of the remote fragment.
+   */
+  public abstract DrillRpcFuture<FragmentHandle> sendFragment(FragmentContext context, DrillbitEndpoint node, PlanFragment fragment);
+  
+  public abstract DrillRpcFuture<Ack> cancelFragment(FragmentContext context, DrillbitEndpoint node, FragmentHandle handle);
+  
+  public abstract DrillRpcFuture<FragmentStatus> getFragmentStatus(FragmentContext context, DrillbitEndpoint node, FragmentHandle handle);
+  
+  
+  public interface TunnelListener extends GenericFutureListener<ChannelFuture> {
+    public void connectionEstablished(SocketChannel channel, DrillbitEndpoint endpoint, RpcBus<?> bus);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitComHandler.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitComHandler.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitComHandler.java
new file mode 100644
index 0000000..94e3eff
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitComHandler.java
@@ -0,0 +1,136 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.bit;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.socket.SocketChannel;
+
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.proto.ExecProtos.BitBatchChunk;
+import org.apache.drill.exec.proto.ExecProtos.BitHandshake;
+import org.apache.drill.exec.proto.ExecProtos.BitStatus;
+import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
+import org.apache.drill.exec.proto.ExecProtos.FragmentStatus;
+import org.apache.drill.exec.proto.ExecProtos.RpcType;
+import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
+import org.apache.drill.exec.rpc.Response;
+import org.apache.drill.exec.rpc.RpcBus;
+import org.apache.drill.exec.rpc.RpcException;
+import org.apache.drill.exec.rpc.bit.BitCom.TunnelListener;
+import org.apache.drill.exec.rpc.bit.BitComImpl.TunnelModifier;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import com.google.protobuf.MessageLite;
+
+public class BitComHandler {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BitComHandler.class);
+  
+  private final TunnelModifier modifier;
+  
+  public BitComHandler(TunnelModifier modifier){
+    this.modifier = modifier;
+  }
+  
+  public TunnelListener getTunnelListener(RpcBus<?>.ChannelClosedHandler internalHandler){
+    return new Listener(internalHandler);
+  }
+  
+  public class Listener implements TunnelListener {
+    final RpcBus<?>.ChannelClosedHandler internalHandler;
+
+    public Listener(RpcBus<?>.ChannelClosedHandler internalHandler) {
+      this.internalHandler = internalHandler;
+    }
+
+    @Override
+    public void operationComplete(ChannelFuture future) throws Exception {
+      logger.debug("BitTunnel closed, removing from BitCom.");
+      internalHandler.operationComplete(future);
+      BitTunnel t = modifier.remove(future.channel());
+      if(t != null) t.shutdownIfClient();
+    }
+
+    @Override
+    public void connectionEstablished(SocketChannel channel, DrillbitEndpoint endpoint, RpcBus<?> bus) {
+      modifier.create(channel, endpoint, bus);
+    }
+
+  }
+
+  
+
+
+  public MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
+    switch (rpcType) {
+    case RpcType.ACK_VALUE:
+      return Ack.getDefaultInstance();
+    case RpcType.HANDSHAKE_VALUE:
+      return BitHandshake.getDefaultInstance();
+    case RpcType.RESP_FRAGMENT_HANDLE_VALUE:
+      return FragmentHandle.getDefaultInstance();
+    case RpcType.RESP_FRAGMENT_STATUS_VALUE:
+      return FragmentStatus.getDefaultInstance();
+    case RpcType.RESP_BIT_STATUS_VALUE:
+      return BitStatus.getDefaultInstance();
+    case RpcType.RESP_BATCH_CHUNK_VALUE:
+      return BitBatchChunk.getDefaultInstance();
+      
+    default:
+      throw new UnsupportedOperationException();
+    }
+  }
+
+  protected Response handle(DrillbitContext context, int rpcType, ByteBuf pBody, ByteBuf dBody) throws RpcException {
+    switch (rpcType) {
+    
+    case RpcType.HANDSHAKE_VALUE:
+      // parse incoming handshake.
+      // get endpoint information.
+      // record endpoint information in registry.
+      // respond with our handshake info.
+      return new Response(RpcType.HANDSHAKE, BitHandshake.getDefaultInstance(), null);
+      
+    case RpcType.REQ_BATCH_CHUNK_VALUE:
+      return new Response(RpcType.RESP_BATCH_CHUNK, BitBatchChunk.getDefaultInstance(), null);
+      
+    case RpcType.REQ_BIT_STATUS_VALUE:
+      return new Response(RpcType.RESP_BIT_STATUS, BitStatus.getDefaultInstance(), null);
+      
+    case RpcType.REQ_CANCEL_FRAGMENT_VALUE:
+      return new Response(RpcType.ACK, Ack.getDefaultInstance(), null);
+
+    case RpcType.REQ_FRAGMENT_STATUS_VALUE:
+      return new Response(RpcType.RESP_FRAGMENT_STATUS, FragmentStatus.getDefaultInstance(), null);
+      
+    case RpcType.REQ_INIATILIZE_FRAGMENT_VALUE:
+      return new Response(RpcType.ACK, Ack.getDefaultInstance(), null);
+      
+    case RpcType.REQ_RECORD_BATCH_VALUE:
+      return new Response(RpcType.RESP_BATCH_CHUNK, BitBatchChunk.getDefaultInstance(), null);
+      
+    default:
+      throw new UnsupportedOperationException();
+    }
+
+  }
+  
+
+  
+  
+}


[2/9] Add flatten and join test executions. Abstract graph classes. Update storage engine definition to be a map. Move plan properties to use enum for plan type. Remove unused tests/resources. Update sql parser for change in storage engine definitio

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/defs/OrderDef.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/defs/OrderDef.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/defs/OrderDef.java
new file mode 100644
index 0000000..b0aa61d
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/defs/OrderDef.java
@@ -0,0 +1,60 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.logical.defs;
+
+import org.apache.drill.common.expression.LogicalExpression;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class OrderDef {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OrderDef.class);
+
+  private final Direction direction;
+  private final LogicalExpression expr;
+
+  @JsonCreator
+  public OrderDef(@JsonProperty("order") Direction direction, @JsonProperty("expr") LogicalExpression expr) {
+    this.expr = expr;
+    // default to ascending unless desc is provided.
+    this.direction = direction == null ? Direction.ASC : direction;
+  }
+  
+  @JsonIgnore
+  public Direction getDirection() {
+    return direction;
+  }
+
+  public LogicalExpression getExpr() {
+    return expr;
+  }
+
+  public String getOrder() {
+    return direction.description;
+  }
+
+  public static enum Direction {
+    ASC("asc"), DESC("desc");
+    public final String description;
+
+    Direction(String d) {
+      description = d;
+    }
+  }
+}
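
A small usage sketch (not Drill code, assuming only the OrderDef class defined above is on the classpath) showing the ascending default; the null expressions are used purely for illustration.

import org.apache.drill.common.logical.defs.OrderDef;
import org.apache.drill.common.logical.defs.OrderDef.Direction;

public class OrderDefSketch {
  public static void main(String[] args) {
    OrderDef byDefault = new OrderDef(null, null);          // no direction supplied
    OrderDef explicit = new OrderDef(Direction.DESC, null); // explicit descending

    System.out.println(byDefault.getDirection()); // ASC, the documented default
    System.out.println(explicit.getOrder());      // "desc"
  }
}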

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/defs/PartitionDef.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/defs/PartitionDef.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/defs/PartitionDef.java
new file mode 100644
index 0000000..12047d5
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/defs/PartitionDef.java
@@ -0,0 +1,55 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.logical.defs;
+
+import org.apache.drill.common.expression.LogicalExpression;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class PartitionDef {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PartitionDef.class);
+
+  private final PartitionType partitionType;
+  private final LogicalExpression[] expressions;
+  private final LogicalExpression[] starts;
+  
+  @JsonCreator
+  public PartitionDef(@JsonProperty("partitionType") PartitionType partitionType, @JsonProperty("exprs") LogicalExpression[] expressions, @JsonProperty("starts") LogicalExpression[] starts) {
+    this.partitionType = partitionType;
+    this.expressions = expressions;
+    this.starts = starts;
+  }
+
+  public PartitionType getPartitionType() {
+    return partitionType;
+  }
+
+  public LogicalExpression[] getExpressions() {
+    return expressions;
+  }
+
+  public LogicalExpression[] getStarts() {
+    return starts;
+  }
+  
+
+  public static enum PartitionType{ 
+    RANDOM, HASH, ORDERED;
+  };
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/AdjacencyList.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/AdjacencyList.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/AdjacencyList.java
deleted file mode 100644
index e896b3d..0000000
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/AdjacencyList.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*******************************************************************************
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package org.apache.drill.common.logical.graph;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.ListMultimap;
-import com.google.common.collect.Multimaps;
-
-
-public class AdjacencyList<N extends Node<?>> {
-  private Set<N> allNodes = new HashSet<N>();
-  private ListMultimap<N, Edge<N>> adjacencies = ArrayListMultimap.create();
-
-  public void addEdge(N source, N target, int weight) {
-    adjacencies.put(source, new Edge<N>(source, target, weight));
-    allNodes.add(source);
-    allNodes.add(target);
-  }
-
-  public void clearVisited(){
-    for (Edge<N> e : adjacencies.values()) {
-      e.from.visited = false;
-      e.to.visited = false;
-    }
-  }
-  
-  public List<Edge<N>> getAdjacent(N source) {
-    return adjacencies.get(source);
-  }
-
-  
-  public void printEdges(){
-    for (Edge<N> e : adjacencies.values()) {
-      System.out.println(e.from.index + " -> " + e.to.index);
-    }
-  }
-  
-  
-//  public void reverseEdge(Edge<N> e) {
-//    adjacencies.get(e.from).remove(e);
-//    addEdge(e.to, e.from, e.weight);
-//  }
-
-//  public void reverseGraph() {
-//    adjacencies = getReversedList().adjacencies;
-//  }
-
-  public AdjacencyList<N> getReversedList() {
-    AdjacencyList<N> newlist = new AdjacencyList<N>();
-    for (Edge<N> e : adjacencies.values()) {
-      newlist.addEdge(e.to, e.from, e.weight);
-    }
-    return newlist;
-  }
-
-  public Set<N> getNodeSet() {
-    return adjacencies.keySet();
-  }
-
-  /**
-   * Get a list of nodes that have no outbound edges.
-   * @return
-   */
-  public Collection<N> getTerminalNodes(){
-    // we have to use the allNodes list as otherwise destination only nodes won't be found.
-    List<N> nodes = new LinkedList<N>(allNodes);
-    
-    for(Iterator<N> i = nodes.iterator(); i.hasNext(); ){
-      final N n = i.next();
-      
-      // remove any nodes that have one or more outbound edges.
-      List<Edge<N>> adjList = this.getAdjacent(n);
-      if(adjList != null && !adjList.isEmpty()) i.remove();
-     
-    }
-    return nodes;
-  }
-  
-  /**
-   * Get a list of all nodes that have no incoming edges.
-   * @return
-   */
-  public Collection<N> getStartNodes(){
-    Set<N> nodes = new HashSet<N>(getNodeSet());
-    for(Edge<N> e : adjacencies.values()){
-      nodes.remove(e.to);
-    }
-    return nodes;
-  }
-  
-  public Collection<Edge<N>> getAllEdges() {
-    return adjacencies.values();
-  }
-  
-  
-  public void fix(){
-    adjacencies = Multimaps.unmodifiableListMultimap(adjacencies);
-    allNodes =  Collections.unmodifiableSet(allNodes);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/Edge.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/Edge.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/Edge.java
deleted file mode 100644
index 599c9eb..0000000
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/Edge.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*******************************************************************************
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package org.apache.drill.common.logical.graph;
-
-
-public class Edge<N> implements Comparable<Edge<N>> {
-
-  final N from, to;
-  final int weight;
-
-  public Edge(final N argFrom, final N argTo, final int argWeight) {
-    from = argFrom;
-    to = argTo;
-    weight = argWeight;
-  }
-
-  public int compareTo(final Edge<N> argEdge) {
-    return weight - argEdge.weight;
-  }
-
-  @Override
-  public String toString() {
-    return "Edge [from=" + from + ", to=" + to + "]";
-  }
-  
-  
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/GraphAlgos.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/GraphAlgos.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/GraphAlgos.java
deleted file mode 100644
index ed3c00b..0000000
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/GraphAlgos.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*******************************************************************************
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package org.apache.drill.common.logical.graph;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GraphAlgos {
-  static final Logger logger = LoggerFactory.getLogger(GraphAlgos.class);
-
-  public static class TopoSorter<N extends Node<?>> {
-    final List<N> sorted = new LinkedList<N>();
-    final AdjacencyList<N> rGraph;
-
-    private TopoSorter(AdjacencyList<N> graph) {
-      graph.clearVisited();
-      
-      this.rGraph = graph.getReversedList();
-      Collection<N> sourceNodes = rGraph.getStartNodes();
-
-      for (N n : sourceNodes) {
-        visit(n);
-      }
-    }
-
-    private void visit(N n) {
-      if (n.visited)
-        return;
-
-      n.visited = true;
-      List<Edge<N>> edges = rGraph.getAdjacent(n);
-      if (edges != null) {
-        for (Edge<N> e : edges) {
-          visit(e.to);
-        }
-      }
-
-      sorted.add(n);
-
-    }
-
-    /**
-     * Execute a depth-first sort on the reversed DAG.
-     * 
-     * @param graph
-     *          The adjacency list for the DAG.
-     * @param sourceNodes
-     *          List of nodes that
-     * @return
-     */
-    public static <N extends Node<?>> List<N> sort(AdjacencyList<N> graph) {
-      TopoSorter<N> ts = new TopoSorter<N>(graph);
-      return ts.sorted;
-    }
-  }
-
-  public static <N extends Node<?>> List<List<N>> checkDirected(AdjacencyList<N> graph) {
-    Tarjan<N> t = new Tarjan<N>();
-    List<List<N>> subgraphs = t.executeTarjan(graph);
-    for (Iterator<List<N>> i = subgraphs.iterator(); i.hasNext();) {
-      List<N> l = i.next();
-      if (l.size() == 1)  i.remove();
-    }
-    return subgraphs;
-  }
-
-  public static class Tarjan<N extends Node<?>> {
-
-    private int index = 0;
-    private List<N> stack = new LinkedList<N>();
-    private List<List<N>> SCC = new LinkedList<List<N>>();
-
-    public List<List<N>> executeTarjan(AdjacencyList<N> graph) {
-      SCC.clear();
-      index = 0;
-      stack.clear();
-      if (graph != null) {
-        List<N> nodeList = new LinkedList<N>(graph.getNodeSet());
-        for (N node : nodeList) {
-          if (node.index == -1) {
-            tarjan(node, graph);
-          }
-        }
-      }
-      return SCC;
-    }
-
-    private List<List<N>> tarjan(N v, AdjacencyList<N> list) {
-      v.index = index;
-      v.lowlink = index;
-      index++;
-      stack.add(0, v);
-      List<Edge<N>> l = list.getAdjacent(v);
-      if (l != null) {
-        for (Edge<N> e : l) {
-          N n = e.to;
-          if (n.index == -1) {
-            tarjan(n, list);
-            v.lowlink = Math.min(v.lowlink, n.lowlink);
-          } else if (stack.contains(n)) {
-            v.lowlink = Math.min(v.lowlink, n.index);
-          }
-        }
-      }
-      if (v.lowlink == v.index) {
-        N n;
-        List<N> component = new LinkedList<N>();
-        do {
-          n = stack.remove(0);
-          component.add(n);
-        } while (n != v);
-        SCC.add(component);
-      }
-      return SCC;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/Node.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/Node.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/Node.java
deleted file mode 100644
index d446b3f..0000000
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/graph/Node.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*******************************************************************************
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package org.apache.drill.common.logical.graph;
-
-
-public class Node<T> implements Comparable<Node<T>> {
-  final T nodeValue;
-  boolean visited = false; // used for Kosaraju's algorithm and Edmonds's
-                           // algorithm
-  int lowlink = -1; // used for Tarjan's algorithm
-  int index = -1; // used for Tarjan's algorithm
-
-  public Node(final T operator) {
-    if(operator == null) throw new IllegalArgumentException("Operator node was null.");
-    this.nodeValue = operator;
-  }
-
-  public int compareTo(final Node<T> argNode) {
-    // just do an identity compare since elsewhere you should ensure that only one node exists for each nodeValue.
-    return argNode == this ? 0 : -1;
-  }
-  
-  @Override
-  public int hashCode() {
-    return nodeValue.hashCode(); 
-  }
-
-  public T getNodeValue(){
-    return nodeValue;
-  }
-
-  @Override
-  public String toString() {
-    return "Node [val=" + nodeValue + "]";
-  }
-
-  
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java
new file mode 100644
index 0000000..05fc49d
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java
@@ -0,0 +1,97 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.List;
+
+import org.apache.drill.common.physical.FieldSet.De;
+import org.apache.drill.common.physical.FieldSet.Se;
+
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import com.google.common.collect.Lists;
+
+@JsonSerialize(using = Se.class)
+@JsonDeserialize(using = De.class)
+public class FieldSet {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FieldSet.class);
+  
+  private List<RecordField> incoming = Lists.newArrayList();
+  private List<RecordField> outgoing = Lists.newArrayList();
+  
+  public FieldSet(Iterable<RecordField> fields){
+    for(RecordField f : fields){
+      if(f.getRoute().isIn()){
+        incoming.add(f);
+      }
+      
+      if(f.getRoute().isOut()){
+        outgoing.add(f);
+      }
+    }
+  }
+  
+
+  public static class De extends StdDeserializer<FieldSet> {
+    
+    public De() {
+      super(FieldSet.class);
+    }
+
+    @Override
+    public FieldSet deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException,
+        JsonProcessingException {
+      Iterable<RecordField> fields = jp.readValueAs(new TypeReference<List<RecordField>>(){});
+      logger.debug("Fields {}", fields);
+      return new FieldSet(fields);
+    }
+
+  }
+
+  public static class Se extends StdSerializer<FieldSet> {
+
+    public Se() {
+      super(FieldSet.class);
+    }
+
+    @Override
+    public void serialize(FieldSet value, JsonGenerator jgen, SerializerProvider provider) throws IOException,
+        JsonGenerationException {
+      HashSet<RecordField> fields = new HashSet<RecordField>();
+      for(RecordField f: value.incoming){
+        fields.add(f);
+      }
+      for(RecordField f: value.outgoing){
+        fields.add(f);
+      }
+      jgen.writeObject(Lists.newArrayList(fields));
+    }
+
+  }
+}
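
The Se/De pattern above wires Jackson to hand-written serializer and deserializer classes through the class-level annotations. The following self-contained sketch (not Drill code; Label is a hypothetical type) shows the same pattern in miniature.

import java.io.IOException;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;

// Hypothetical type used only to illustrate the custom serializer/deserializer pattern.
@JsonSerialize(using = Label.Se.class)
@JsonDeserialize(using = Label.De.class)
public class Label {
  final String value;

  public Label(String value) {
    this.value = value;
  }

  public static class Se extends StdSerializer<Label> {
    public Se() { super(Label.class); }

    @Override
    public void serialize(Label v, JsonGenerator jgen, SerializerProvider provider) throws IOException {
      jgen.writeString(v.value); // write the whole object as a single JSON string
    }
  }

  public static class De extends StdDeserializer<Label> {
    public De() { super(Label.class); }

    @Override
    public Label deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
      return new Label(jp.getValueAsString()); // rebuild the object from that string
    }
  }

  public static void main(String[] args) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(new Label("incoming")); // "incoming" as a JSON string
    System.out.println(mapper.readValue(json, Label.class).value);  // prints incoming
  }
}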

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/POPConfig.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/POPConfig.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/POPConfig.java
new file mode 100644
index 0000000..39a91f2
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/POPConfig.java
@@ -0,0 +1,24 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+public class POPConfig {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(POPConfig.class);
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/POPCost.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/POPCost.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/POPCost.java
new file mode 100644
index 0000000..b2ee440
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/POPCost.java
@@ -0,0 +1,34 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+public class POPCost {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(POPCost.class);
+  
+  long outputRecordCount;
+  long outputRecordSize;
+  
+  
+  public POPCost(long outputRecordCount, long outputRecordSize) {
+    super();
+    this.outputRecordCount = outputRecordCount;
+    this.outputRecordSize = outputRecordSize;
+  }
+
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/PhysicalPlan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/PhysicalPlan.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/PhysicalPlan.java
new file mode 100644
index 0000000..0ef5164
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/PhysicalPlan.java
@@ -0,0 +1,93 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.drill.common.PlanProperties;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.graph.Graph;
+import org.apache.drill.common.graph.GraphAlgos;
+import org.apache.drill.common.logical.StorageEngineConfig;
+import org.apache.drill.common.physical.pop.PhysicalOperator;
+import org.apache.drill.common.physical.pop.SinkPOP;
+import org.apache.drill.common.physical.pop.SourcePOP;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
+
+@JsonPropertyOrder({ "head", "storage", "graph" })
+public class PhysicalPlan {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PhysicalPlan.class);
+  
+  Map<String, StorageEngineConfig> storageEngines;
+  PlanProperties properties;
+  Graph<PhysicalOperator, SinkPOP, SourcePOP> graph;
+  
+  @JsonCreator
+  public PhysicalPlan(@JsonProperty("head") PlanProperties properties, @JsonProperty("storage") Map<String, StorageEngineConfig> storageEngines, @JsonProperty("graph") List<PhysicalOperator> operators){
+    this.storageEngines = storageEngines;
+    this.properties = properties;
+    this.graph = Graph.newGraph(operators, SinkPOP.class, SourcePOP.class);
+  }
+  
+  @JsonProperty("graph")
+  public List<PhysicalOperator> getSortedOperators(){
+    List<PhysicalOperator> list = GraphAlgos.TopoSorter.sort(graph);
+    // reverse the list so that nested references are flattened rather than nested.
+    return Lists.reverse(list);
+  }
+  
+  
+  @JsonProperty("storage")
+  public Map<String, StorageEngineConfig> getStorageEngines() {
+    return storageEngines;
+  }
+
+  @JsonProperty("head")
+  public PlanProperties getProperties() {
+    return properties;
+  }
+
+  /** Parses a physical plan. */
+  public static PhysicalPlan parse(DrillConfig config, String planString) {
+    ObjectMapper mapper = config.getMapper();
+    try {
+      PhysicalPlan plan = mapper.readValue(planString, PhysicalPlan.class);
+      return plan;
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /** Converts a physical plan to a string. (Opposite of {@link #parse}.) */
+  public String unparse(DrillConfig config) {
+    try {
+      return config.getMapper().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+  
+}
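
For reference, a minimal sketch of consuming a parsed plan, assuming the dsort-physical.json test resource added later in this patch is on the test classpath. getSortedOperators() is also what backs the "graph" property, so the loop below walks operators in the same flattened order the plan uses when it is re-serialized.

    import org.apache.drill.common.config.DrillConfig;
    import org.apache.drill.common.physical.PhysicalPlan;
    import org.apache.drill.common.physical.pop.PhysicalOperator;
    import org.apache.drill.common.util.FileUtils;

    import com.google.common.base.Charsets;
    import com.google.common.io.Files;

    public class WalkPlan {
      public static void main(String[] args) throws Exception {
        DrillConfig config = DrillConfig.create();
        String json = Files.toString(FileUtils.getResourceAsFile("/dsort-physical.json"), Charsets.UTF_8);
        PhysicalPlan plan = PhysicalPlan.parse(config, json);
        // Walk operators in the flattened (reference-before-use) order.
        for (PhysicalOperator op : plan.getSortedOperators()) {
          System.out.println(op.getClass().getSimpleName());
        }
        System.out.println(plan.unparse(config));  // round-trip back to JSON
      }
    }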

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/ReadEntry.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/ReadEntry.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/ReadEntry.java
new file mode 100644
index 0000000..47cfb5c
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/ReadEntry.java
@@ -0,0 +1,25 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+/** 
+ * Describes a self-contained unit of read work to be performed by a scan.
+ */
+public interface ReadEntry {
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java
new file mode 100644
index 0000000..821f286
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java
@@ -0,0 +1,79 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+import org.apache.drill.common.expression.types.DataType;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class RecordField {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RecordField.class);
+
+  
+  private String name;
+  private DataType type;
+  private Route route;
+  
+  @JsonCreator
+  public RecordField(@JsonProperty("name") String name, @JsonProperty("type") DataType type, @JsonProperty("route") Route route) {
+    super();
+    this.name = name;
+    this.type = type;
+    this.route = route;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public DataType getType() {
+    return type;
+  }
+
+  public Route getRoute() {
+    return route;
+  }
+  
+  
+  
+  public static enum Route {
+    IN(true, false), 
+    OUT(false, true), 
+    THROUGH(true, true), 
+    OPAQUE(true, true);
+    
+    final boolean in;
+    final boolean out;
+    
+    Route(boolean in, boolean out){
+      this.in = in;
+      this.out = out;
+    }
+
+    public boolean isIn() {
+      return in;
+    }
+
+    public boolean isOut() {
+      return out;
+    }
+    
+  }  
+  
+}
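
As a quick illustration of the Route semantics above (and of how FieldSet buckets fields into its incoming and outgoing lists), a small self-contained check that only touches the enum defined in this file:

    import org.apache.drill.common.physical.RecordField.Route;

    public class RouteCheck {
      public static void main(String[] args) {
        // IN -> incoming only, OUT -> outgoing only, THROUGH/OPAQUE -> both lists.
        for (Route r : Route.values()) {
          System.out.println(r + " in=" + r.isIn() + " out=" + r.isOut());
        }
      }
    }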

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/SetSpec.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/SetSpec.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/SetSpec.java
new file mode 100644
index 0000000..5250dbb
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/SetSpec.java
@@ -0,0 +1,36 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+import java.util.List;
+
+import org.apache.drill.common.expression.types.DataType;
+import org.apache.drill.common.physical.props.PhysicalProp;
+
+public class SetSpec {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SetSpec.class);
+
+  private List<Field> fields;
+  private List<PhysicalProp> traits;
+
+  public class Field {
+    public String name;
+    public DataType type;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/StitchDef.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/StitchDef.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/StitchDef.java
new file mode 100644
index 0000000..d9a7d33
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/StitchDef.java
@@ -0,0 +1,48 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+import org.apache.drill.common.expression.LogicalExpression;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class StitchDef {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StitchDef.class);
+  
+  public static enum StitchMode {RANDOM, NWAY, BLOCK} 
+  
+  private StitchMode mode;
+  private LogicalExpression[] exprs;
+  
+  @JsonCreator 
+  public StitchDef(@JsonProperty("mode") StitchMode mode, @JsonProperty("exprs") LogicalExpression[] exprs) {
+    super();
+    this.mode = mode;
+    this.exprs = exprs;
+  }
+
+  public StitchMode getMode() {
+    return mode;
+  }
+
+  public LogicalExpression[] getExprs() {
+    return exprs;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/ExchangePOP.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/ExchangePOP.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/ExchangePOP.java
new file mode 100644
index 0000000..4c1f08a
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/ExchangePOP.java
@@ -0,0 +1,56 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+import java.util.Iterator;
+
+import org.apache.drill.common.defs.PartitionDef;
+import org.apache.drill.common.physical.FieldSet;
+import org.apache.drill.common.physical.StitchDef;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+@JsonTypeName("exchange")
+public class ExchangePOP extends SingleChildPOP{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExchangePOP.class);
+  
+  private PartitionDef partition;
+  private StitchDef stitch;
+  
+  @JsonCreator
+  public ExchangePOP(@JsonProperty("fields") FieldSet fields, @JsonProperty("partition") PartitionDef partition, @JsonProperty("stitch") StitchDef stitch) {
+    super(fields);
+    this.partition = partition;
+    this.stitch = stitch;
+  }
+
+  
+  
+  public PartitionDef getPartition() {
+    return partition;
+  }
+
+  public StitchDef getStitch() {
+    return stitch;
+  }
+
+
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/POPBase.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/POPBase.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/POPBase.java
new file mode 100644
index 0000000..5d44e2a
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/POPBase.java
@@ -0,0 +1,65 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+import org.apache.drill.common.config.CommonConstants;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.graph.GraphVisitor;
+import org.apache.drill.common.physical.FieldSet;
+import org.apache.drill.common.physical.POPCost;
+import org.apache.drill.common.util.PathScanner;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public abstract class POPBase implements PhysicalOperator{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(POPBase.class);
+  
+  private FieldSet fieldSet;
+  
+  
+  public POPBase(FieldSet fieldSet){
+    this.fieldSet = fieldSet;
+  }
+  
+  public synchronized static Class<?>[] getSubTypes(DrillConfig config){
+    Class<?>[] ops = PathScanner.scanForImplementationsArr(PhysicalOperator.class, config.getStringList(CommonConstants.PHYSICAL_OPERATOR_SCAN_PACKAGES));
+    logger.debug("Adding Physical Operator sub types: {}", ((Object) ops) );
+    return ops;
+  }
+  
+  @JsonProperty("fields")
+  public FieldSet getFieldSet(){
+    return fieldSet;
+  }
+
+  @Override
+  public void accept(GraphVisitor<PhysicalOperator> visitor) {
+    visitor.enter(this);
+    if(this.iterator() == null) throw new IllegalArgumentException("Null iterator for pop: " + this);
+    for(PhysicalOperator o : this){
+      o.accept(visitor);  
+    }
+    visitor.leave(this);
+  }
+
+  @Override
+  public POPCost getCost() {
+    return null;
+  }
+  
+}
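
To show the extension contract this base class implies, here is a hypothetical minimal operator. The "noop" type name and the class itself are invented for illustration; the pattern (extend POPBase, supply an iterator, annotate with @JsonTypeName, live in one of the configured operator packages) is what ScanPOP and the other operators below follow.

    package org.apache.drill.common.physical.pop;

    import java.util.Iterator;

    import org.apache.drill.common.physical.FieldSet;

    import com.fasterxml.jackson.annotation.JsonCreator;
    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.annotation.JsonTypeName;
    import com.google.common.collect.Iterators;

    @JsonTypeName("noop")  // hypothetical type name, not part of this patch
    public class NoopPOP extends POPBase implements SourcePOP {

      @JsonCreator
      public NoopPOP(@JsonProperty("fields") FieldSet fields) {
        super(fields);
      }

      @Override
      public Iterator<PhysicalOperator> iterator() {
        // A leaf operator has no children to visit.
        return Iterators.emptyIterator();
      }
    }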

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/PhysicalOperator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/PhysicalOperator.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/PhysicalOperator.java
new file mode 100644
index 0000000..0a8927a
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/PhysicalOperator.java
@@ -0,0 +1,35 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+import org.apache.drill.common.graph.GraphValue;
+import org.apache.drill.common.physical.FieldSet;
+import org.apache.drill.common.physical.POPCost;
+
+import com.fasterxml.jackson.annotation.JsonIdentityInfo;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import com.fasterxml.jackson.annotation.ObjectIdGenerators;
+
+@JsonPropertyOrder({"@id"})
+@JsonIdentityInfo(generator=ObjectIdGenerators.IntSequenceGenerator.class, property="@id")
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property="pop")
+public interface PhysicalOperator extends GraphValue<PhysicalOperator>{
+  public FieldSet getFieldSet();
+  public POPCost getCost();
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/QuickNWaySortPOP.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/QuickNWaySortPOP.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/QuickNWaySortPOP.java
new file mode 100644
index 0000000..f7fcdb0
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/QuickNWaySortPOP.java
@@ -0,0 +1,50 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.drill.common.defs.OrderDef;
+import org.apache.drill.common.physical.FieldSet;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+@JsonTypeName("quicknwaysort")
+public class QuickNWaySortPOP extends SingleChildPOP{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(QuickNWaySortPOP.class);
+  
+  private List<OrderDef> orderings;
+
+  @JsonCreator
+  public QuickNWaySortPOP(@JsonProperty("fields") FieldSet fieldSet, @JsonProperty("orderings") List<OrderDef> orderings) {
+    super(fieldSet);
+    this.orderings = orderings;
+  }
+
+  @JsonProperty("orderings")
+  public List<OrderDef> getOrderings() {
+    return orderings;
+  }
+
+
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/ScanPOP.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/ScanPOP.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/ScanPOP.java
new file mode 100644
index 0000000..30cb2b0
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/ScanPOP.java
@@ -0,0 +1,75 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.drill.common.JSONOptions;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.physical.FieldSet;
+import org.apache.drill.common.physical.ReadEntry;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
+
+@JsonTypeName("scan")
+public class ScanPOP extends POPBase implements SourcePOP{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ScanPOP.class);
+  
+  private List<JSONOptions> readEntries;
+  private String storageEngine;
+  
+  @JsonCreator
+  public ScanPOP(@JsonProperty("storageengine") String storageEngine, @JsonProperty("entries") List<JSONOptions> readEntries, @JsonProperty("fields") FieldSet fieldSet) {
+    super(fieldSet);
+    this.storageEngine = storageEngine;
+    this.readEntries = readEntries;
+  }
+
+  @JsonProperty("entries")
+  public List<JSONOptions> getReadEntries() {
+    return readEntries;
+  }
+  
+  public <T extends ReadEntry> List<T> getReadEntries(DrillConfig config, Class<T> clazz){
+    List<T> e = Lists.newArrayList();
+    for(JSONOptions o : readEntries){
+      e.add(o.getWith(config,  clazz));
+    }
+    return e;
+  }
+
+  @Override
+  public Iterator<PhysicalOperator> iterator() {
+    return Iterators.emptyIterator();
+  }
+
+  public static org.slf4j.Logger getLogger() {
+    return logger;
+  }
+
+  @JsonProperty("storageengine")
+  public String getStorageEngine() {
+    return storageEngine;
+  }
+  
+}
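
A hedged sketch of how a storage engine might consume the entries above. The FileReadEntry class and its "path" field are invented for illustration; the only real contract is that the target type implements ReadEntry so that getReadEntries(config, clazz) can rebind each JSONOptions entry.

    import org.apache.drill.common.physical.ReadEntry;

    import com.fasterxml.jackson.annotation.JsonCreator;
    import com.fasterxml.jackson.annotation.JsonProperty;

    // Hypothetical carrier for one unit of scan work.
    public class FileReadEntry implements ReadEntry {
      private final String path;

      @JsonCreator
      public FileReadEntry(@JsonProperty("path") String path) {
        this.path = path;
      }

      public String getPath() {
        return path;
      }
    }

    // usage, assuming each entry carries a "path" key:
    //   List<FileReadEntry> entries = scan.getReadEntries(config, FileReadEntry.class);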

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SingleChildPOP.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SingleChildPOP.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SingleChildPOP.java
new file mode 100644
index 0000000..cf0c08b
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SingleChildPOP.java
@@ -0,0 +1,41 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+import java.util.Iterator;
+
+import org.apache.drill.common.physical.FieldSet;
+
+import com.google.common.collect.Iterators;
+
+public abstract class SingleChildPOP extends POPBase{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SingleChildPOP.class);
+  
+  public PhysicalOperator child;
+
+  public SingleChildPOP(FieldSet fieldSet) {
+    super(fieldSet);
+  }
+
+  @Override
+  public Iterator<PhysicalOperator> iterator() {
+    return Iterators.singletonIterator(child);
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SinkPOP.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SinkPOP.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SinkPOP.java
new file mode 100644
index 0000000..da0dcd6
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SinkPOP.java
@@ -0,0 +1,22 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+public interface SinkPOP extends PhysicalOperator{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SinkPOP.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SourcePOP.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SourcePOP.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SourcePOP.java
new file mode 100644
index 0000000..1b7c8e9
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/SourcePOP.java
@@ -0,0 +1,22 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+public interface SourcePOP extends PhysicalOperator{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SourcePOP.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/StorePOP.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/StorePOP.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/StorePOP.java
new file mode 100644
index 0000000..2fbaa99
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/pop/StorePOP.java
@@ -0,0 +1,54 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.pop;
+
+import java.util.List;
+
+import org.apache.drill.common.JSONOptions;
+import org.apache.drill.common.defs.PartitionDef;
+import org.apache.drill.common.physical.FieldSet;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+@JsonTypeName("store")
+public class StorePOP extends SingleChildPOP implements SinkPOP{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StorePOP.class);
+
+  public static enum StoreMode {SYSTEM_CHOICE, PREDEFINED_PARTITIONS};
+  
+  private StoreMode mode;
+  private PartitionDef partitioning;
+  
+  @JsonCreator
+  public StorePOP(@JsonProperty("storageengine") String storageEngineName, @JsonProperty("fields") FieldSet fieldSet, @JsonProperty("mode") StoreMode mode, @JsonProperty("entries") List<JSONOptions> entries) {
+    super(fieldSet);
+    this.mode = mode;
+  }
+
+  public StoreMode getMode() {
+    return mode;
+  }
+
+  public PartitionDef getPartitioning() {
+    return partitioning;
+  }
+
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/OrderProp.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/OrderProp.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/OrderProp.java
new file mode 100644
index 0000000..5e618e7
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/OrderProp.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.props;
+
+import org.apache.drill.common.logical.defs.OrderDef;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+@JsonTypeName("ordered")
+public class OrderProp implements PhysicalProp{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OrderProp.class);
+  
+  private final OrderDef[] orderings;
+
+  @JsonCreator
+  public OrderProp(@JsonProperty("fields") OrderDef[] orderings) {
+    super();
+    this.orderings = orderings;
+  }
+  
+  @JsonProperty("fields")
+  public OrderDef[] getOrderings(){
+    return orderings;
+  }
+  
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/PartitionProp.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/PartitionProp.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/PartitionProp.java
new file mode 100644
index 0000000..d855b73
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/PartitionProp.java
@@ -0,0 +1,36 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.props;
+
+import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.logical.defs.PartitionDef;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+@JsonTypeName("partitioned")
+public class PartitionProp extends PartitionDef implements PhysicalProp{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PartitionProp.class);
+
+  @JsonCreator
+  public PartitionProp(@JsonProperty("partitionType") PartitionType partitionType, @JsonProperty("exprs") LogicalExpression[] expressions, @JsonProperty("starts") LogicalExpression[] starts) {
+    super(partitionType, expressions, starts);
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/PhysicalProp.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/PhysicalProp.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/PhysicalProp.java
new file mode 100644
index 0000000..0776d66
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/PhysicalProp.java
@@ -0,0 +1,24 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.props;
+
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property="trait")
+public interface PhysicalProp {
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/SegmentProp.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/SegmentProp.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/SegmentProp.java
new file mode 100644
index 0000000..d76fe48
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/props/SegmentProp.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical.props;
+
+import org.apache.drill.common.expression.FieldReference;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+@JsonTypeName("segmented")
+public class SegmentProp implements PhysicalProp{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SegmentProp.class);
+  
+  private FieldReference segments;
+
+  @JsonCreator
+  public SegmentProp(@JsonProperty("segments") FieldReference segments) {
+    super();
+    this.segments = segments;
+  }
+
+  public FieldReference getSegments() {
+    return segments;
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/resources/drill-default.conf
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/resources/drill-default.conf b/sandbox/prototype/common/src/main/resources/drill-default.conf
index 1b51bfd..760a6d2 100644
--- a/sandbox/prototype/common/src/main/resources/drill-default.conf
+++ b/sandbox/prototype/common/src/main/resources/drill-default.conf
@@ -1,6 +1,11 @@
-drill.logical: {
-  operator.packages: ["org.apache.drill.common.logical.data"],
-  expression.packages: ["org.apache.drill.common.expression"],
-  function.packages: ["org.apache.drill.common.expression"],
-  storage.packages: []
-}
\ No newline at end of file
+drill: {
+  logical: {
+    operator.packages: ["org.apache.drill.common.logical.data"],
+    expression.packages: ["org.apache.drill.common.expression"],
+    function.packages: ["org.apache.drill.common.expression"],
+    storage.packages: []
+  },
+  physical: {
+    operator.packages: ["org.apache.drill.common.physical.pop"]
+  }
+}
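
A small sketch of reading the new key, assuming CommonConstants.PHYSICAL_OPERATOR_SCAN_PACKAGES resolves to the "drill.physical.operator.packages" path that POPBase.getSubTypes() scans:

    import org.apache.drill.common.config.DrillConfig;

    public class ShowScanPackages {
      public static void main(String[] args) {
        DrillConfig config = DrillConfig.create();
        // Expected to print [org.apache.drill.common.physical.pop] with the defaults above.
        System.out.println(config.getStringList("drill.physical.operator.packages"));
      }
    }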

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/java/org/apache/drill/ExpressionTest.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/java/org/apache/drill/ExpressionTest.java b/sandbox/prototype/common/src/test/java/org/apache/drill/ExpressionTest.java
deleted file mode 100644
index d0ce6fb..0000000
--- a/sandbox/prototype/common/src/test/java/org/apache/drill/ExpressionTest.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package org.apache.drill;
-
-import static org.junit.Assert.*;
-
-import org.junit.Test;
-
-public class ExpressionTest {
-  
-  @Test
-  public void do1(){
-    assertTrue(true);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/java/org/apache/drill/common/physical/ParsePhysicalPlan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/java/org/apache/drill/common/physical/ParsePhysicalPlan.java b/sandbox/prototype/common/src/test/java/org/apache/drill/common/physical/ParsePhysicalPlan.java
new file mode 100644
index 0000000..9656823
--- /dev/null
+++ b/sandbox/prototype/common/src/test/java/org/apache/drill/common/physical/ParsePhysicalPlan.java
@@ -0,0 +1,37 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.util.FileUtils;
+import org.junit.Test;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
+
+public class ParsePhysicalPlan {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParsePhysicalPlan.class);
+  
+  
+  @Test 
+  public void parseSimplePlan() throws Exception{
+    DrillConfig c = DrillConfig.create();
+    PhysicalPlan plan = PhysicalPlan.parse(c, Files.toString(FileUtils.getResourceAsFile("/dsort-physical.json"), Charsets.UTF_8));
+    System.out.println(plan.unparse(c));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/java/org/apache/drill/storage/MockStorageEngineConfig.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/java/org/apache/drill/storage/MockStorageEngineConfig.java b/sandbox/prototype/common/src/test/java/org/apache/drill/storage/MockStorageEngineConfig.java
index 9f2d9cb..bc24b2e 100644
--- a/sandbox/prototype/common/src/test/java/org/apache/drill/storage/MockStorageEngineConfig.java
+++ b/sandbox/prototype/common/src/test/java/org/apache/drill/storage/MockStorageEngineConfig.java
@@ -31,8 +31,8 @@ public class MockStorageEngineConfig extends StorageEngineConfigBase{
   private String url;
   
   @JsonCreator
-  public MockStorageEngineConfig(@JsonProperty("name") String name, @JsonProperty("url") String url) {
-    super(name);
+  public MockStorageEngineConfig(@JsonProperty("url") String url) {
+    this.url = url;
   }
 
   public String getUrl() {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/resources/dsort-physical.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/dsort-physical.json b/sandbox/prototype/common/src/test/resources/dsort-physical.json
new file mode 100644
index 0000000..3c57a0a
--- /dev/null
+++ b/sandbox/prototype/common/src/test/resources/dsort-physical.json
@@ -0,0 +1,76 @@
+{
+    head:{
+        type:"APACHE_DRILL_PHYSICAL",
+        version:"1",
+        generator:{
+            type:"manual"
+        }
+    },
+    storage:{
+        fs1:{
+            type:"mock"
+        }
+    },
+    graph:[
+        {
+            @id:1,
+            pop:"scan",
+            storageengine:"fs1",
+            entries:[{}],
+            fields:[
+                { "name":"key", route: "OUT", type:"LATE"},
+                { "name":"value", route: "OUT", type:"LATE"}
+            ]
+        },
+        {
+            @id:2,
+            child: 1,
+            pop:"quicknwaysort",
+            orderings:[
+                {
+                    order: "DESC",
+                    expr: "data.key"
+                }
+            ],
+            fields:[
+                { "name":"key", route: "THROUGH", type:"LATE"},
+                { "name":"value", route: "OPAQUE", type:"LATE"}
+            ]
+
+        },
+        {
+            @id:3,
+            child: 2,
+            pop:"exchange",
+            partition:{
+                mode:"RANGE",
+                exprs:["key"]
+            },
+            stitch:{
+                mode:"RANDOM"
+            },
+            fields:[
+                { "name":"key", route: "THROUGH", type:"LATE"},
+                { "name":"value", route: "OPAQUE", type:"LATE"}
+            ]
+        },
+        {
+            @id:4,
+            child:3,
+            pop: "store",
+            mode: "SYSTEM_CHOICE",
+            storageengine: "fs1",
+            entries:[
+                {
+                    path:"/sort/sorted/${partition_number}.seq",
+                    key:"Text",
+                    type:"JAVA_SEQUENCE"
+                }
+            ],
+            fields:[
+                { "name":"key", route: "IN", type:"LATE"},
+                { "name":"value", route: "IN", type:"LATE"}
+            ] 
+        }           
+    ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/resources/example1.sql
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/example1.sql b/sandbox/prototype/common/src/test/resources/example1.sql
deleted file mode 100644
index 8e4a413..0000000
--- a/sandbox/prototype/common/src/test/resources/example1.sql
+++ /dev/null
@@ -1,136 +0,0 @@
-
-// data: row per event, each row includes a user value and a day value.
-
-select 'day', count(distinct 'user') as cnt from events, group by day ;
-
-/* Logical Plan
-scan data 
-	group by day{
-		group by user{
-		}combine as user
-		transform 1 as userCnt
-		aggregate sum(userCnt) as cnt
-	}combine as day
-project day, cnt
-
-*/
-
-
-/* Physical Plan (simple)
-scan day, user
-hash_aggregate(day+user, 1 as cnt1)
-hash_aggregate(day, sum(cnt1) as cnt)
-*/
-
-
-/* Physical Plan (distributed-small)
-scan day, user
-streaming_aggregate(day+user, 1 as ignore, partition(day+user) )
-exchange()
-hash_aggregate(day+user, 1 as cnt1)
-streaming_aggregate(day, sum(cnt))
-exchange()
-hash_aggregate(day, sum(cnt))
-union_all()
-*/
-
-
-/* Physical Plan (distributed-large)
-scan day, user
-streaming_aggregate(day+user, 1 as ignore, partition(day+user) )
-exchange()
-hash_aggregate(day+user, 1 as cnt1)
-streaming_aggregate(day, sum(cnt), partition(day))
-exchange()
-hash_aggregate(day, sum(cnt))
-exchange()
-union_all()
-*/
-
-
-/* Physical Plan (distributed-large-rack-aware)
-scan day, user
-streaming_aggregate(day+user, 1 as ignore, rack-partition(day), partition(user))
-exchange()
-hash_aggregate(user, 1 as cnt1)
-streaming_aggregate(day, sum(cnt), partition(day))
-exchange()
-hash_aggregate(day, sum(cnt))
-exchange()
-union_all()
-*/
-
-
-
-
-### Goal
-For each day, what is the total number of unique visitors.
-
-### Data Source
-#### events table
-`record: { user: "1", interaction: "add to cart", datetime: "12/1/2011 3:45pm" }`
-
-
-### SQL Query
-<pre><code>
-SELECT 
-  CONVERT(date, e.datatime) AS 'day', 
-  COUNT(DISTINCT 'e.user') as cnt 
-  FROM events e
-  GROUP BY day 
-</code></pre>
-
-### Logical Query (pseudo)
-<pre><code>scan data 
-        transform convert(date, data.datetime) as day
-	group by day{
-		group by user{
-		}combine as user
-		transform 1 as userCnt
-		aggregate sum(userCnt) as cnt
-	}combine as day
-project day, cnt</code></pre>
-
-
-### Physical Query (pseudo)
-#### Simple
-<pre><code>scan convert(date, datetime) as day, user
-hash_aggregate(day+user, 1 as cnt1)
-hash_aggregate(day, sum(cnt1) as cnt)
-</code></pre>
-
-
-#### Physical Plan (distributed-small)
-<pre><code>scan convert(date, datetime) as day, user
-streaming_aggregate(day+user, 1 as ignore, partition(day+user) )
-exchange()
-hash_aggregate(day+user, 1 as cnt1)
-streaming_aggregate(day, sum(cnt))
-exchange()
-hash_aggregate(day, sum(cnt))
-union_all()
-</code></pre>
-
-#### Physical Plan (distributed-large)
-<pre><code>scan convert(date, datetime) as day, user
-streaming_aggregate(day+user, 1 as ignore, partition(day+user) )
-exchange()
-hash_aggregate(day+user, 1 as cnt1)
-streaming_aggregate(day, sum(cnt), partition(day))
-exchange()
-hash_aggregate(day, sum(cnt))
-exchange()
-union_all()
-</code></pre>
-
-#### Physical Plan (distributed-large-rack-aware)
-<pre><code>scan convert(date, datetime) as day, user
-streaming_aggregate(day+user, 1 as ignore, rack-partition(day), partition(user))
-exchange()
-hash_aggregate(user, 1 as cnt1)
-streaming_aggregate(day, sum(cnt), partition(day))
-exchange()
-hash_aggregate(day, sum(cnt))
-exchange()
-union_all()
-</code></pre>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/resources/example2.sql
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/example2.sql b/sandbox/prototype/common/src/test/resources/example2.sql
deleted file mode 100644
index 843ff62..0000000
--- a/sandbox/prototype/common/src/test/resources/example2.sql
+++ /dev/null
@@ -1,98 +0,0 @@
-// Give me all users who have an out-of-state phone number.
-
-
-SELECT c.id, FLATTEN( LEFT(c.number, 3)) AS prefix 
-	FROM contacts c
-	JOIN areacodes a ON c.state != a.state AND c.prefix == a.prefix
-	GROUP BY c.id, c.state, count(1) as prefixCount
-	ORDER by c.id, c.state;
-
-	
-	
-	
-/*Logical
- * 
-
-scan contacts c
-	explode(c.number){
-	transform( left(c.number, 3), prefix)
-	}flatten(prefix)
-scan areacodes a
-join a,c, (c.state != a.state && c.prefix == a.prefix)
-group c.id, c.state{{
-		aggregate(count(1) as prefixCount)
-		}combine(c.state)
-	}combine(c.id)
-order(c.id, c.state)
-	
-	
-*/
-	
-/* 
-  
-  
-//Physical Simple 
-scan areacodes a, a.prefix, a.state
-scan contacts c, c.id, c.number
-materialize( LEFT(c.number, 3) as prefix)
-loop_join a,c on {conditions}
-hash_aggregate(c.id+c.state, count(1))
-
-
-// Physical distributed		
-scan areacodes a, a.prefix, a.state
-scan contacts c, c.id, c.number
-materialize( LEFT(c.number, 3) as prefix)
-partition(a, a.prefix)
-partition(c, c.prefix)
-loop_join
-	
-	
-/* Physical Plan (simple)
-scan day, user
-hash_aggregate(day+user, 1 as cnt1)
-hash_aggregate(day, sum(cnt1) as cnt)
-*/
-	
-	
-### Goal
-Generate a list of user IDs who have at least one out-of-state phone number.
-
-### Data Source
-#### contacts table
-`record: { id: "1", number: [ "415-555-1212", "408-555-1212" ] }`
-
-#### areacode table
-
-`record: {prefix: "503", state: "OR" }`
-
-### Drill Query
-<pre><code>SELECT c.id, FLATTEN( LEFT(c.number, 3)) AS prefix 
-	FROM contacts c
-	JOIN areacodes a ON c.state != a.state AND c.prefix == a.prefix
-	GROUP BY c.id, c.state, count(1) as prefixCount
-	ORDER by c.id, c.state;
-</code></pre>
-
-### Logical Query (pseudo)
-<pre><code>scan contacts c
-	explode(c.number){
-	transform( left(c.number, 3), prefix)
-	}flatten(prefix)
-scan areacodes a
-join a,c, (c.state != a.state && c.prefix == a.prefix)
-group c.id, c.state{{
-		aggregate(count(1) as prefixCount)
-		}combine(c.state)
-	}combine(c.id)
-order(c.id, c.state)
-</code></pre>
-
-
-### Physical Query (pseudo)
-#### Simple
-scan areacodes a, a.prefix, a.state
-scan contacts c, c.id, c.number
-materialize( LEFT(c.number, 3) as prefix)
-loop_join a,c on {conditions}
-hash_aggregate(c.id+c.state, count(1))

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/resources/example3.sql
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/example3.sql b/sandbox/prototype/common/src/test/resources/example3.sql
deleted file mode 100644
index 7022261..0000000
--- a/sandbox/prototype/common/src/test/resources/example3.sql
+++ /dev/null
@@ -1,3 +0,0 @@
-// Goal
-
-select user, 
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/test/resources/logback.xml
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/logback.xml b/sandbox/prototype/common/src/test/resources/logback.xml
index fbccc38..b79b811 100644
--- a/sandbox/prototype/common/src/test/resources/logback.xml
+++ b/sandbox/prototype/common/src/test/resources/logback.xml
@@ -16,31 +16,30 @@
     </encoder>
   </appender>
 
+<!-- 
   <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
     <file>/logs/test-common.log</file>
     <encoder>
       <pattern>%date %level [%thread] %logger{10} [%file:%line] %msg%n</pattern>
     </encoder>
     <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-	    <!-- daily rollover -->
 	    <fileNamePattern>/logs/test-common.%d{yyyy-MM-dd}.log</fileNamePattern>
-	    <!-- keep 30 days' worth of history -->
 	    <maxHistory>30</maxHistory>
     </rollingPolicy>
   </appender>
-  
+  --> 
   <logger name="org.apache.drill" additivity="false">
     <level value="debug" />
     <appender-ref ref="SOCKET" />
     <appender-ref ref="STDOUT" />
-    <appender-ref ref="FILE" />
+<!--     <appender-ref ref="FILE" /> -->
   </logger>
 
   <root>
     <level value="error" />
     <appender-ref ref="SOCKET" />
     <appender-ref ref="STDOUT" />
-    <appender-ref ref="FILE" />
+<!--     <appender-ref ref="FILE" /> -->
   </root>
 
 </configuration>
\ No newline at end of file


[4/9] basic framework for physical plan. abstraction of graph classes.

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/StartupOptions.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/StartupOptions.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/StartupOptions.java
new file mode 100644
index 0000000..66bbad8
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/StartupOptions.java
@@ -0,0 +1,66 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.server;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import com.beust.jcommander.Parameters;
+
+@Parameters(separators = "=")
+public class StartupOptions {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StartupOptions.class);
+
+  @Parameter(names={"-h", "--help"}, description="Provide description of usage.", help=true)
+  private boolean help = false;
+  
+  @Parameter(names= {"-d", "--debug"}, description="Whether you want to run the program in debug mode.", required=false)
+  private boolean debug = false;
+  
+  @Parameter(names= {"-c", "--config"}, description="Configuration file you want to load.  Defaults to loading 'drill-override.conf' from the classpath.", required=false)
+  private String configLocation = null;
+
+  @Parameter
+  private List<String> exccess = new ArrayList<String>();
+
+  public boolean isDebug() {
+    return debug;
+  }
+
+  public String getConfigLocation() {
+    return configLocation;
+  }
+
+  public List<String> getExccess() {
+    return exccess;
+  }
+
+  public static StartupOptions parse(String[] cliArgs) {
+    logger.debug("Parsing arguments.");
+    StartupOptions args = new StartupOptions();
+    JCommander jc = new JCommander(args, cliArgs);
+    if(args.help){
+      jc.usage();
+      System.exit(0);
+    }
+    return args;
+  }
+  
+}
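
A minimal usage sketch for the StartupOptions class above, assuming JCommander is on the classpath as in this diff; the argument values below are made up for illustration and are not part of the commit:

import org.apache.drill.exec.server.StartupOptions;

public class StartupOptionsExample {
  public static void main(String[] unused) {
    // Because of separators = "=", values are attached with '=' (e.g. --config=/path).
    String[] args = {"--debug", "--config=/etc/drill/drill-override.conf"};
    StartupOptions opts = StartupOptions.parse(args);
    System.out.println("debug=" + opts.isDebug());            // prints: debug=true
    System.out.println("config=" + opts.getConfigLocation()); // prints: config=/etc/drill/drill-override.conf
  }
}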

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java
new file mode 100644
index 0000000..97db72e
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java
@@ -0,0 +1,73 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.service;
+
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.nio.NioEventLoopGroup;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.exception.DrillbitStartupException;
+import org.apache.drill.exec.rpc.NamedThreadFactory;
+import org.apache.drill.exec.rpc.bit.BitCom;
+import org.apache.drill.exec.rpc.bit.BitComImpl;
+import org.apache.drill.exec.rpc.user.UserServer;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import com.google.common.io.Closeables;
+
+public class ServiceEngine implements Closeable{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ServiceEngine.class);
+  
+  UserServer userServer;
+  BitComImpl bitCom;
+  int userPort;
+  int bitPort;
+  DrillbitContext context;
+  
+  public ServiceEngine(DrillbitContext context){
+    ByteBufAllocator allocator = context.getAllocator().getUnderlyingAllocator();
+    userServer = new UserServer(allocator, new NioEventLoopGroup(1, new NamedThreadFactory("UserServer-")), context);
+    bitCom = new BitComImpl(context);
+  }
+  
+  public void start() throws DrillbitStartupException, InterruptedException{
+    userPort = userServer.bind(context.getConfig().getInt(ExecConstants.INITIAL_USER_PORT));
+    bitPort = bitCom.start();
+  }
+  
+  public int getBitPort(){
+    return bitPort;
+  }
+  
+  public int getUserPort(){
+    return userPort;
+  }
+
+  public BitCom getBitCom(){
+    return bitCom;
+  }
+  
+  @Override
+  public void close() throws IOException {
+    Closeables.closeQuietly(userServer);
+    Closeables.closeQuietly(bitCom);
+  }
+}
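
A hedged sketch of how a Drillbit might drive the ServiceEngine above; obtaining a DrillbitContext is outside this diff, so it is taken as a parameter here rather than constructed:

import java.io.IOException;

import org.apache.drill.exec.exception.DrillbitStartupException;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.service.ServiceEngine;

public class ServiceEngineSketch {
  // Starts the user-facing and bit-facing servers, reports the bound ports, then shuts down.
  public static void run(DrillbitContext context)
      throws DrillbitStartupException, InterruptedException, IOException {
    ServiceEngine engine = new ServiceEngine(context);
    try {
      engine.start();
      System.out.println("user port: " + engine.getUserPort());
      System.out.println("bit port: " + engine.getBitPort());
    } finally {
      engine.close();
    }
  }
}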

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/QueryOptimizerRule.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/QueryOptimizerRule.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/QueryOptimizerRule.java
new file mode 100644
index 0000000..0536206
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/QueryOptimizerRule.java
@@ -0,0 +1,21 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.store;
+
+public interface QueryOptimizerRule {
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
new file mode 100644
index 0000000..9fc4165
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
@@ -0,0 +1,49 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.store;
+
+import org.apache.drill.exec.exception.ExecutionSetupException;
+import org.apache.drill.exec.ops.OutputMutator;
+import org.apache.drill.exec.record.BatchSchema;
+
+public interface RecordReader {
+
+  /**
+   * Configure the RecordReader with the provided schema and the record batch that should be written to.
+   * 
+   * @param expectedSchema
+   *          The set of fields that should be written to as well as the expected types for those fields. If the
+   *          RecordReader has a known schema and the expectedSchema does not match the actual schema, an
+   *          ExecutionSetupException will be thrown.
+   * @param output
+   *          The place where output for a particular scan should be written. The record reader is responsible for
+   *          mutating the set of schema values for that particular record.
+   * @throws ExecutionSetupException
+   */
+  public abstract void setup(BatchSchema expectedSchema, OutputMutator output) throws ExecutionSetupException;
+
+  /**
+   * Advance the record reader, writing into the provided output batch.
+   * 
+   * @return The number of additional records added to the output.
+   */
+  public abstract int next();
+
+  public abstract void cleanup();
+
+}
\ No newline at end of file
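
To make the RecordReader contract above concrete, here is a minimal no-op implementation; it is a sketch only, since the OutputMutator API is not part of this diff and a real reader would use it to populate the output batch:

import org.apache.drill.exec.exception.ExecutionSetupException;
import org.apache.drill.exec.ops.OutputMutator;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.store.RecordReader;

public class EmptyRecordReader implements RecordReader {
  private OutputMutator output;

  @Override
  public void setup(BatchSchema expectedSchema, OutputMutator output) throws ExecutionSetupException {
    // A real reader would validate expectedSchema here and throw ExecutionSetupException on a mismatch.
    this.output = output;
  }

  @Override
  public int next() {
    // Produces no records; a real reader would write into the output batch and return the count added.
    return 0;
  }

  @Override
  public void cleanup() {
    // Nothing to release in this sketch.
  }
}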

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordRecorder.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordRecorder.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordRecorder.java
new file mode 100644
index 0000000..c5fcc42
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordRecorder.java
@@ -0,0 +1,36 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.store;
+
+import java.io.IOException;
+
+import org.apache.drill.exec.record.RecordBatch;
+
+public interface RecordRecorder {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RecordRecorder.class);
+  
+  public void setup() throws IOException;
+  
+  /**
+   * Write the given record batch to the underlying storage.
+   * 
+   * @param batch the record batch to persist
+   * @return true if the batch was recorded successfully
+   */
+  public boolean record(RecordBatch batch);
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageEngine.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageEngine.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageEngine.java
new file mode 100644
index 0000000..83749c7
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageEngine.java
@@ -0,0 +1,92 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.store;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.drill.common.logical.data.Scan;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+
+import com.google.common.collect.ListMultimap;
+
+public interface StorageEngine {
+  public boolean supportsRead();
+
+  public boolean supportsWrite();
+
+  public enum PartitionCapabilities {
+    NONE, HASH, RANGE;
+  }
+
+  public List<QueryOptimizerRule> getOptimizerRules();
+
+  /**
+   * Get the set of read entries required for a particular Scan (read) node. This is somewhat analogous to input
+   * splits in traditional MapReduce, except that these entries represent the most granular units into which a read
+   * can be split.
+   * 
+   * @param scan
+   *          The configured scan entries.
+   * @return
+   * @throws IOException
+   */
+  public Collection<ReadEntry> getReadEntries(Scan scan) throws IOException;
+
+  /**
+   * Get the set of Drillbit endpoints that are available for each read entry. Note that it is possible for a read entry
+   * to have no Drillbit locations. In that case, the multimap will contain no values for that read entry.
+   * 
+   * @return Multimap from ReadEntry to DrillbitEndpoint for ReadEntries with available locations.
+   */
+  public ListMultimap<ReadEntry, DrillbitEndpoint> getReadLocations(Collection<ReadEntry> entries);
+
+  /**
+   * Get a particular reader for a fragment context.
+   * @param context
+   * @param readEntry
+   * @return
+   * @throws IOException
+   */
+  public RecordReader getReader(FragmentContext context, ReadEntry readEntry) throws IOException;
+
+  /**
+   * 
+   * @param context
+   * @param writeEntry
+   * @return
+   * @throws IOException
+   */
+  public RecordRecorder getWriter(FragmentContext context, WriteEntry writeEntry) throws IOException;
+
+  
+  public interface ReadEntry {
+    public Cost getCostEstimate();
+  }
+
+  public interface WriteEntry {
+  }
+
+  public static class Cost {
+    public long disk;
+    public long network;
+    public long memory;
+    public long cpu;
+  }
+}
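
A skeletal read-only implementation of the StorageEngine interface above, illustrating which methods a plugin must provide. This is a sketch only: it assumes an engine with no data, so it returns empty read entries and no locations rather than doing real work.

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.apache.drill.common.logical.data.Scan;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.store.QueryOptimizerRule;
import org.apache.drill.exec.store.RecordReader;
import org.apache.drill.exec.store.RecordRecorder;
import org.apache.drill.exec.store.StorageEngine;

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

public class ReadOnlyStorageEngineSketch implements StorageEngine {

  @Override
  public boolean supportsRead() { return true; }

  @Override
  public boolean supportsWrite() { return false; }

  @Override
  public List<QueryOptimizerRule> getOptimizerRules() {
    return Collections.emptyList(); // no engine-specific rewrites
  }

  @Override
  public Collection<ReadEntry> getReadEntries(Scan scan) throws IOException {
    // A real engine would translate the scan's configured entries into granular splits.
    return Collections.emptyList();
  }

  @Override
  public ListMultimap<ReadEntry, DrillbitEndpoint> getReadLocations(Collection<ReadEntry> entries) {
    // No affinity information: every entry is left without locations.
    return ArrayListMultimap.create();
  }

  @Override
  public RecordReader getReader(FragmentContext context, ReadEntry readEntry) throws IOException {
    throw new IOException("no data in this sketch");
  }

  @Override
  public RecordRecorder getWriter(FragmentContext context, WriteEntry writeEntry) throws IOException {
    throw new UnsupportedOperationException("read-only engine");
  }
}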

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageEngineRegistry.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageEngineRegistry.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageEngineRegistry.java
new file mode 100644
index 0000000..b2e31e9
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageEngineRegistry.java
@@ -0,0 +1,82 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.store;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.logical.StorageEngineConfig;
+import org.apache.drill.common.util.PathScanner;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.exception.SetupException;
+import org.apache.drill.exec.server.DrillbitContext;
+
+public class StorageEngineRegistry {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StorageEngineRegistry.class);
+  
+  private Map<Object, Constructor<? extends StorageEngine>> availableEngines = new HashMap<Object, Constructor<? extends StorageEngine>>();
+  private Map<StorageEngineConfig, StorageEngine> activeEngines = new HashMap<StorageEngineConfig, StorageEngine>();
+
+  private DrillbitContext context;
+  public StorageEngineRegistry(DrillbitContext context){
+    this.context = context;
+    setup(context.getConfig());
+  }
+  
+  @SuppressWarnings("unchecked")
+  public void setup(DrillConfig config){
+    Collection<Class<? extends StorageEngine>> engines = PathScanner.scanForImplementations(StorageEngine.class, config.getStringList(ExecConstants.STORAGE_ENGINE_SCAN_PACKAGES));
+    logger.debug("Loading storage engines {}", engines);
+    for(Class<? extends StorageEngine> engine: engines){
+      int i =0;
+      for(Constructor<?> c : engine.getConstructors()){
+        Class<?>[] params = c.getParameterTypes();
+        if(params.length != 2 || params[1] != DrillbitContext.class || !StorageEngineConfig.class.isAssignableFrom(params[0])){
+          logger.debug("Skipping StorageEngine constructor {} for engine class {} since it doesn't implement a [constructor(StorageEngineConfig, DrillbitContext)]", c, engine);
+          continue;
+        }
+        availableEngines.put(params[0], (Constructor<? extends StorageEngine>) c);
+        i++;
+      }
+      if(i == 0){
+        logger.debug("Skipping registration of StorageEngine {} as it doesn't have a constructor with the parameters (StorageEngineConfig, DrillbitContext)", engine.getCanonicalName());
+      }
+    }
+  }
+  
+  public StorageEngine getEngine(StorageEngineConfig engineConfig) throws SetupException{
+    StorageEngine engine = activeEngines.get(engineConfig);
+    if(engine != null) return engine;
+    Constructor<? extends StorageEngine> c = availableEngines.get(engineConfig.getClass());
+    if(c == null) throw new SetupException(String.format("Failure finding StorageEngine constructor for config %s", engineConfig));
+    try {
+      engine = c.newInstance(engineConfig, context);
+      activeEngines.put(engineConfig, engine);
+      return engine;
+    } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
+      Throwable t = e instanceof InvocationTargetException ? ((InvocationTargetException)e).getTargetException() : e;
+      if(t instanceof SetupException) throw ((SetupException) t);
+      throw new SetupException(String.format("Failure setting up new storage engine configuration for config %s", engineConfig), t);
+    }
+  }
+  
+
+  
+}
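
A short usage sketch for the registry above; both the DrillbitContext and the StorageEngineConfig are assumed to be supplied by the running Drillbit and a parsed logical plan respectively, and neither is constructed here:

import org.apache.drill.common.logical.StorageEngineConfig;
import org.apache.drill.exec.exception.SetupException;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.store.StorageEngine;
import org.apache.drill.exec.store.StorageEngineRegistry;

public class RegistrySketch {
  public static StorageEngine lookup(DrillbitContext context, StorageEngineConfig engineConfig) throws SetupException {
    StorageEngineRegistry registry = new StorageEngineRegistry(context);
    // The registry picks the engine constructor registered for engineConfig's concrete class.
    return registry.getEngine(engineConfig);
  }
}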

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/protobuf/Coordination.proto
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/protobuf/Coordination.proto b/sandbox/prototype/exec/java-exec/src/main/protobuf/Coordination.proto
new file mode 100644
index 0000000..43c408d
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/protobuf/Coordination.proto
@@ -0,0 +1,32 @@
+package exec;
+
+option java_package = "org.apache.drill.exec.proto";
+option java_outer_classname = "CoordinationProtos";
+option optimize_for = LITE_RUNTIME;
+
+message DrillbitEndpoint{
+  optional string address = 1;
+  optional int32 user_port = 2;
+  optional int32 bit_port = 3;
+  optional Roles roles = 4;
+}
+
+message DrillServiceInstance{
+  optional string id = 1;
+  optional int64 registrationTimeUTC = 2;
+  optional DrillbitEndpoint endpoint = 3;
+}
+
+message WorkQueueStatus{
+	optional DrillbitEndpoint endpoint = 1;
+	optional int32 queue_length = 2;
+	optional int64 report_time = 3;
+}
+
+message Roles{
+	optional bool sql_query = 1 [default = true];
+	optional bool logical_plan = 2 [default = true];
+	optional bool physical_plan = 3 [default = true];
+	optional bool java_executor = 4 [default = true];
+	optional bool distributed_cache = 5 [default = true];
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/protobuf/ExecutionProtos.proto
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/protobuf/ExecutionProtos.proto b/sandbox/prototype/exec/java-exec/src/main/protobuf/ExecutionProtos.proto
new file mode 100644
index 0000000..cd8bda2
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/protobuf/ExecutionProtos.proto
@@ -0,0 +1,65 @@
+package exec.bit;
+
+option java_package = "org.apache.drill.exec.proto";
+option java_outer_classname = "ExecProtos";
+option optimize_for = LITE_RUNTIME;
+import "SchemaDef.proto";
+import "Coordination.proto";
+
+
+////// UserToBit RPC ///////
+enum RpcType {
+    HANDSHAKE = 0;
+    ACK = 1;
+    GOODBYE = 2;
+    
+    // bit requests
+    REQ_INIATILIZE_FRAGMENT = 3; // Returns Handle
+    REQ_RECORD_BATCH = 4; // send record batch overview, returns Ack
+    REQ_BATCH_CHUNK = 5; // send additional batch chunk, returns Ack.
+    REQ_CANCEL_FRAGMENT = 6; // send a cancellation message for a fragment, returns Ack
+	REQ_FRAGMENT_STATUS = 7; // get a fragment status, returns FragmentStatus
+	REQ_BIT_STATUS = 8; // get bit status.
+	    
+    // bit responses
+    RESP_FRAGMENT_HANDLE = 9;
+    RESP_FRAGMENT_STATUS = 10;
+	RESP_BIT_STATUS = 11;
+	RESP_BATCH_CHUNK = 12;
+}
+
+
+message BitColumnData {
+
+    enum ColumnEncoding {
+      PROTOBUF = 0;
+    }
+    
+	message BitColumn {
+		optional int32 field = 1;
+		optional int32 length = 2;
+		optional ColumnEncoding mode = 3;
+	}	
+	
+	optional SchemaDef schema = 1;
+	optional int32 record_count = 2;
+	optional int32 total_size = 3;
+	repeated BitColumn column = 4;
+	
+}
+
+
+message BitHandshake{
+	optional DrillbitEndpoint endpoint = 1;
+}
+
+message BitBatchChunk {}
+message BitStatus {}
+message FragmentStatus {}
+message RecordBatchHeader {}
+message PlanFragment {}
+
+message FragmentHandle {
+	optional int64 fragment_id = 1;
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/protobuf/GeneralRPC.proto
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/protobuf/GeneralRPC.proto b/sandbox/prototype/exec/java-exec/src/main/protobuf/GeneralRPC.proto
new file mode 100644
index 0000000..ebc7dca
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/protobuf/GeneralRPC.proto
@@ -0,0 +1,35 @@
+package exec.rpc;
+
+option java_package = "org.apache.drill.exec.proto";
+option java_outer_classname = "GeneralRPCProtos";
+option optimize_for = LITE_RUNTIME;
+
+message Ack{
+	optional bool ok = 1;
+}
+
+enum RpcMode {
+  REQUEST = 0;
+  RESPONSE = 1;
+  RESPONSE_FAILURE = 2;
+}
+
+message RpcHeader{
+	optional RpcMode mode = 1; 
+	optional int32 coordination_id = 2; // Reusable coordination identifier.  The sender defines it and the server echoes it in its response.  Irrelevant for purely one-directional rpc.
+	optional int32 rpc_type = 3; // An rpc type specific to the rpc mode.
+}
+
+message CompleteRpcMessage {
+    optional RpcHeader header = 1; // required
+    optional bytes protobuf_body = 2; // required
+    optional bytes raw_body = 3; // optional
+}
+
+// Message to be used when an unexpected exception occurs while an rpc call is being evaluated.
+message RpcFailure {
+  optional int64 error_id = 1; // for server-side traceback.
+  optional int32 error_code = 2; // system defined error code.
+  optional string short_error = 3;
+  optional string long_error = 4;
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/protobuf/SchemaDef.proto
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/protobuf/SchemaDef.proto b/sandbox/prototype/exec/java-exec/src/main/protobuf/SchemaDef.proto
new file mode 100644
index 0000000..44e2df9
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/protobuf/SchemaDef.proto
@@ -0,0 +1,37 @@
+package exec;
+
+option java_package = "org.apache.drill.exec.proto";
+option java_outer_classname = "SchemaDefProtos";
+option optimize_for = LITE_RUNTIME;
+
+
+// Schema Definitions //
+enum DataType {
+  LATE = 0;
+  INT32 = 1;
+  INT64 = 2;
+  FLOAT32 = 3;
+  FLOAT64 = 4;
+  UTF8 = 5;
+  BYTES = 6;
+}
+
+enum DataMode {
+  REQUIRED = 0;
+  OPTIONAL = 1;
+  REPEATED = 2;
+  MAP = 3; 
+}
+
+message SchemaDef {
+  repeated FieldDef field = 1;
+}
+
+message FieldDef {
+  optional string name = 1;
+  optional DataMode mode = 2;
+  
+  // If DataMode == 0-2, type should be populated and fields should be empty.  Otherwise, type should be empty and fields should be defined. 
+  optional DataType type = 3;
+  repeated FieldDef fields = 4;
+}
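
To illustrate the mode/type comment in FieldDef above, here is a hedged Java sketch that builds a schema with one scalar field and one MAP field; it assumes the standard protobuf-java code generation for the definitions in this file (outer class SchemaDefProtos):

import org.apache.drill.exec.proto.SchemaDefProtos.DataMode;
import org.apache.drill.exec.proto.SchemaDefProtos.DataType;
import org.apache.drill.exec.proto.SchemaDefProtos.FieldDef;
import org.apache.drill.exec.proto.SchemaDefProtos.SchemaDef;

public class SchemaDefSketch {
  public static SchemaDef example() {
    // Scalar field: a REQUIRED/OPTIONAL/REPEATED mode sets `type` and leaves `fields` empty.
    FieldDef id = FieldDef.newBuilder()
        .setName("id")
        .setMode(DataMode.REQUIRED)
        .setType(DataType.INT64)
        .build();

    // Map field: MAP mode leaves `type` unset and describes children through nested `fields`.
    FieldDef user = FieldDef.newBuilder()
        .setName("user")
        .setMode(DataMode.MAP)
        .addFields(FieldDef.newBuilder()
            .setName("name")
            .setMode(DataMode.OPTIONAL)
            .setType(DataType.UTF8))
        .build();

    return SchemaDef.newBuilder().addField(id).addField(user).build();
  }
}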

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/protobuf/User.proto
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/protobuf/User.proto b/sandbox/prototype/exec/java-exec/src/main/protobuf/User.proto
new file mode 100644
index 0000000..225d1a0
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/protobuf/User.proto
@@ -0,0 +1,93 @@
+package exec.user;
+
+option java_package = "org.apache.drill.exec.proto";
+option java_outer_classname = "UserProtos";
+option optimize_for = LITE_RUNTIME;
+import "SchemaDef.proto";
+
+////// UserToBit RPC ///////
+enum RpcType {
+    HANDSHAKE = 0;
+    ACK = 1;
+    GOODBYE = 2;
+    
+    // user to bit
+    RUN_QUERY = 3;
+    REQUEST_RESULTS = 4;
+    
+    // bit to user
+	QUERY_RESULT = 6;
+	QUERY_HANDLE = 7;
+}
+
+message UserToBitHandshake {
+    optional bool support_listening = 1;
+    optional int32 rpc_version = 2;
+}
+
+message RequestResults {
+  optional int64 query_id = 1;
+  optional int32 maximum_responses = 2;
+}
+
+message RunQuery {
+  optional QueryResultsMode mode = 1;
+  optional string plan = 2;
+}
+
+enum QueryResultsMode {
+	STREAM_FULL = 1; // The server will inform the client regularly of the status of the query.  Once the query is completed, the server will inform the client as each query chunk becomes available.
+	STREAM_FIRST = 2; // The server will inform the client regularly of the status of the query.  Once the query is completed, the server will inform the client of the first query chunk.
+	QUERY_FOR_STATUS = 3; // The client will need to poll for the status of the query.
+}
+
+
+message BitToUserHandshake {
+	optional int32 rpc_version = 1;
+}
+
+message QueryHandle {
+  	optional int64 query_id = 1;
+}
+
+message NodeStatus {
+	optional int32 node_id = 1;
+	optional int64 memory_footprint = 2;
+}
+
+message QueryResult {
+	enum Outcome {
+	  RUNNING = 0;
+	  FAILED = 1;
+	  COMPLETED = 2;
+	  WAITING = 3;
+	}
+	
+	optional Outcome outcome = 1;
+	optional SchemaDef schema = 2;
+	optional bool is_last_chunk = 3;
+	optional int32 row_count = 4;
+	optional int64 records_scan = 5;
+	optional int64 records_error = 6;
+	optional int64 submission_time = 7;
+	repeated NodeStatus node_status = 8;	
+	repeated Error error = 9;
+}
+
+message TextErrorLocation{
+    optional int32 start_column = 2;
+    optional int32 start_row = 3;
+    optional int32 end_column = 4;
+    optional int32 end_row = 5;
+}
+
+message Error{
+    optional int64 error_id = 1; // for debug tracing purposes
+    optional string host = 2;
+    optional int32 error_type = 3; 
+    optional string message = 4;
+    optional TextErrorLocation error = 5; //optional, used when providing location of error within a piece of text.
+}
+
+
+

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/resources/drill-module.conf
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/resources/drill-module.conf b/sandbox/prototype/exec/java-exec/src/main/resources/drill-module.conf
new file mode 100644
index 0000000..ad18d6e
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/resources/drill-module.conf
@@ -0,0 +1,28 @@
+//  This file tells Drill to consider this module during classpath scanning.
+//  This file can also include any supplementary configuration information.
+//  This file is in HOCON format; see https://github.com/typesafehub/config/blob/master/HOCON.md for more information.
+drill.exec: {
+  cluster-id: "drillbits1"
+  rpc: {
+  	user.port : 31010,
+  	bit.port : 31011
+  },
+  optimizer: {
+    implementation: "org.apache.drill.exec.opt.IdentityOptimizer"
+  },
+  
+  zk: {
+	connect: "localhost:2181",
+	root: "/drill",
+	refresh: 500,
+	timeout: 1000,
+	retry: {
+	  count: 7200,
+	  delay: 500
+	}    
+  }
+
+  network: {
+    start: 35000
+  }
+}
\ No newline at end of file
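
Since the file above is HOCON, its values can be read with the Typesafe Config library referenced in its header comment; the sketch below is an illustration only and not Drill's own loading path (which also merges override files):

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class DrillModuleConfSketch {
  public static void main(String[] args) {
    // Parses drill-module.conf from the classpath and resolves substitutions.
    Config config = ConfigFactory.parseResources("drill-module.conf").resolve();
    System.out.println(config.getString("drill.exec.cluster-id")); // drillbits1
    System.out.println(config.getInt("drill.exec.rpc.user.port")); // 31010
    System.out.println(config.getString("drill.exec.zk.connect")); // localhost:2181
  }
}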

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/java/BBOutputStream.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/java/BBOutputStream.java b/sandbox/prototype/exec/java-exec/src/test/java/BBOutputStream.java
new file mode 100644
index 0000000..7f03dfa
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/java/BBOutputStream.java
@@ -0,0 +1,38 @@
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+public class BBOutputStream extends OutputStream{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BBOutputStream.class);
+
+  private ByteBuffer buf;
+  
+  public BBOutputStream(ByteBuffer buf) {
+    this.buf = buf;
+  }
+
+  @Override
+  public void write(int b) throws IOException {
+    buf.put((byte) b);
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/java/CompressingBytesColumn.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/java/CompressingBytesColumn.java b/sandbox/prototype/exec/java-exec/src/test/java/CompressingBytesColumn.java
new file mode 100644
index 0000000..4fb67ed
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/java/CompressingBytesColumn.java
@@ -0,0 +1,46 @@
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+public class CompressingBytesColumn {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CompressingBytesColumn.class);
+
+  int mb = 1024*1024;
+  
+  ByteBuffer values = ByteBuffer.allocateDirect(20*mb);
+  ByteBuffer fromCompressBuffer = ByteBuffer.allocateDirect(mb);
+  ByteBuffer toCompressBuffer = ByteBuffer.allocateDirect(mb);
+
+  
+  public CompressingBytesColumn(){
+  }
+  
+  public void add(byte[] bytes, int start, int length){
+    
+  }
+  
+  public void add(ByteBuffer buffer){
+    
+  }
+  public void write(OutputStream stream){
+    
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/java/ExternalSort.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/java/ExternalSort.java b/sandbox/prototype/exec/java-exec/src/test/java/ExternalSort.java
new file mode 100644
index 0000000..c6233ae
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/java/ExternalSort.java
@@ -0,0 +1,21 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+public class ExternalSort {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExternalSort.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/java/GenerateExternalSortData.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/java/GenerateExternalSortData.java b/sandbox/prototype/exec/java-exec/src/test/java/GenerateExternalSortData.java
new file mode 100644
index 0000000..dca7d27
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/java/GenerateExternalSortData.java
@@ -0,0 +1,124 @@
+import java.io.BufferedReader;
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+
+import org.apache.hadoop.conf.Configuration;
+import org.xerial.snappy.Snappy;
+
+import com.google.common.base.Charsets;
+import com.google.protobuf.CodedOutputStream;
+
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+public class GenerateExternalSortData {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(GenerateExternalSortData.class);
+  
+  /** Convert a sequence file into a compressed columnar format. 
+   * 
+   * @param args
+   * @throws Exception
+   */
+  public static void main(String[] args) throws Exception{
+    int mb = 1024*1024;
+    final int blockSize = 1024;
+    ByteBuffer keys = ByteBuffer.allocateDirect(2*mb);
+    ByteBuffer values = ByteBuffer.allocateDirect(20*mb);
+    ByteBuffer fromCompressBuffer = ByteBuffer.allocateDirect(mb);
+    ByteBuffer toCompressBuffer = ByteBuffer.allocateDirect(mb);
+    
+    ByteBuffer valueLengthB = ByteBuffer.allocateDirect(blockSize*4);
+    IntBuffer valueLengths = valueLengthB.asIntBuffer();
+    //Opaque value stored as len,data.
+    
+    //
+    //Snappy.compress(uncompressed, compressed);
+    String file = "/opt/data/tera1gb/part-00000";
+    Configuration config = new Configuration();
+    //SequenceFile.Reader sf = new SequenceFile.Reader(FileSystem.getLocal(config), new Path(file), config);
+    
+    BufferedReader reader = Files.newBufferedReader(FileSystems.getDefault().getPath(file), Charsets.UTF_8);
+    
+    CodedOutputStream cos = CodedOutputStream.newInstance(new BBOutputStream(values));
+    
+    long originalBytes = 0;
+    long compressedBytes = 0;
+    String l;
+    int round = 0;
+    long nanos = 0;
+    long x1 = System.nanoTime();
+    while((l = reader.readLine()) != null){
+      
+      byte[] bytes = l.getBytes();
+      keys.put(bytes, 0, 10);
+      int len = bytes.length - 10;
+      originalBytes += len;
+      
+      
+      // Compress the value.
+      long n1 = System.nanoTime();
+      fromCompressBuffer.put(bytes, 10, len);
+      fromCompressBuffer.flip();
+      int newLen = Snappy.compress(fromCompressBuffer, toCompressBuffer);
+      cos.writeRawVarint32(newLen);
+      toCompressBuffer.flip();
+      values.put(toCompressBuffer);
+      fromCompressBuffer.clear();
+      toCompressBuffer.clear();
+      nanos += (System.nanoTime() - n1);
+
+      compressedBytes += newLen;
+      //valueLengths.put(newLen);
+      
+      round++;
+      
+      if(round >= blockSize){
+        // flush
+        keys.clear();
+        values.clear();
+        round = 0;
+        
+      }
+      
+      
+    }
+    
+    System.out.println("Uncompressed: " + originalBytes);
+    System.out.println("Compressed: " + compressedBytes);
+    System.out.println("CompressionTime: " + nanos/1000/1000);
+    System.out.println("Total Time: " + (System.nanoTime() - x1)/1000/1000);
+    
+  }
+  
+  private static void convertToDeltas(IntBuffer b){
+    b.flip();
+    int min = Integer.MAX_VALUE;
+    for(int i =0; i < b.limit(); i++){
+      min = Math.min(b.get(i), min);
+    }
+    
+    for(int i =0; i < b.limit(); i++){
+      int cur = b.get(i);
+      b.put(i, cur - min);
+    }
+    
+    
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/record/column/SimpleExec.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/record/column/SimpleExec.java b/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/record/column/SimpleExec.java
new file mode 100644
index 0000000..e3747e1
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/record/column/SimpleExec.java
@@ -0,0 +1,30 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.column;
+
+import org.junit.Test;
+
+public class SimpleExec {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SimpleExec.class);
+  
+  @Test
+  public void columnarAnd() throws Exception{
+   
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestOpenBitSet.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestOpenBitSet.java b/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestOpenBitSet.java
new file mode 100644
index 0000000..66f69de
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestOpenBitSet.java
@@ -0,0 +1,361 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record.vector;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.UnpooledByteBufAllocator;
+
+import java.util.BitSet;
+import java.util.Random;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@Ignore
+public class TestOpenBitSet {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestOpenBitSet.class);
+
+  Random random = new Random();
+  ByteBufAllocator allocator = UnpooledByteBufAllocator.DEFAULT;
+  
+  public int atLeast(int val){
+    return val + random.nextInt(val);
+  }
+  
+  
+  public Random random() {
+    return new Random();
+  }
+
+  void doGet(BitSet a, BufBitSet b) {
+    int max = a.size();
+    for (int i = 0; i < max; i++) {
+      if (a.get(i) != b.get(i)) {
+        fail("mismatch: BitSet=[" + i + "]=" + a.get(i));
+      }
+      if (a.get(i) != b.get((long) i)) {
+        fail("mismatch: BitSet=[" + i + "]=" + a.get(i));
+      }
+    }
+  }
+
+  void doGetFast(BitSet a, BufBitSet b, int max) {
+    for (int i = 0; i < max; i++) {
+      if (a.get(i) != b.fastGet(i)) {
+        fail("mismatch: BitSet=[" + i + "]=" + a.get(i));
+      }
+      if (a.get(i) != b.fastGet((long) i)) {
+        fail("mismatch: BitSet=[" + i + "]=" + a.get(i));
+      }
+    }
+  }
+
+  void doNextSetBit(BitSet a, BufBitSet b) {
+    int aa = -1, bb = -1;
+    do {
+      aa = a.nextSetBit(aa + 1);
+      bb = b.nextSetBit(bb + 1);
+      assertEquals(aa, bb);
+    } while (aa >= 0);
+  }
+
+  void doNextSetBitLong(BitSet a, BufBitSet b) {
+    int aa = -1, bb = -1;
+    do {
+      aa = a.nextSetBit(aa + 1);
+      bb = (int) b.nextSetBit((long) (bb + 1));
+      assertEquals(aa, bb);
+    } while (aa >= 0);
+  }
+
+  void doPrevSetBit(BitSet a, BufBitSet b) {
+    int aa = a.size() + random().nextInt(100);
+    int bb = aa;
+    do {
+      // aa = a.prevSetBit(aa-1);
+      aa--;
+      while ((aa >= 0) && (!a.get(aa))) {
+        aa--;
+      }
+      bb = b.prevSetBit(bb - 1);
+      assertEquals(aa, bb);
+    } while (aa >= 0);
+  }
+
+  void doPrevSetBitLong(BitSet a, BufBitSet b) {
+    int aa = a.size() + random().nextInt(100);
+    int bb = aa;
+    do {
+      // aa = a.prevSetBit(aa-1);
+      aa--;
+      while ((aa >= 0) && (!a.get(aa))) {
+        aa--;
+      }
+      bb = (int) b.prevSetBit((long) (bb - 1));
+      assertEquals(aa, bb);
+    } while (aa >= 0);
+  }
+
+  // test interleaving different OpenBitSetIterator.next()/skipTo()
+  void doIterate(BitSet a, BufBitSet b, int mode) {
+    // if (mode == 1) doIterate1(a, b);
+    // if (mode == 2) doIterate2(a, b);
+  }
+
+  //
+  // void doIterate1(BitSet a, OpenBitSet b) {
+  // int aa = -1, bb = -1;
+  // OpenBitSetIterator iterator = new OpenBitSetIterator(b);
+  // do {
+  // aa = a.nextSetBit(aa + 1);
+  // bb = random().nextBoolean() ? iterator.nextDoc() : iterator.advance(bb + 1);
+  // assertEquals(aa == -1 ? DocIdSetIterator.NO_MORE_DOCS : aa, bb);
+  // } while (aa >= 0);
+  // }
+  //
+  // void doIterate2(BitSet a, OpenBitSet b) {
+  // int aa = -1, bb = -1;
+  // OpenBitSetIterator iterator = new OpenBitSetIterator(b);
+  // do {
+  // aa = a.nextSetBit(aa + 1);
+  // bb = random().nextBoolean() ? iterator.nextDoc() : iterator.advance(bb + 1);
+  // assertEquals(aa == -1 ? DocIdSetIterator.NO_MORE_DOCS : aa, bb);
+  // } while (aa >= 0);
+  // }
+
+  void doRandomSets(int maxSize, int iter, int mode) {
+    BitSet a0 = null;
+    BufBitSet b0 = null;
+
+    for (int i = 0; i < iter; i++) {
+      int sz = random().nextInt(maxSize);
+      BitSet a = new BitSet(sz);
+      BufBitSet b = new BufBitSet(sz, allocator);
+
+      // test the various ways of setting bits
+      if (sz > 0) {
+        int nOper = random().nextInt(sz);
+        for (int j = 0; j < nOper; j++) {
+          int idx;
+
+          idx = random().nextInt(sz);
+          a.set(idx);
+          b.fastSet(idx);
+
+          idx = random().nextInt(sz);
+          a.set(idx);
+          b.fastSet((long) idx);
+
+          idx = random().nextInt(sz);
+          a.clear(idx);
+          b.fastClear(idx);
+
+          idx = random().nextInt(sz);
+          a.clear(idx);
+          b.fastClear((long) idx);
+
+          idx = random().nextInt(sz);
+          a.flip(idx);
+          b.fastFlip(idx);
+
+          boolean val = b.flipAndGet(idx);
+          boolean val2 = b.flipAndGet(idx);
+          assertTrue(val != val2);
+
+          idx = random().nextInt(sz);
+          a.flip(idx);
+          b.fastFlip((long) idx);
+
+          val = b.flipAndGet((long) idx);
+          val2 = b.flipAndGet((long) idx);
+          assertTrue(val != val2);
+
+          val = b.getAndSet(idx);
+          assertTrue(val2 == val);
+          assertTrue(b.get(idx));
+
+          if (!val) b.fastClear(idx);
+          assertTrue(b.get(idx) == val);
+        }
+      }
+
+      // test that the various ways of accessing the bits are equivalent
+      doGet(a, b);
+      doGetFast(a, b, sz);
+
+      // test ranges, including possible extension
+      int fromIndex, toIndex;
+      fromIndex = random().nextInt(sz + 80);
+      toIndex = fromIndex + random().nextInt((sz >> 1) + 1);
+      BitSet aa = (BitSet) a.clone();
+      aa.flip(fromIndex, toIndex);
+      BufBitSet bb = b.cloneTest();
+      bb.flip(fromIndex, toIndex);
+
+      doIterate(aa, bb, mode); // a problem here is from flip or doIterate
+
+      fromIndex = random().nextInt(sz + 80);
+      toIndex = fromIndex + random().nextInt((sz >> 1) + 1);
+      aa = (BitSet) a.clone();
+      aa.clear(fromIndex, toIndex);
+      bb = b.cloneTest();
+      bb.clear(fromIndex, toIndex);
+
+      doNextSetBit(aa, bb); // a problem here is from clear() or nextSetBit
+      doNextSetBitLong(aa, bb);
+
+      doPrevSetBit(aa, bb);
+      doPrevSetBitLong(aa, bb);
+
+      fromIndex = random().nextInt(sz + 80);
+      toIndex = fromIndex + random().nextInt((sz >> 1) + 1);
+      aa = (BitSet) a.clone();
+      aa.set(fromIndex, toIndex);
+      bb = b.cloneTest();
+      bb.set(fromIndex, toIndex);
+
+      doNextSetBit(aa, bb); // a problem here is from set() or nextSetBit
+      doNextSetBitLong(aa, bb);
+
+      doPrevSetBit(aa, bb);
+      doPrevSetBitLong(aa, bb);
+
+      if (a0 != null) {
+        assertEquals(a.equals(a0), b.equals(b0));
+
+        assertEquals(a.cardinality(), b.cardinality());
+
+        BitSet a_and = (BitSet) a.clone();
+        a_and.and(a0);
+        BitSet a_or = (BitSet) a.clone();
+        a_or.or(a0);
+        BitSet a_xor = (BitSet) a.clone();
+        a_xor.xor(a0);
+        BitSet a_andn = (BitSet) a.clone();
+        a_andn.andNot(a0);
+
+        BufBitSet b_and = b.cloneTest();
+        assertEquals(b, b_and);
+        b_and.and(b0);
+        BufBitSet b_or = b.cloneTest();
+        b_or.or(b0);
+        BufBitSet b_xor = b.cloneTest();
+        b_xor.xor(b0);
+        BufBitSet b_andn = b.cloneTest();
+        b_andn.andNot(b0);
+
+        doIterate(a_and, b_and, mode);
+        doIterate(a_or, b_or, mode);
+        doIterate(a_xor, b_xor, mode);
+        doIterate(a_andn, b_andn, mode);
+
+        assertEquals(a_and.cardinality(), b_and.cardinality());
+        assertEquals(a_or.cardinality(), b_or.cardinality());
+        assertEquals(a_xor.cardinality(), b_xor.cardinality());
+        assertEquals(a_andn.cardinality(), b_andn.cardinality());
+
+        // test non-mutating popcounts
+        assertEquals(b_and.cardinality(), BufBitSet.intersectionCount(b, b0));
+        assertEquals(b_or.cardinality(), BufBitSet.unionCount(b, b0));
+        assertEquals(b_xor.cardinality(), BufBitSet.xorCount(b, b0));
+        assertEquals(b_andn.cardinality(), BufBitSet.andNotCount(b, b0));
+      }
+
+      a0 = a;
+      b0 = b;
+    }
+  }
+
+  // large enough to flush obvious bugs, small enough to run in <.5 sec as part of a
+  // larger test suite.
+  @Test
+  public void testSmall() {
+    doRandomSets(atLeast(1200), atLeast(1000), 1);
+    doRandomSets(atLeast(1200), atLeast(1000), 2);
+  }
+
+  // uncomment to run a bigger test (~2 minutes).
+  /*
+   * public void testBig() { doRandomSets(2000,200000, 1); doRandomSets(2000,200000, 2); }
+   */
+
+  @Test
+  public void testEquals() {
+    BufBitSet b1 = new BufBitSet(1111, allocator);
+    BufBitSet b2 = new BufBitSet(2222, allocator);
+    assertTrue(b1.equals(b2));
+    assertTrue(b2.equals(b1));
+    b1.set(10);
+    assertFalse(b1.equals(b2));
+    assertFalse(b2.equals(b1));
+    b2.set(10);
+    assertTrue(b1.equals(b2));
+    assertTrue(b2.equals(b1));
+    b2.set(2221);
+    assertFalse(b1.equals(b2));
+    assertFalse(b2.equals(b1));
+    b1.set(2221);
+    assertTrue(b1.equals(b2));
+    assertTrue(b2.equals(b1));
+
+    // try different type of object
+    assertFalse(b1.equals(new Object()));
+  }
+
+  @Test
+  public void testHashCodeEquals() {
+    BufBitSet bs1 = new BufBitSet(200, allocator);
+    BufBitSet bs2 = new BufBitSet(64, allocator);
+    bs1.set(3);
+    bs2.set(3);
+    assertEquals(bs1, bs2);
+    assertEquals(bs1.hashCode(), bs2.hashCode());
+  }
+
+  private BufBitSet makeOpenBitSet(int[] a) {
+    BufBitSet bs = new BufBitSet(64, allocator);
+    for (int e : a) {
+      bs.set(e);
+    }
+    return bs;
+  }
+
+  private BitSet makeBitSet(int[] a) {
+    BitSet bs = new BitSet();
+    for (int e : a) {
+      bs.set(e);
+    }
+    return bs;
+  }
+
+  private void checkPrevSetBitArray(int[] a) {
+    BufBitSet obs = makeOpenBitSet(a);
+    BitSet bs = makeBitSet(a);
+    doPrevSetBit(bs, obs);
+  }
+
+  public void testPrevSetBit() {
+    checkPrevSetBitArray(new int[] {});
+    checkPrevSetBitArray(new int[] { 0 });
+    checkPrevSetBitArray(new int[] { 0, 2 });
+  }
+}
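
The tests above run java.util.BitSet and BufBitSet through identical random operations and assert that every accessor agrees. A minimal sketch of that cross-checking pattern, written as one more helper in the style of the methods above (it reuses the test class's allocator field and the random()/assertEquals helpers already in scope; the BufBitSet constructor and set/cardinality calls are the ones exercised by these tests):

      // Hypothetical helper in the same vein as doRandomSets: drive both
      // implementations with the same operations, then compare an aggregate.
      void doCardinalityCrossCheck(int size, int ops) {
        java.util.BitSet reference = new java.util.BitSet(size);
        BufBitSet candidate = new BufBitSet(size, allocator);
        for (int i = 0; i < ops; i++) {
          int idx = random().nextInt(size);
          reference.set(idx);
          candidate.set(idx);
        }
        assertEquals(reference.cardinality(), candidate.cardinality());
      }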

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/UserRpcTest.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/UserRpcTest.java b/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/UserRpcTest.java
new file mode 100644
index 0000000..c8ce877
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/UserRpcTest.java
@@ -0,0 +1,107 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.user;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.PooledByteBufAllocator;
+import io.netty.channel.nio.NioEventLoopGroup;
+
+import java.util.concurrent.TimeUnit;
+
+import org.apache.drill.exec.proto.UserProtos.QueryHandle;
+import org.apache.drill.exec.proto.UserProtos.QueryResultsMode;
+import org.apache.drill.exec.proto.UserProtos.RunQuery;
+import org.apache.drill.exec.rpc.DrillRpcFuture;
+import org.apache.drill.exec.rpc.NamedThreadFactory;
+import org.junit.Test;
+
+public class UserRpcTest {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UserRpcTest.class);
+  
+  
+  
+  
+  @Test
+  public void doBasicRpcTest() throws Exception {
+    final int bufferSize = 25000;
+    final int batchSize = 1000;
+    final int batchCount = 100;
+
+    
+    int sends = 0;
+    int receives = 0;
+    long nanoSend = 0;
+    long nanoReceive = 0;
+
+    
+    try {
+      ByteBufAllocator bb = new PooledByteBufAllocator(true);
+//      ByteBufAllocator bb = UnpooledByteBufAllocator.DEFAULT;
+      UserServer s = new UserServer(bb, new NioEventLoopGroup(1, new NamedThreadFactory("Server-")), null);
+      s.bind(31515);
+
+      logger.debug("Starting user client.");
+      UserClient c = new UserClient(bb, new NioEventLoopGroup(1, new NamedThreadFactory("Client-")));
+
+      logger.debug("Connecting as client to server.");
+      c.connectAsClient("localhost", 31515);
+
+      
+      @SuppressWarnings("unchecked")
+      DrillRpcFuture<QueryHandle>[] handles = new DrillRpcFuture[batchSize];
+
+      for (int x = 0; x < batchCount; x++) {
+        long s1 = System.nanoTime();
+        for (int i = 0; i < batchSize; i++) {
+          sends++;
+          ByteBuf rawBody = bb.buffer(bufferSize);
+          rawBody.writerIndex(bufferSize);
+          if(rawBody.readableBytes() != bufferSize) throw new RuntimeException();
+          handles[i] = c.submitQuery(RunQuery.newBuilder().setMode(QueryResultsMode.QUERY_FOR_STATUS).build(), rawBody);
+        }
+        
+        long s2 = System.nanoTime();
+
+        for (int i = 0; i < batchSize; i++) {
+          handles[i].checkedGet(2, TimeUnit.SECONDS).getQueryId();
+          receives++;
+        }
+
+        long s3 = System.nanoTime();
+        nanoSend += (s2-s1);
+        nanoReceive += (s3-s2);
+        logger.debug("Submission time {}ms, return time {}ms", (s2 - s1) / 1000 / 1000, (s3 - s2) / 1000 / 1000);
+      }
+      // logger.debug("Submitting query.");
+      // DrillRpcFuture<QueryHandle> handleFuture =
+      // c.submitQuery(RunQuery.newBuilder().setMode(QueryResultsMode.QUERY_FOR_STATUS).build());
+      //
+      // logger.debug("Got query id handle of {}", handleFuture.get(2, TimeUnit.SECONDS).getQueryId());
+    } catch (Exception e) {
+      logger.error("Exception of type {} occurred while doing test.", e.getClass().getCanonicalName());
+      throw e;
+    } finally{
+      long mbsTransferred = (1l * bufferSize * batchSize * batchCount)/1024/1024;
+      double sSend = nanoSend*1.0d/1000/1000/1000;
+      double sReceive = nanoReceive*1.0d/1000/1000/1000;
+      logger.info(String.format("Completed %d sends and %d receives.  Total data transferred was %d.  Send bw: %f, Receive bw: %f.", sends, receives, mbsTransferred, mbsTransferred*1.0/sSend, mbsTransferred*1.0/sReceive));
+      logger.info("Completed {} sends and {} receives.", sends, receives);
+    }
+  }
+}
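
The bandwidth figures logged in the finally block count payload only. The total follows from the three constants at the top of doBasicRpcTest; an illustrative back-of-the-envelope using the same integer math as mbsTransferred (not a measured result):

    public class RpcPayloadMath {
      public static void main(String[] args) {
        long bytes = 25000L * 1000 * 100;        // bufferSize * batchSize * batchCount
        long mb = bytes / 1024 / 1024;           // same truncating division as mbsTransferred
        System.out.println(mb);                  // prints 2384
      }
    }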

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/server/StartDrillbit.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/server/StartDrillbit.java b/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/server/StartDrillbit.java
new file mode 100644
index 0000000..7b353df
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/java/org/apache/drill/exec/server/StartDrillbit.java
@@ -0,0 +1,31 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.server;
+
+import org.apache.drill.exec.exception.DrillbitStartupException;
+import org.junit.Test;
+
+public class StartDrillbit {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StartDrillbit.class);
+  
+  
+  @Test public void startDrillbit() throws DrillbitStartupException, InterruptedException{
+    Drillbit.main(new String[0]);
+    
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/test/resources/logback.xml
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/test/resources/logback.xml b/sandbox/prototype/exec/java-exec/src/test/resources/logback.xml
new file mode 100644
index 0000000..b79b811
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/test/resources/logback.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<configuration>
+
+  <appender name="SOCKET" class="de.huxhorn.lilith.logback.appender.ClassicMultiplexSocketAppender">
+    <Compressing>true</Compressing> 
+    <ReconnectionDelay>10000</ReconnectionDelay>
+    <IncludeCallerData>true</IncludeCallerData>
+    <RemoteHosts>localhost</RemoteHosts>
+  </appender>
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+    <!-- encoders are assigned the type
+         ch.qos.logback.classic.encoder.PatternLayoutEncoder by default -->
+    <encoder>
+      <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+    </encoder>
+  </appender>
+
+<!-- 
+  <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+    <file>/logs/test-common.log</file>
+    <encoder>
+      <pattern>%date %level [%thread] %logger{10} [%file:%line] %msg%n</pattern>
+    </encoder>
+    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+	    <fileNamePattern>/logs/test-common.%d{yyyy-MM-dd}.log</fileNamePattern>
+	    <maxHistory>30</maxHistory>
+    </rollingPolicy>
+  </appender>
+  --> 
+  <logger name="org.apache.drill" additivity="false">
+    <level value="debug" />
+    <appender-ref ref="SOCKET" />
+    <appender-ref ref="STDOUT" />
+<!--     <appender-ref ref="FILE" /> -->
+  </logger>
+
+  <root>
+    <level value="error" />
+    <appender-ref ref="SOCKET" />
+    <appender-ref ref="STDOUT" />
+<!--     <appender-ref ref="FILE" /> -->
+  </root>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/ref/pom.xml
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/ref/pom.xml b/sandbox/prototype/exec/ref/pom.xml
index b253f6b..045a36b 100644
--- a/sandbox/prototype/exec/ref/pom.xml
+++ b/sandbox/prototype/exec/ref/pom.xml
@@ -8,9 +8,9 @@
 		<groupId>org.apache.drill.exec</groupId>
 		<version>1.0-SNAPSHOT</version>
 	</parent>
-	
+
 	<artifactId>ref</artifactId>
-	
+
 	<name>Logical Plan Execution Reference Implementation</name>
 
 	<dependencies>
@@ -36,8 +36,9 @@
 				</exclusion>
 			</exclusions>
 		</dependency>
-		
 
+
+		
 		<dependency>
 			<groupId>com.carrotsearch</groupId>
 			<artifactId>hppc</artifactId>


[3/9] git commit: Add flatten and join test executions. Abstract graph classes. Update storage engine definition to be a map. Move plan properties to use enum for plan type. Remove unused tests/resources. Update sql parser for change in storage engine definition

Posted by ja...@apache.org.
Add flatten and join test executions.  Abstract graph classes.  Update storage engine definition to be a map.  Move plan properties to use enum for plan type.  Remove unused tests/resources.  Update sql parser for change in storage engine definition.  Add basic physical plan implementation.


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/2a6e1b33
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/2a6e1b33
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/2a6e1b33

Branch: refs/heads/execwork
Commit: 2a6e1b33e93824c1147e7e257a46aceb768da8d8
Parents: b48d0f0
Author: Jacques Nadeau <ja...@apache.org>
Authored: Sat Mar 16 17:56:35 2013 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Sat Mar 16 18:26:42 2013 -0700

----------------------------------------------------------------------
 .../java/org/apache/drill/common/JSONOptions.java  |  106 ++++++++
 .../org/apache/drill/common/PlanProperties.java    |   32 +++
 .../drill/common/config/CommonConstants.java       |    1 +
 .../apache/drill/common/config/DrillConfig.java    |    2 +
 .../org/apache/drill/common/defs/OrderDef.java     |   60 +++++
 .../org/apache/drill/common/defs/PartitionDef.java |   58 +++++
 .../drill/common/expression/types/DataType.java    |  104 +++++++-
 .../common/expression/types/LateBindType.java      |    2 +-
 .../common/expression/visitors/OpVisitor.java      |    6 +-
 .../apache/drill/common/graph/AdjacencyList.java   |  185 +++++++++++++++
 .../drill/common/graph/AdjacencyListBuilder.java   |   74 ++++++
 .../java/org/apache/drill/common/graph/Edge.java   |   42 ++++
 .../java/org/apache/drill/common/graph/Graph.java  |   68 ++++++
 .../org/apache/drill/common/graph/GraphAlgos.java  |  145 +++++++++++
 .../org/apache/drill/common/graph/GraphValue.java  |   26 ++
 .../apache/drill/common/graph/GraphVisitor.java    |   25 ++
 .../org/apache/drill/common/graph/Visitable.java   |   22 ++
 .../apache/drill/common/logical/JSONOptions.java   |  120 ----------
 .../apache/drill/common/logical/LogicalPlan.java   |  121 +++-------
 .../apache/drill/common/logical/OperatorGraph.java |  145 -----------
 .../drill/common/logical/PlanProperties.java       |   30 ---
 .../drill/common/logical/StorageEngineConfig.java  |    1 -
 .../common/logical/StorageEngineConfigBase.java    |    9 -
 .../common/logical/UnexpectedOperatorType.java     |    5 +-
 .../drill/common/logical/data/LogicalOperator.java |    7 +-
 .../common/logical/data/LogicalOperatorBase.java   |    4 +-
 .../apache/drill/common/logical/data/Order.java    |   49 +----
 .../org/apache/drill/common/logical/data/Scan.java |    2 +-
 .../apache/drill/common/logical/data/Store.java    |   49 +----
 .../apache/drill/common/logical/defs/OrderDef.java |   60 +++++
 .../drill/common/logical/defs/PartitionDef.java    |   55 +++++
 .../drill/common/logical/graph/AdjacencyList.java  |  123 ----------
 .../apache/drill/common/logical/graph/Edge.java    |   42 ----
 .../drill/common/logical/graph/GraphAlgos.java     |  137 -----------
 .../apache/drill/common/logical/graph/Node.java    |   53 ----
 .../org/apache/drill/common/physical/FieldSet.java |   97 ++++++++
 .../apache/drill/common/physical/POPConfig.java    |   24 ++
 .../org/apache/drill/common/physical/POPCost.java  |   34 +++
 .../apache/drill/common/physical/PhysicalPlan.java |   93 ++++++++
 .../apache/drill/common/physical/ReadEntry.java    |   25 ++
 .../apache/drill/common/physical/RecordField.java  |   79 ++++++
 .../org/apache/drill/common/physical/SetSpec.java  |   36 +++
 .../apache/drill/common/physical/StitchDef.java    |   48 ++++
 .../drill/common/physical/pop/ExchangePOP.java     |   56 +++++
 .../apache/drill/common/physical/pop/POPBase.java  |   65 +++++
 .../common/physical/pop/PhysicalOperator.java      |   35 +++
 .../common/physical/pop/QuickNWaySortPOP.java      |   50 ++++
 .../apache/drill/common/physical/pop/ScanPOP.java  |   75 ++++++
 .../drill/common/physical/pop/SingleChildPOP.java  |   41 ++++
 .../apache/drill/common/physical/pop/SinkPOP.java  |   22 ++
 .../drill/common/physical/pop/SourcePOP.java       |   22 ++
 .../apache/drill/common/physical/pop/StorePOP.java |   54 +++++
 .../drill/common/physical/props/OrderProp.java     |   45 ++++
 .../drill/common/physical/props/PartitionProp.java |   36 +++
 .../drill/common/physical/props/PhysicalProp.java  |   24 ++
 .../drill/common/physical/props/SegmentProp.java   |   42 ++++
 .../common/src/main/resources/drill-default.conf   |   17 +-
 .../test/java/org/apache/drill/ExpressionTest.java |   14 -
 .../drill/common/physical/ParsePhysicalPlan.java   |   37 +++
 .../drill/storage/MockStorageEngineConfig.java     |    4 +-
 .../common/src/test/resources/dsort-physical.json  |   76 ++++++
 .../common/src/test/resources/example1.sql         |  136 -----------
 .../common/src/test/resources/example2.sql         |   98 --------
 .../common/src/test/resources/example3.sql         |    3 -
 .../common/src/test/resources/logback.xml          |    9 +-
 .../common/src/test/resources/logical_plan1.json   |  139 -----------
 .../common/src/test/resources/simple_plan.json     |  134 -----------
 .../src/test/resources/storage_engine_plan.json    |    9 +-
 sandbox/prototype/exec/ref/pom.xml                 |    8 +-
 .../org/apache/drill/exec/ref/rops/OrderROP.java   |    6 +-
 .../apache/drill/exec/ref/rse/ClasspathRSE.java    |    6 +-
 .../org/apache/drill/exec/ref/rse/ConsoleRSE.java  |   22 +-
 .../apache/drill/exec/ref/rse/FileSystemRSE.java   |    8 +-
 .../org/apache/drill/exec/ref/rse/QueueRSE.java    |   12 +-
 .../org/apache/drill/exec/ref/RunSimplePlan.java   |   24 ++
 .../exec/ref/src/test/resources/simple_join.json   |   24 +--
 .../exec/ref/src/test/resources/simple_plan.json   |   24 +--
 .../src/test/resources/simple_plan_flattened.json  |   28 +-
 .../main/java/org/apache/drill/jdbc/Driver.java    |    2 +-
 .../org/apache/drill/optiq/DrillImplementor.java   |   10 +-
 .../java/org/apache/drill/optiq/DrillScan.java     |    3 +-
 81 files changed, 2330 insertions(+), 1496 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/JSONOptions.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/JSONOptions.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/JSONOptions.java
new file mode 100644
index 0000000..ad4926b
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/JSONOptions.java
@@ -0,0 +1,106 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common;
+
+import java.io.IOException;
+
+import org.apache.drill.common.JSONOptions.De;
+import org.apache.drill.common.JSONOptions.Se;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.exceptions.LogicalPlanParsingException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonLocation;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.TreeNode;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+
+@JsonSerialize(using = Se.class)
+@JsonDeserialize(using = De.class)
+public class JSONOptions {
+  
+  final static Logger logger = LoggerFactory.getLogger(JSONOptions.class);
+  
+  private JsonNode root;
+  private JsonLocation location;
+  
+  private JSONOptions(JsonNode n, JsonLocation location){
+    this.root = n;
+    this.location = location;
+  }
+  
+  public <T> T getWith(DrillConfig config, Class<T> c){
+    try {
+      //logger.debug("Read tree {}", root);
+      return config.getMapper().treeToValue(root, c);
+    } catch (JsonProcessingException e) {
+      throw new LogicalPlanParsingException(String.format("Failure while trying to convert late bound json options to type of %s. Reference was originally located at line %d, column %d.", c.getCanonicalName(), location.getLineNr(), location.getColumnNr()), e);
+    }
+  }
+  
+  public JsonNode path(String name){
+    return root.path(name);
+  }
+  
+  public static class De extends StdDeserializer<JSONOptions> {
+    
+    public De() {
+      super(JSONOptions.class);
+      logger.debug("Creating Deserializer.");
+    }
+
+    @Override
+    public JSONOptions deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException,
+        JsonProcessingException {
+      JsonLocation l = jp.getTokenLocation();
+//      logger.debug("Reading tree.");
+      TreeNode n = jp.readValueAsTree();
+//      logger.debug("Tree {}", n);
+      if(n instanceof JsonNode){
+        return new JSONOptions( (JsonNode) n, l); 
+      }else{
+        throw new IllegalArgumentException(String.format("Received something other than a JsonNode %s", n));
+      }
+    }
+
+  }
+
+  public static class Se extends StdSerializer<JSONOptions> {
+
+    public Se() {
+      super(JSONOptions.class);
+    }
+
+    @Override
+    public void serialize(JSONOptions value, JsonGenerator jgen, SerializerProvider provider) throws IOException,
+        JsonGenerationException {
+      jgen.writeTree(value.root);
+    }
+
+  }
+}
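
JSONOptions captures an arbitrary JSON subtree at parse time and defers binding it to a concrete type. A minimal sketch of how a field of this type behaves under plain Jackson, relying only on the annotations above; the Holder class and its field names are hypothetical, introduced purely for illustration:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.drill.common.JSONOptions;

    public class JsonOptionsSketch {
      // Hypothetical POJO whose "selection" is left as raw, late-bound JSON.
      public static class Holder {
        public String name;
        public JSONOptions selection;
      }

      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Holder h = mapper.readValue(
            "{\"name\":\"scan\",\"selection\":{\"files\":[\"a.json\"],\"format\":\"json\"}}",
            Holder.class);
        System.out.println(h.selection.path("format"));  // the captured "format" node
        // Once a DrillConfig is in hand, the subtree can be materialized with
        // getWith(config, SomeSelectionClass.class) as defined above (class name illustrative).
      }
    }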

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/PlanProperties.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/PlanProperties.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/PlanProperties.java
new file mode 100644
index 0000000..57d367a
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/PlanProperties.java
@@ -0,0 +1,32 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common;
+
+
+public class PlanProperties {
+  public static enum PlanType {APACHE_DRILL_LOGICAL, APACHE_DRILL_PHYSICAL}
+
+  public PlanType type;
+  public int version;
+	public Generator generator = new Generator();
+	
+	public static class Generator{
+		public String type;
+		public String info;
+	}
+}
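
A minimal sketch of the plan head this class maps to, assuming default Jackson enum handling (the enum constant spelled exactly as declared; whether Drill's configured mapper accepts other spellings is not shown in this excerpt):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.drill.common.PlanProperties;

    public class PlanHeadSketch {
      public static void main(String[] args) throws Exception {
        String head = "{\"type\":\"APACHE_DRILL_LOGICAL\",\"version\":1,"
            + "\"generator\":{\"type\":\"manual\",\"info\":\"na\"}}";
        PlanProperties props = new ObjectMapper().readValue(head, PlanProperties.class);
        System.out.println(props.type);            // APACHE_DRILL_LOGICAL
        System.out.println(props.generator.info);  // na
      }
    }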

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/CommonConstants.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/CommonConstants.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/CommonConstants.java
index 0687979..95bc979 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/CommonConstants.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/CommonConstants.java
@@ -22,6 +22,7 @@ public interface CommonConstants {
   public static final String CONFIG_OVERRIDE = "drill-override.conf";
   
   public static final String LOGICAL_OPERATOR_SCAN_PACKAGES = "drill.logical.operator.packages";
+  public static final String PHYSICAL_OPERATOR_SCAN_PACKAGES = "drill.physical.operator.packages";
   public static final String STORAGE_ENGINE_CONFIG_SCAN_PACKAGES = "drill.logical.storage.packages";
   public static final String DRILL_JAR_MARKER_FILE = "drill-module.conf";
   public static final String LOGICAL_FUNCTION_SCAN_PACKAGES = "drill.logical.function.packages";

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
index 460b5fa..a775867 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
@@ -26,6 +26,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.logical.StorageEngineConfigBase;
 import org.apache.drill.common.logical.data.LogicalOperatorBase;
+import org.apache.drill.common.physical.pop.POPBase;
 import org.apache.drill.common.util.PathScanner;
 
 import com.fasterxml.jackson.core.JsonParser.Feature;
@@ -52,6 +53,7 @@ public final class DrillConfig extends NestedConfig{
     mapper.configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
     mapper.configure(Feature.ALLOW_COMMENTS, true);
     mapper.registerSubtypes(LogicalOperatorBase.getSubTypes(this));
+    mapper.registerSubtypes(POPBase.getSubTypes(this));
     mapper.registerSubtypes(StorageEngineConfigBase.getSubTypes(this));
     
   };

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/defs/OrderDef.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/defs/OrderDef.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/defs/OrderDef.java
new file mode 100644
index 0000000..e873496
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/defs/OrderDef.java
@@ -0,0 +1,60 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.defs;
+
+import org.apache.drill.common.expression.LogicalExpression;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class OrderDef {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OrderDef.class);
+
+  private final Direction direction;
+  private final LogicalExpression expr;
+
+  @JsonCreator
+  public OrderDef(@JsonProperty("order") Direction direction, @JsonProperty("expr") LogicalExpression expr) {
+    this.expr = expr;
+    // default to ascending unless desc is provided.
+    this.direction = direction == null ? Direction.ASC : direction;
+  }
+  
+  @JsonIgnore
+  public Direction getDirection() {
+    return direction;
+  }
+
+  public LogicalExpression getExpr() {
+    return expr;
+  }
+
+  public String getOrder() {
+    return direction.description;
+  }
+
+  public static enum Direction {
+    ASC("asc"), DESC("desc");
+    public final String description;
+
+    Direction(String d) {
+      description = d;
+    }
+  }
+}
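
A small sketch of the default-direction rule encoded in the constructor above. It calls the constructor directly with a null expression, since building a real LogicalExpression needs Drill's configured mapper, which is outside this excerpt:

    import org.apache.drill.common.defs.OrderDef;

    public class OrderDefDefaultSketch {
      public static void main(String[] args) {
        OrderDef def = new OrderDef(null, null);   // no direction supplied
        System.out.println(def.getOrder());        // prints "asc"
        System.out.println(def.getDirection());    // ASC
      }
    }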

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/defs/PartitionDef.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/defs/PartitionDef.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/defs/PartitionDef.java
new file mode 100644
index 0000000..181c327
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/defs/PartitionDef.java
@@ -0,0 +1,58 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.defs;
+
+import org.apache.drill.common.expression.LogicalExpression;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class PartitionDef {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PartitionDef.class);
+
+  private final PartitionType partitionType;
+  private final LogicalExpression[] expressions;
+  private final LogicalExpression[] starts;
+  
+  @JsonCreator
+  public PartitionDef(@JsonProperty("mode") PartitionType partitionType, @JsonProperty("exprs") LogicalExpression[] expressions, @JsonProperty("starts") LogicalExpression[] starts) {
+    this.partitionType = partitionType;
+    this.expressions = expressions;
+    this.starts = starts;
+  }
+
+  @JsonProperty("mode")
+  public PartitionType getPartitionType() {
+    return partitionType;
+  }
+
+  @JsonProperty("exprs")
+  public LogicalExpression[] getExpressions() {
+    return expressions;
+  }
+
+  @JsonProperty("starts")
+  public LogicalExpression[] getStarts() {
+    return starts;
+  }
+  
+
+  public static enum PartitionType{ 
+    RANDOM, HASH, RANGE;
+  };
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java
index ac9aec7..776a9e8 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java
@@ -17,7 +17,28 @@
  ******************************************************************************/
 package org.apache.drill.common.expression.types;
 
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+
+@JsonSerialize(using = DataType.Se.class)
+@JsonDeserialize(using = DataType.De.class)
 public abstract class DataType {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DataType.class);
   
   public static enum Comparability{
     UNKNOWN, NONE, EQUAL, ORDERED;
@@ -30,21 +51,78 @@ public abstract class DataType {
   public abstract Comparability getComparability();
   public abstract boolean isNumericType();
   
-  
   public static final DataType LATEBIND = new LateBindType();
-  public static final DataType BOOLEAN = new AtomType("boolean", Comparability.EQUAL, false);
-  public static final DataType BYTES = new AtomType("bytes", Comparability.ORDERED, false);
-  public static final DataType NVARCHAR = new AtomType("varchar", Comparability.ORDERED, false);
-  public static final DataType FLOAT32 = new AtomType("float32", Comparability.ORDERED, true);
-  public static final DataType FLOAT64 = new AtomType("float64", Comparability.ORDERED, true);
-  public static final DataType INT64 = new AtomType("int64", Comparability.ORDERED, true);
-  public static final DataType INT32 = new AtomType("int32", Comparability.ORDERED, true);
+  public static final DataType BOOLEAN = new AtomType("BOOLEAN", Comparability.EQUAL, false);
+  public static final DataType BYTES = new AtomType("BYTES", Comparability.ORDERED, false);
+  public static final DataType NVARCHAR = new AtomType("VARCHAR", Comparability.ORDERED, false);
+  public static final DataType FLOAT32 = new AtomType("FLOAT32", Comparability.ORDERED, true);
+  public static final DataType FLOAT64 = new AtomType("FLOAT64", Comparability.ORDERED, true);
+  public static final DataType INT64 = new AtomType("INT64", Comparability.ORDERED, true);
+  public static final DataType INT32 = new AtomType("INT32", Comparability.ORDERED, true);
 //  public static final DataType INT16 = new AtomType("int16", Comparability.ORDERED, true);
 //  public static final DataType BIG_INTEGER = new AtomType("bigint", Comparability.ORDERED, true);
 //  public static final DataType BIG_DECIMAL = new AtomType("bigdecimal", Comparability.ORDERED, true);
-  public static final DataType DATE = new AtomType("date", Comparability.ORDERED, false);
-  public static final DataType DATETIME = new AtomType("datetime", Comparability.ORDERED, false);
-  public static final DataType MAP = new AtomType("map", Comparability.NONE, false);
-  public static final DataType ARRAY = new AtomType("array", Comparability.NONE, false);
-  public static final DataType NULL = new AtomType("null", Comparability.NONE, false);
+  public static final DataType DATE = new AtomType("DATE", Comparability.ORDERED, false);
+  public static final DataType DATETIME = new AtomType("DATETIME", Comparability.ORDERED, false);
+  public static final DataType MAP = new AtomType("MAP", Comparability.NONE, false);
+  public static final DataType ARRAY = new AtomType("ARRAY", Comparability.NONE, false);
+  public static final DataType NULL = new AtomType("NULL", Comparability.NONE, false);
+  
+  
+  static final Map<String, DataType> TYPES;
+  static {
+    Field[] fields = DataType.class.getFields();
+    Map<String, DataType> types = new HashMap<String, DataType>();
+    for(Field f : fields){
+      //logger.debug("Reviewing {}, Field: {}", f.getClass(), f);
+      if(Modifier.isStatic(f.getModifiers())){
+        try {
+          Object o = f.get(null);
+          //logger.debug("Object {}", o);
+          
+          if(o instanceof DataType) types.put(((DataType) o).getName(), (DataType) o);
+        } catch (IllegalArgumentException | IllegalAccessException e) {
+          logger.warn("Failure while reading DataType.", e);
+        }
+      }
+    }
+    TYPES = Collections.unmodifiableMap(types);
+    
+  }
+  
+  public static DataType getDataType(String name){
+    if(TYPES.containsKey(name)){
+      return TYPES.get(name);
+    }else{
+      throw new IllegalArgumentException(String.format("Unknown type requested of [%s].", name));
+    }
+  }
+
+  public static class De extends StdDeserializer<DataType> {
+
+    public De() {
+      super(DataType.class);
+    }
+
+    @Override
+    public DataType deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException,
+        JsonProcessingException {
+      return getDataType(this._parseString(jp, ctxt));
+    }
+
+  }
+
+  public static class Se extends StdSerializer<DataType> {
+
+    public Se() {
+      super(DataType.class);
+    }
+
+    @Override
+    public void serialize(DataType value, JsonGenerator jgen, SerializerProvider provider) throws IOException,
+        JsonGenerationException {
+      jgen.writeString(value.getName());
+    }
+
+  }
 }
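
The static block above builds a name-keyed registry of the public DataType constants by reflection, and this change uppercases the atom names, so lookups are case-sensitive on the new spellings. A minimal sketch (note that the NVARCHAR constant registers under the name "VARCHAR", as defined above):

    import org.apache.drill.common.expression.types.DataType;

    public class DataTypeLookupSketch {
      public static void main(String[] args) {
        System.out.println(DataType.getDataType("INT32").getName());    // INT32
        System.out.println(DataType.getDataType("VARCHAR").getName());  // VARCHAR (the NVARCHAR constant)
        // getDataType("int32") now throws IllegalArgumentException because the
        // registry keys use the uppercased names.
      }
    }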

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/LateBindType.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/LateBindType.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/LateBindType.java
index 8938e28..da3d87a 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/LateBindType.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/LateBindType.java
@@ -21,7 +21,7 @@ class LateBindType extends DataType {
 
   @Override
   public String getName() {
-    return "late";
+    return "LATE";
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/visitors/OpVisitor.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/visitors/OpVisitor.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/visitors/OpVisitor.java
index 8dd7e16..983a258 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/visitors/OpVisitor.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/visitors/OpVisitor.java
@@ -17,10 +17,8 @@
  ******************************************************************************/
 package org.apache.drill.common.expression.visitors;
 
+import org.apache.drill.common.graph.GraphVisitor;
 import org.apache.drill.common.logical.data.LogicalOperator;
 
-public interface OpVisitor {
-  public boolean enter(LogicalOperator o);
-  public void leave(LogicalOperator o);
-  public boolean visit(LogicalOperator o);
+public interface OpVisitor extends GraphVisitor<LogicalOperator> {
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/AdjacencyList.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/AdjacencyList.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/AdjacencyList.java
new file mode 100644
index 0000000..f23eeca
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/AdjacencyList.java
@@ -0,0 +1,185 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.graph;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import com.google.common.collect.Multimaps;
+
+class AdjacencyList<V extends GraphValue<V>> {
+  private Set<Node> allNodes = new HashSet<Node>();
+  private ListMultimap<Node, Edge<Node>> adjacencies = ArrayListMultimap.create();
+
+  void addEdge(Node source, Node target, int weight) {
+    adjacencies.put(source, new Edge<Node>(source, target, weight));
+    allNodes.add(source);
+    allNodes.add(target);
+  }
+
+  void clearVisited() {
+    for (Edge<Node> e : adjacencies.values()) {
+      e.from.visited = false;
+      e.to.visited = false;
+    }
+  }
+
+  Node getNewNode(V value){
+    return new Node(value);
+  }
+  
+  public List<Edge<Node>> getAdjacent(AdjacencyList<V>.Node source) {
+    return adjacencies.get(source);
+  }
+
+  public void printEdges() {
+    for (Edge<Node> e : adjacencies.values()) {
+      System.out.println(e.from.index + " -> " + e.to.index);
+    }
+  }
+
+  public AdjacencyList<V> getReversedList() {
+    AdjacencyList<V> newlist = new AdjacencyList<V>();
+    for (Edge<Node> e : adjacencies.values()) {
+      newlist.addEdge(e.to, e.from, e.weight);
+    }
+    return newlist;
+  }
+
+  public Set<Node> getNodeSet() {
+    return adjacencies.keySet();
+  }
+
+
+  Collection<Node> getInternalLeafNodes() {
+    // we have to use the allNodes list as otherwise destination only nodes won't be found.
+    List<Node> nodes = new LinkedList<Node>(allNodes);
+
+    for (Iterator<Node> i = nodes.iterator(); i.hasNext();) {
+      final Node n = i.next();
+
+      // remove any nodes that have one or more outbound edges.
+      List<Edge<Node>> adjList = this.getAdjacent(n);
+      if (adjList != null && !adjList.isEmpty()) i.remove();
+
+    }
+    return nodes;
+  }
+  
+  /**
+   * Get a list of nodes that have no outbound edges.
+   * 
+   * @return the values of all nodes that have no outbound edges.
+   */
+  public Collection<V> getLeafNodes(){
+    return convert(getInternalLeafNodes());
+  }
+
+
+  Collection<Node> getInternalRootNodes() {
+    Set<Node> nodes = new HashSet<Node>(getNodeSet());
+    for (Edge<Node> e : adjacencies.values()) {
+      nodes.remove(e.to);
+    }
+    return nodes;
+  }
+
+  /**
+   * Get a list of all nodes that have no incoming edges.
+   * 
+   * @return the values of all nodes that have no incoming edges.
+   */
+  public List<V> getRootNodes(){
+    return convert(getInternalRootNodes());
+  }
+  
+  public Collection<Edge<Node>> getAllEdges() {
+    return adjacencies.values();
+  }
+
+  public void fix(boolean requireDirected) {
+    adjacencies = Multimaps.unmodifiableListMultimap(adjacencies);
+    allNodes = Collections.unmodifiableSet(allNodes);
+
+    if (requireDirected) {
+      List<List<Node>> cyclicReferences = GraphAlgos.checkDirected(this);
+      if (cyclicReferences.size() > 0) {
+        throw new IllegalArgumentException(
+            "A logical plan must be a valid DAG.  You have cyclic references in your graph.  " + cyclicReferences);
+      }
+    }
+  }
+
+  List<V> convert(Collection<Node> nodes) {
+    List<V> out = new ArrayList<V>(nodes.size());
+    for (Node o : nodes) {
+      out.add(o.getNodeValue());
+    }
+    return out;
+  }
+
+  class Node implements Comparable<Node> {
+    final V nodeValue;
+    boolean visited = false; // used for Kosaraju's algorithm and Edmonds's
+                             // algorithm
+    int lowlink = -1; // used for Tarjan's algorithm
+    int index = -1; // used for Tarjan's algorithm
+
+    public Node(final V operator) {
+      if (operator == null) throw new IllegalArgumentException("Operator node was null.");
+      this.nodeValue = operator;
+    }
+
+    public int compareTo(final Node argNode) {
+      // just do an identity compare since elsewhere you should ensure that only one node exists for each nodeValue.
+      return argNode == this ? 0 : -1;
+    }
+
+    @Override
+    public int hashCode() {
+      return nodeValue.hashCode();
+    }
+
+    public V getNodeValue() {
+      return nodeValue;
+    }
+
+    @Override
+    public String toString() {
+      return "Node [val=" + nodeValue + "]";
+    }
+
+  }
+  
+  public static <V extends GraphValue<V>> AdjacencyList<V> newInstance(Collection<V> nodes){
+    AdjacencyList<V> list = new AdjacencyList<V>();
+    AdjacencyListBuilder<V> builder = new AdjacencyListBuilder<V>(list);
+    for(V v : nodes){
+      v.accept(builder); 
+    }
+    return builder.getAdjacencyList();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/AdjacencyListBuilder.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/AdjacencyListBuilder.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/AdjacencyListBuilder.java
new file mode 100644
index 0000000..1668477
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/AdjacencyListBuilder.java
@@ -0,0 +1,74 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.graph;
+
+import java.util.HashMap;
+import java.util.Map;
+
+ class AdjacencyListBuilder<V extends GraphValue<V>> implements GraphVisitor<V> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AdjacencyListBuilder.class);
+
+  private Map<V, AdjacencyList<V>.Node> ops = new HashMap<V, AdjacencyList<V>.Node>();
+  private final AdjacencyList<V> parent;
+  
+  public AdjacencyListBuilder(AdjacencyList<V> parent) {
+    this.parent = parent;
+  }
+
+
+  protected boolean requireDirected() {
+    return true;
+  }
+
+  public boolean enter(V o) {
+    visit(o);
+    return true;
+  }
+
+  @Override
+  public void leave(V o) {
+  }
+
+  @Override
+  public boolean visit(V o) {
+    if (o == null) throw new IllegalArgumentException("Null operator.");
+
+    if (!ops.containsKey(o)) {
+      ops.put(o, parent.getNewNode(o));
+      return true;
+    }
+
+    return true;
+  }
+
+  public AdjacencyList<V> getAdjacencyList() {
+    logger.debug("Values; {}", ops.values().toArray());
+    AdjacencyList<V> a = new AdjacencyList<V>();
+
+    for (AdjacencyList<V>.Node from : ops.values()) {
+      for (V t : from.getNodeValue()) {
+        AdjacencyList<V>.Node to = ops.get(t);
+        a.addEdge(from, to, 0);
+      }
+
+    }
+    a.fix(true);
+    return a;
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Edge.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Edge.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Edge.java
new file mode 100644
index 0000000..d444b9b
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Edge.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.graph;
+
+
+class Edge<N> implements Comparable<Edge<N>> {
+
+  final N from, to;
+  final int weight;
+
+  public Edge(final N argFrom, final N argTo, final int argWeight) {
+    from = argFrom;
+    to = argTo;
+    weight = argWeight;
+  }
+
+  public int compareTo(final Edge<N> argEdge) {
+    return weight - argEdge.weight;
+  }
+
+  @Override
+  public String toString() {
+    return "Edge [from=" + from + ", to=" + to + "]";
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Graph.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Graph.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Graph.java
new file mode 100644
index 0000000..295a81a
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Graph.java
@@ -0,0 +1,68 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.graph;
+
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.drill.common.logical.UnexpectedOperatorType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class Graph<G extends GraphValue<G>, R extends G, T extends G> {
+
+  static final Logger logger = LoggerFactory.getLogger(Graph.class);
+
+  private AdjacencyList<G> adjList;
+  private final List<R> roots;
+  private final List<T> leaves;
+
+  public Graph(List<G> operators, Class<R> root, Class<T> leaf) {
+    adjList = AdjacencyList.newInstance(operators);
+    roots = checkOperatorType(adjList.getRootNodes(), root, String.format("Root nodes must be a subclass of %s.", root.getSimpleName()));
+    leaves = checkOperatorType(adjList.getLeafNodes(), leaf, String.format("Leaf nodes must be a subclass of %s.", leaf.getSimpleName()));
+  }
+
+  @SuppressWarnings("unchecked")
+  private <O extends G> List<O> checkOperatorType(Collection<G> ops, Class<O> classIdentifier, String error){
+    for(G o : ops){
+      if(!classIdentifier.isAssignableFrom(o.getClass())){
+        throw new UnexpectedOperatorType(o, error);
+      }
+    }
+    return (List<O>) ops;
+  }
+  
+  public AdjacencyList<G> getAdjList() {
+    return adjList;
+  }
+
+  public Collection<R> getRoots() {
+    return roots;
+  }
+
+  public Collection<T> getLeaves() {
+    return leaves;
+  }
+
+  public static <G extends GraphValue<G>, R extends G, T extends G> Graph<G, R, T> newGraph(List<G> operators, Class<R> root, Class<T> leaf){
+    return new Graph<G, R, T>(operators, root, leaf);
+  }
+  
+}
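
A minimal construction sketch, assuming a list of LogicalOperator instances (mirroring how LogicalPlan below wires this up); the variable names are illustrative only:

    // Build a typed DAG; construction throws UnexpectedOperatorType if any root
    // is not a SourceOperator or any leaf is not a SinkOperator.
    Graph<LogicalOperator, SourceOperator, SinkOperator> graph =
        Graph.newGraph(operators, SourceOperator.class, SinkOperator.class);
    Collection<SourceOperator> sources = graph.getRoots();
    Collection<SinkOperator> sinks = graph.getLeaves();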

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphAlgos.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphAlgos.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphAlgos.java
new file mode 100644
index 0000000..b83f1bd
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphAlgos.java
@@ -0,0 +1,145 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.graph;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GraphAlgos {
+  static final Logger logger = LoggerFactory.getLogger(GraphAlgos.class);
+
+  public static class TopoSorter<V extends GraphValue<V>> {
+    final List<AdjacencyList<V>.Node> sorted = new LinkedList<AdjacencyList<V>.Node>();
+    final AdjacencyList<V> rGraph;
+
+    private TopoSorter(AdjacencyList<V> graph) {
+      graph.clearVisited();
+
+      this.rGraph = graph.getReversedList();
+      Collection<AdjacencyList<V>.Node> sourceNodes = rGraph.getInternalRootNodes();
+
+      for (AdjacencyList<V>.Node n : sourceNodes) {
+        visit(n);
+      }
+    }
+
+    private void visit(AdjacencyList<V>.Node n) {
+      if (n.visited) return;
+
+      n.visited = true;
+      List<Edge<AdjacencyList<V>.Node>> edges = rGraph.getAdjacent(n);
+      if (edges != null) {
+        for (Edge<AdjacencyList<V>.Node> e : edges) {
+          visit(e.to);
+        }
+      }
+
+      sorted.add(n);
+
+    }
+
+    /**
+     * Execute a depth-first sort on the reversed DAG.
+     * 
+     * @param graph
+     *          The adjacency list for the DAG.
+     * @return The nodes of the DAG in topologically sorted order.
+     */
+    static <V extends GraphValue<V>> List<AdjacencyList<V>.Node> sortInternal(AdjacencyList<V> graph) {
+      TopoSorter<V> ts = new TopoSorter<V>(graph);
+      return ts.sorted;
+    }
+
+    public static <V extends GraphValue<V>> List<V> sort(Graph<V, ?, ?> graph) {
+      AdjacencyList<V> l = graph.getAdjList();
+      return l.convert(sortInternal(l));
+    }
+  }
+
+  static <V extends GraphValue<V>> List<List<AdjacencyList<V>.Node>> checkDirected(AdjacencyList<V> graph) {
+    Tarjan<V> t = new Tarjan<V>();
+    List<List<AdjacencyList<V>.Node>> subgraphs = t.executeTarjan(graph);
+    for (Iterator<List<AdjacencyList<V>.Node>> i = subgraphs.iterator(); i.hasNext();) {
+      List<AdjacencyList<V>.Node> l = i.next();
+      if (l.size() == 1) i.remove();
+    }
+    return subgraphs;
+  }
+
+  public static <V extends GraphValue<V>> List<List<AdjacencyList<V>.Node>> checkDirected(Graph<V, ?, ?> graph) {
+    return checkDirected(graph.getAdjList());
+  }
+
+  public static class Tarjan<V extends GraphValue<V>> {
+
+    private int index = 0;
+    private List<AdjacencyList<V>.Node> stack = new LinkedList<AdjacencyList<V>.Node>();
+    private List<List<AdjacencyList<V>.Node>> SCC = new LinkedList<List<AdjacencyList<V>.Node>>();
+
+    public List<List<AdjacencyList<V>.Node>> executeTarjan(AdjacencyList<V> graph) {
+      SCC.clear();
+      index = 0;
+      stack.clear();
+      if (graph != null) {
+        List<AdjacencyList<V>.Node> nodeList = new LinkedList<AdjacencyList<V>.Node>(graph.getNodeSet());
+        for (AdjacencyList<V>.Node node : nodeList) {
+          if (node.index == -1) {
+            tarjan(node, graph);
+          }
+        }
+      }
+      return SCC;
+    }
+
+    private List<List<AdjacencyList<V>.Node>> tarjan(AdjacencyList<V>.Node v, AdjacencyList<V> list) {
+      v.index = index;
+      v.lowlink = index;
+      index++;
+      stack.add(0, v);
+      List<Edge<AdjacencyList<V>.Node>> l = list.getAdjacent(v);
+      if (l != null) {
+        for (Edge<AdjacencyList<V>.Node> e : l) {
+          AdjacencyList<V>.Node n = e.to;
+          if (n.index == -1) {
+            tarjan(n, list);
+            v.lowlink = Math.min(v.lowlink, n.lowlink);
+          } else if (stack.contains(n)) {
+            v.lowlink = Math.min(v.lowlink, n.index);
+          }
+        }
+      }
+      if (v.lowlink == v.index) {
+        AdjacencyList<V>.Node n;
+        List<AdjacencyList<V>.Node> component = new LinkedList<AdjacencyList<V>.Node>();
+        do {
+          n = stack.remove(0);
+          component.add(n);
+        } while (n != v);
+        SCC.add(component);
+      }
+      return SCC;
+    }
+  }
+}
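
Two usage sketches against an assumed graph instance: the topological sort used by LogicalPlan.getSortedOperators below, and the Tarjan-based cycle check that the removed OperatorGraph used to validate that a plan is a DAG:

    // Produce a dependency-consistent ordering of the operators.
    List<LogicalOperator> ordered = GraphAlgos.TopoSorter.sort(graph);

    // checkDirected returns only the strongly connected components with more
    // than one node, i.e. the cyclic references.
    List<List<AdjacencyList<LogicalOperator>.Node>> cycles = GraphAlgos.checkDirected(graph);
    if (!cycles.isEmpty()) {
      throw new IllegalArgumentException("A logical plan must be a valid DAG. Cyclic references: " + cycles);
    }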

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphValue.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphValue.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphValue.java
new file mode 100644
index 0000000..b1eeede
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphValue.java
@@ -0,0 +1,26 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.graph;
+
+
+public interface GraphValue<T> extends Iterable<T>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(GraphValue.class);
+  
+  public void accept(GraphVisitor<T> visitor);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphVisitor.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphVisitor.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphVisitor.java
new file mode 100644
index 0000000..5ecdaea
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/GraphVisitor.java
@@ -0,0 +1,25 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.graph;
+
+
+public interface GraphVisitor<T> {
+  public boolean enter(T o);
+  public void leave(T o);
+  public boolean visit(T o);
+}
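
A visitor sketch following the enter/visit/leave contract used by LogicalOperatorBase.accept further below; it merely records each operator it reaches (Set/HashSet imports assumed):

    // enter() gates descent into children; visit() records the node; leave() is a no-op here.
    GraphVisitor<LogicalOperator> collector = new GraphVisitor<LogicalOperator>() {
      private final Set<LogicalOperator> seen = new HashSet<LogicalOperator>();

      public boolean enter(LogicalOperator o) { return visit(o); }
      public void leave(LogicalOperator o) { }
      public boolean visit(LogicalOperator o) { return seen.add(o); }
    };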

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Visitable.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Visitable.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Visitable.java
new file mode 100644
index 0000000..90ee1d1
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/graph/Visitable.java
@@ -0,0 +1,22 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.graph;
+
+public interface Visitable<T extends Visitable<?>> {
+  public void accept(T node);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/JSONOptions.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/JSONOptions.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/JSONOptions.java
deleted file mode 100644
index d4d97f6..0000000
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/JSONOptions.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*******************************************************************************
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package org.apache.drill.common.logical;
-
-import java.io.IOException;
-
-import org.apache.drill.common.exceptions.LogicalPlanParsingException;
-import org.apache.drill.common.logical.JSONOptions.De;
-import org.apache.drill.common.logical.JSONOptions.Se;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.core.JsonGenerationException;
-import com.fasterxml.jackson.core.JsonGenerator;
-import com.fasterxml.jackson.core.JsonLocation;
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonParser.Feature;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.core.TreeNode;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.databind.SerializerProvider;
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.fasterxml.jackson.databind.annotation.JsonSerialize;
-import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
-import com.fasterxml.jackson.databind.ser.std.StdSerializer;
-
-@JsonSerialize(using = Se.class)
-@JsonDeserialize(using = De.class)
-public class JSONOptions {
-  private static volatile ObjectMapper MAPPER;
-  
-  final static Logger logger = LoggerFactory.getLogger(JSONOptions.class);
-  
-  private JsonNode root;
-  private JsonLocation location;
-  
-  private JSONOptions(JsonNode n, JsonLocation location){
-    this.root = n;
-    this.location = location;
-  }
-  
-  public <T> T getWith(Class<T> c){
-    try {
-      //logger.debug("Read tree {}", root);
-      return getMapper().treeToValue(root, c);
-    } catch (JsonProcessingException e) {
-      throw new LogicalPlanParsingException(String.format("Failure while trying to convert late bound json options to type of %s. Reference was originally located at line %d, column %d.", c.getCanonicalName(), location.getLineNr(), location.getColumnNr()), e);
-    }
-  }
-  
-  public JsonNode path(String name){
-    return root.path(name);
-  }
-  
-  private static synchronized ObjectMapper getMapper(){
-    if(MAPPER == null){
-      ObjectMapper mapper = new ObjectMapper();
-      mapper.enable(SerializationFeature.INDENT_OUTPUT);
-      mapper.configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
-      mapper.configure(Feature.ALLOW_COMMENTS, true);
-      MAPPER = mapper;
-    }
-    return MAPPER;
-  }
-  
-  public static class De extends StdDeserializer<JSONOptions> {
-    
-    public De() {
-      super(JSONOptions.class);
-      logger.debug("Creating Deserializer.");
-    }
-
-    @Override
-    public JSONOptions deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException,
-        JsonProcessingException {
-      JsonLocation l = jp.getTokenLocation();
-      //logger.debug("Reading tree.");
-      TreeNode n = jp.readValueAsTree();
-      if(n instanceof JsonNode){
-        return new JSONOptions( (JsonNode) n, l); 
-      }else{
-        ctxt.mappingException("Failure reading json options as JSON value.");
-        return null;
-      }
-    }
-
-  }
-
-  public static class Se extends StdSerializer<JSONOptions> {
-
-    public Se() {
-      super(JSONOptions.class);
-    }
-
-    @Override
-    public void serialize(JSONOptions value, JsonGenerator jgen, SerializerProvider provider) throws IOException,
-        JsonGenerationException {
-      jgen.writeTree(value.root);
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java
index 74f7ee9..a2f2499 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/LogicalPlan.java
@@ -19,17 +19,16 @@ package org.apache.drill.common.logical;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.drill.common.PlanProperties;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.exceptions.LogicalPlanParsingException;
-import org.apache.drill.common.logical.OperatorGraph.OpNode;
+import org.apache.drill.common.graph.Graph;
+import org.apache.drill.common.graph.GraphAlgos;
 import org.apache.drill.common.logical.data.LogicalOperator;
-import org.apache.drill.common.logical.graph.GraphAlgos;
+import org.apache.drill.common.logical.data.SinkOperator;
+import org.apache.drill.common.logical.data.SourceOperator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -40,94 +39,51 @@ import com.fasterxml.jackson.annotation.JsonPropertyOrder;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Charsets;
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import com.google.common.io.Files;
 
-@JsonPropertyOrder({"head", "storage", "query"})
+@JsonPropertyOrder({ "head", "storage", "query" })
 public class LogicalPlan {
   static final Logger logger = LoggerFactory.getLogger(LogicalPlan.class);
-  
-	private final PlanProperties properties;
-	private final Map<String, StorageEngineConfig> storageEngines;
-	private final OperatorGraph graph;
-	
-	private static volatile ObjectMapper MAPPER;
-	
-	@SuppressWarnings("unchecked")
+
+  private final PlanProperties properties;
+  private final Map<String, StorageEngineConfig> storageEngineMap;
+  private final Graph<LogicalOperator, SourceOperator, SinkOperator> graph;
+
   @JsonCreator
-	public LogicalPlan(@JsonProperty("head") PlanProperties head, @JsonProperty("storage") List<StorageEngineConfig> storageEngines, @JsonProperty("query") List<LogicalOperator> operators){
-	  if(storageEngines == null) storageEngines = Collections.EMPTY_LIST;
-	  this.properties = head;
-	  this.storageEngines = new HashMap<String, StorageEngineConfig>(storageEngines.size());
-    for(StorageEngineConfig store: storageEngines){
-      StorageEngineConfig old = this.storageEngines.put(store.getName(), store);
-      if(old != null) throw new LogicalPlanParsingException(String.format("Each storage engine must have a unique name.  You provided more than one data source with the same name of '%s'", store.getName()));
-    }
-    
-    this.graph = new OperatorGraph(operators);
-	}
-	
-	@JsonProperty("query")
-	public List<LogicalOperator> getSortedOperators(){
-	  List<OpNode> nodes = GraphAlgos.TopoSorter.sort(graph.getAdjList());
-	  Iterable<LogicalOperator> i = Iterables.transform(nodes, new Function<OpNode, LogicalOperator>(){
-	    public LogicalOperator apply(OpNode o){
-	      return o.getNodeValue();
-	    }
-	  });
-	  return Lists.newArrayList(i);
-	}
+  public LogicalPlan(@JsonProperty("head") PlanProperties head,
+      @JsonProperty("storage") Map<String, StorageEngineConfig> storageEngineMap,
+      @JsonProperty("query") List<LogicalOperator> operators) {
+    this.storageEngineMap = storageEngineMap;
+    this.properties = head;
+    this.graph = Graph.newGraph(operators, SourceOperator.class, SinkOperator.class);
+  }
 
-	public StorageEngineConfig getStorageEngine(String name){
-	  StorageEngineConfig ds = storageEngines.get(name);
-	  if(ds == null) throw new LogicalPlanParsingException(String.format("Unknown data source named [%s].", name));
-	  return ds;
-	}
-	
-	@JsonIgnore
-	public OperatorGraph getGraph(){
-	  return graph;
-	}
-	
-	@JsonProperty("head")
-  public PlanProperties getProperties() {
-    return properties;
+  @JsonProperty("query")
+  public List<LogicalOperator> getSortedOperators() {
+    return GraphAlgos.TopoSorter.sort(graph);
   }
 
+  public StorageEngineConfig getStorageEngine(String name) {
+    return storageEngineMap.get(name);
+  }
+
+  @JsonIgnore
+  public Graph<LogicalOperator, SourceOperator, SinkOperator> getGraph() {
+    return graph;
+  }
 
-	@JsonProperty("storage") 
-  public List<StorageEngineConfig> getStorageEngines() {
-    return new ArrayList<StorageEngineConfig>(storageEngines.values());
+  @JsonProperty("head")
+  public PlanProperties getProperties() {
+    return properties;
   }
-	
-//	public static LogicalPlan readFromString(String planString, DrillConfig config) throws JsonParseException, JsonMappingException, IOException{
-//	  ObjectMapper mapper = config.getMapper();
-//    LogicalPlan plan = mapper.readValue(planString, LogicalPlan.class);
-//    return plan;
-//	}
-//	
-//	public static LogicalPlan readFromResourcePath(String fileName, DrillConfig config) throws IOException{
-//	  URL u = LogicalPlan.class.getResource(fileName);
-//	  if(u == null) throw new FileNotFoundException(String.format("Unable to find file on path %s", fileName));
-//	  return readFromFile(u.getFile(), config);
-//	}
-//	
-//	public static LogicalPlan readFromFile(String fileName, DrillConfig config) throws IOException{
-//	  String planString = Files.toString(new File(fileName), Charsets.UTF_8);
-//	  return readFromString(planString, config);
-//	}
-//	
-	public String toJsonString(DrillConfig config) throws JsonProcessingException{
-    return config.getMapper().writeValueAsString(this);  
-	}
 
+  @JsonProperty("storage")
+  public Map<String, StorageEngineConfig> getStorageEngines() {
+    return storageEngineMap;
+  }
 
-  public static void main(String[] args) throws Exception {
-    DrillConfig config = DrillConfig.create();
-    String externalPlan = Files.toString(new File("src/test/resources/simple_plan.json"), Charsets.UTF_8);
-    LogicalPlan plan = parse(config, externalPlan);
+  public String toJsonString(DrillConfig config) throws JsonProcessingException {
+    return config.getMapper().writeValueAsString(this);
   }
 
   /** Parses a logical plan. */
@@ -135,7 +91,6 @@ public class LogicalPlan {
     ObjectMapper mapper = config.getMapper();
     try {
       LogicalPlan plan = mapper.readValue(planString, LogicalPlan.class);
-      System.out.println(mapper.writeValueAsString(plan));
       return plan;
     } catch (IOException e) {
       throw new RuntimeException(e);
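
A round-trip sketch using only the methods kept above; planJson is an assumed String holding a serialized plan:

    DrillConfig config = DrillConfig.create();
    LogicalPlan plan = LogicalPlan.parse(config, planJson);
    String serialized = plan.toJsonString(config);
    List<LogicalOperator> ordered = plan.getSortedOperators(); // topological order via GraphAlgos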

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/OperatorGraph.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/OperatorGraph.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/OperatorGraph.java
deleted file mode 100644
index 8480b8a..0000000
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/OperatorGraph.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*******************************************************************************
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package org.apache.drill.common.logical;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.drill.common.expression.visitors.OpVisitor;
-import org.apache.drill.common.logical.data.LogicalOperator;
-import org.apache.drill.common.logical.data.SinkOperator;
-import org.apache.drill.common.logical.data.SourceOperator;
-import org.apache.drill.common.logical.graph.AdjacencyList;
-import org.apache.drill.common.logical.graph.GraphAlgos;
-import org.apache.drill.common.logical.graph.Node;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class OperatorGraph {
-
-  static final Logger logger = LoggerFactory.getLogger(OperatorGraph.class);
-
-  private AdjacencyList<OpNode> adjList;
-  private final Collection<SourceOperator> sources;
-  private final Collection<SinkOperator> sinks;
-
-  public OperatorGraph(List<LogicalOperator> operators) {
-    AdjacencyListBuilder b = new AdjacencyListBuilder();
-
-    // Some of these operators are operator chains hidden through the use of sequences. This is okay because the
-    // adjacency list builder is responsible for grabbing these as well.
-    for (LogicalOperator o : operators) {
-      o.accept(b);
-    }
-
-    adjList = b.getAdjacencyList();
-
-     List<List<OpNode>> cyclicReferences = GraphAlgos.checkDirected(adjList);
-     if(cyclicReferences.size() > 0){
-     throw new
-     IllegalArgumentException("A logical plan must be a valid DAG.  You have cyclic references in your graph.  " +
-     cyclicReferences);
-     }
-    sources = convert(adjList.getStartNodes(), SourceOperator.class, "Error determing list of source operators.");
-    // logger.debug("Source list {}", sources);
-    sinks = convert(adjList.getTerminalNodes(), SinkOperator.class, "Error determing list of source operators.");
-    // logger.debug("Sink list {}", sinks);
-
-  }
-
-  public AdjacencyList<OpNode> getAdjList() {
-    return adjList;
-  }
-
-  public Collection<SourceOperator> getSources() {
-    return sources;
-  }
-
-  public Collection<SinkOperator> getSinks() {
-    return sinks;
-  }
-
-  @SuppressWarnings("unchecked")
-  private <T extends LogicalOperator> Collection<T> convert(Collection<OpNode> nodes, Class<T> classIdentifier,
-      String error) {
-    List<T> out = new ArrayList<T>(nodes.size());
-    for (OpNode o : nodes) {
-      LogicalOperator lo = o.getNodeValue();
-      if (classIdentifier.isAssignableFrom(lo.getClass())) {
-        out.add((T) lo);
-      } else {
-        throw new UnexpectedOperatorType(classIdentifier, lo, error);
-      }
-    }
-    return out;
-  }
-
-  public class AdjacencyListBuilder implements OpVisitor {
-    Map<LogicalOperator, OpNode> ops = new HashMap<LogicalOperator, OpNode>();
-
-    public boolean enter(LogicalOperator o) {
-      visit(o);
-      return true;
-    }
-
-    @Override
-    public void leave(LogicalOperator o) {
-//      for (LogicalOperator child : o) {
-//        child.accept(this);
-//      }
-    }
-
-    @Override
-    public boolean visit(LogicalOperator o) {
-      if(o == null) throw new IllegalArgumentException("Null operator.");
-      
-      if (!ops.containsKey(o)) {
-        ops.put(o, new OpNode(o));
-        return true;
-      }
-
-      return true;
-    }
-
-    public AdjacencyList<OpNode> getAdjacencyList() {
-      logger.debug("Values; {}", ops.values().toArray());
-      AdjacencyList<OpNode> a = new AdjacencyList<OpNode>();
-      for (OpNode from : ops.values()) {
-        for (LogicalOperator t : from.getNodeValue()) {
-          OpNode to = ops.get(t);
-          a.addEdge(from, to, 0);
-        }
-
-      }
-      a.fix();
-      return a;
-    }
-
-  }
-
-  public static class OpNode extends Node<LogicalOperator> {
-
-    public OpNode(LogicalOperator operator) {
-      super(operator);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/PlanProperties.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/PlanProperties.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/PlanProperties.java
deleted file mode 100644
index 11b5f6c..0000000
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/PlanProperties.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*******************************************************************************
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package org.apache.drill.common.logical;
-
-
-public class PlanProperties {
-	public String type = "apache_drill_logical_plan";
-	public int version;
-	public Generator generator = new Generator();
-	
-	public static class Generator{
-		public String type;
-		public String info;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfig.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfig.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfig.java
index f08c3d4..3a893d6 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfig.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfig.java
@@ -23,5 +23,4 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
 
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property="type")
 public interface StorageEngineConfig{
-  public String getName();
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfigBase.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfigBase.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfigBase.java
index 814cc21..853196c 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfigBase.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/StorageEngineConfigBase.java
@@ -28,15 +28,6 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 public abstract class StorageEngineConfigBase implements StorageEngineConfig{
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StorageEngineConfigBase.class);
   
-  private final String name;
-  
-  public StorageEngineConfigBase(@JsonProperty("name") String name) {
-    this.name = name;
-  }
-
-  public String getName() {
-    return name;
-  }
   
   public synchronized static Class<?>[] getSubTypes(DrillConfig config){
     List<String> packages = config.getStringList(CommonConstants.STORAGE_ENGINE_CONFIG_SCAN_PACKAGES);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/UnexpectedOperatorType.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/UnexpectedOperatorType.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/UnexpectedOperatorType.java
index e2af21d..d515503 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/UnexpectedOperatorType.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/UnexpectedOperatorType.java
@@ -17,7 +17,6 @@
  ******************************************************************************/
 package org.apache.drill.common.logical;
 
-import org.apache.drill.common.logical.data.LogicalOperator;
 
 public class UnexpectedOperatorType extends ValidationError{
 
@@ -25,8 +24,8 @@ public class UnexpectedOperatorType extends ValidationError{
     super(message);
   }
   
-  public <A extends LogicalOperator, B extends LogicalOperator> UnexpectedOperatorType(Class<A> expected, B operator, String message) {
-    super(message + " Expected operator of type " + expected.getSimpleName() + " but received operator of type " + operator.getClass().getCanonicalName());
+  public UnexpectedOperatorType(Object operator, String message) {
+    super(message + " Received node of type " + operator.getClass().getCanonicalName());
   }
 
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java
index e2bda45..a5301bb 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java
@@ -20,7 +20,7 @@ package org.apache.drill.common.logical.data;
 import java.util.Collection;
 import java.util.List;
 
-import org.apache.drill.common.expression.visitors.OpVisitor;
+import org.apache.drill.common.graph.GraphValue;
 import org.apache.drill.common.logical.ValidationError;
 
 import com.fasterxml.jackson.annotation.JsonIdentityInfo;
@@ -31,11 +31,8 @@ import com.fasterxml.jackson.annotation.ObjectIdGenerators;
 @JsonPropertyOrder({"@id", "memo", "input"}) // op will always be first since it is wrapped.
 @JsonIdentityInfo(generator=ObjectIdGenerators.IntSequenceGenerator.class, property="@id")
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property="op")
-public interface LogicalOperator extends Iterable<LogicalOperator>{
+public interface LogicalOperator extends GraphValue<LogicalOperator>{
 	
-	//public static final Class<?>[] SUB_TYPES = {Write.class, CollapsingAggregate.class, Segment.class, Filter.class, Flatten.class, Join.class, Order.class, Limit.class, Project.class, Scan.class, Sequence.class, Transform.class, Union.class, WindowFrame.class};
-	
-	public void accept(OpVisitor visitor);
 	public void registerAsSubscriber(LogicalOperator operator);
 	public void setupAndValidate(List<LogicalOperator> operators, Collection<ValidationError> errors);
 	

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
index 89ffbb2..5802056 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
@@ -24,7 +24,7 @@ import java.util.List;
 
 import org.apache.drill.common.config.CommonConstants;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.expression.visitors.OpVisitor;
+import org.apache.drill.common.graph.GraphVisitor;
 import org.apache.drill.common.logical.ValidationError;
 import org.apache.drill.common.util.PathScanner;
 
@@ -54,7 +54,7 @@ public abstract class LogicalOperatorBase implements LogicalOperator{
   }
 
   @Override
-  public void accept(OpVisitor visitor) {
+  public void accept(GraphVisitor<LogicalOperator> visitor) {
     if(visitor.enter(this)){
       for(LogicalOperator o : children){
         o.accept(visitor);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Order.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Order.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Order.java
index 1c8108e..d06f193 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Order.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Order.java
@@ -17,27 +17,26 @@
  ******************************************************************************/
 package org.apache.drill.common.logical.data;
 
+import org.apache.drill.common.defs.OrderDef;
 import org.apache.drill.common.expression.FieldReference;
-import org.apache.drill.common.expression.LogicalExpression;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 
 @JsonTypeName("order")
 public class Order extends SingleInputOperator {
 
-  private final Ordering[] orderings;
+  private final OrderDef[] orderings;
   private final FieldReference within;
 
   @JsonCreator
-  public Order(@JsonProperty("within") FieldReference within, @JsonProperty("orderings") Ordering... orderings) {
+  public Order(@JsonProperty("within") FieldReference within, @JsonProperty("orderings") OrderDef... orderings) {
     this.orderings = orderings;
     this.within = within;
   }
   
-  public Ordering[] getOrderings() {
+  public OrderDef[] getOrderings() {
     return orderings;
   }
   
@@ -45,46 +44,6 @@ public class Order extends SingleInputOperator {
     return within;
   }
 
-  public static class Ordering {
-
-    private final Direction direction;
-    private final LogicalExpression expr;
-
-    @JsonCreator
-    public Ordering(@JsonProperty("order") String strOrder, @JsonProperty("expr") LogicalExpression expr) {
-      this.expr = expr;
-      this.direction = Direction.DESC.description.equals(strOrder) ? Direction.DESC : Direction.ASC; // default
-                                                                                                     // to
-                                                                                                     // ascending
-                                                                                                     // unless
-                                                                                                     // desc
-                                                                                                     // is
-                                                                                                     // provided.
-    }
-
-    @JsonIgnore
-    public Direction getDirection() {
-      return direction;
-    }
-
-    public LogicalExpression getExpr() {
-      return expr;
-    }
-
-    public String getOrder() {
-      return direction.description;
-    }
-
-  }
-
-  public static enum Direction {
-    ASC("asc"), DESC("desc");
-    public final String description;
-
-    Direction(String d) {
-      description = d;
-    }
-  }
   
   
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Scan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Scan.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Scan.java
index f22a5bc..c8d396b 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Scan.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Scan.java
@@ -17,8 +17,8 @@
  ******************************************************************************/
 package org.apache.drill.common.logical.data;
 
+import org.apache.drill.common.JSONOptions;
 import org.apache.drill.common.expression.FieldReference;
-import org.apache.drill.common.logical.JSONOptions;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/2a6e1b33/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Store.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Store.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Store.java
index 4168468..0569b8a 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Store.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/logical/data/Store.java
@@ -17,9 +17,8 @@
  ******************************************************************************/
 package org.apache.drill.common.logical.data;
 
-import org.apache.drill.common.exceptions.ExpressionParsingException;
-import org.apache.drill.common.expression.LogicalExpression;
-import org.apache.drill.common.logical.JSONOptions;
+import org.apache.drill.common.JSONOptions;
+import org.apache.drill.common.defs.PartitionDef;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -31,10 +30,10 @@ public class Store extends SinkOperator{
   
   private final String storageEngine;
   private final JSONOptions target;
-  private final PartitionOptions partition;
+  private final PartitionDef partition;
 
   @JsonCreator
-  public Store(@JsonProperty("storageengine") String storageEngine, @JsonProperty("target") JSONOptions target, @JsonProperty("partition") PartitionOptions partition) {
+  public Store(@JsonProperty("storageengine") String storageEngine, @JsonProperty("target") JSONOptions target, @JsonProperty("partition") PartitionDef partition) {
     super();
     this.storageEngine = storageEngine;
     this.target = target;
@@ -49,48 +48,10 @@ public class Store extends SinkOperator{
     return target;
   }
 
-  public PartitionOptions getPartition() {
+  public PartitionDef getPartition() {
     return partition;
   }
 
-  public static enum PartitionType{ 
-    RANDOM, HASH, ORDERED;
-    
-    public static PartitionType resolve(String val){
-      for(PartitionType pt : PartitionType.values()){
-        if(pt.name().equalsIgnoreCase(val)) return pt;
-      }
-      throw new ExpressionParsingException(String.format("Unable to determine partitioning type type for value '%s'.", val));
-
-    }
-    
-  };
   
-  public static class PartitionOptions{
-    private final PartitionType partitionType;
-    private final LogicalExpression[] expressions;
-    private final LogicalExpression[] starts;
-    
-    @JsonCreator
-    public PartitionOptions(@JsonProperty("partitionType") String partitionType, @JsonProperty("exprs") LogicalExpression[] expressions, @JsonProperty("starts") LogicalExpression[] starts) {
-      this.partitionType = PartitionType.resolve(partitionType);
-      this.expressions = expressions;
-      this.starts = starts;
-    }
-
-    public PartitionType getPartitionType() {
-      return partitionType;
-    }
-
-    public LogicalExpression[] getExpressions() {
-      return expressions;
-    }
-
-    public LogicalExpression[] getStarts() {
-      return starts;
-    }
-    
-    
-  }
   
 }


[8/9] basic framework for physical plan. abstraction of graph classes.

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/ReferenceStorageEngine.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/ReferenceStorageEngine.java b/sandbox/prototype/exec/java-exec/rse/ReferenceStorageEngine.java
new file mode 100644
index 0000000..41cba45
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/ReferenceStorageEngine.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.IOException;
+import java.util.Collection;
+
+import org.apache.drill.common.logical.data.Scan;
+import org.apache.drill.common.logical.data.Store;
+import org.apache.drill.exec.ref.rops.ROP;
+
+
+public interface ReferenceStorageEngine {
+  public boolean supportsRead();
+  public boolean supportsWrite();
+
+  public enum PartitionCapabilities {
+    NONE, HASH, SORTED;
+  }
+
+  public enum MemoryFormat {
+    RECORD, FIELD;
+  }
+
+  public Collection<ReadEntry> getReadEntries(Scan scan) throws IOException;
+  public RecordReader getReader(ReadEntry readEntry, ROP parentROP) throws IOException;
+  public RecordRecorder getWriter(Store store) throws IOException;
+
+  public interface ReadEntry{}
+}
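
A consumption sketch for the read side of the interface; the engine, scan, and parent ROP instances are assumed to exist elsewhere:

    // Resolve the scan into read entries, then obtain one reader per entry.
    if (engine.supportsRead()) {
      for (ReferenceStorageEngine.ReadEntry entry : engine.getReadEntries(scan)) {
        RecordReader reader = engine.getReader(entry, parentROP);
        // ... the parent ROP drives the reader ...
      }
    }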

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/BufferAllocator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/BufferAllocator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/BufferAllocator.java
new file mode 100644
index 0000000..a398607
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/BufferAllocator.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+
+import java.io.Closeable;
+
+import org.apache.drill.exec.server.DrillbitContext;
+
+/**
+ * Wrapper class to deal with byte buffer allocation. Ensures users only use designated methods. Also allows insertion of alternative allocator implementations.
+ */
+public abstract class BufferAllocator implements Closeable{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BufferAllocator.class);
+  
+  /**
+   * Allocate a new or reused buffer of the provided size. Note that the buffer may technically be larger than the requested size for rounding purposes; however, the buffer's capacity will be set to the configured size.
+   * @param size The size in bytes.
+   * @return A new ByteBuf.
+   */
+  public abstract ByteBuf buffer(int size);
+  
+  public abstract ByteBufAllocator getUnderlyingAllocator();
+  
+  /**
+   * Close and release all buffers generated from this buffer pool.
+   */
+  @Override
+  public abstract void close(); 
+  
+  public static BufferAllocator getAllocator(DrillbitContext context){
+    // TODO: support alternative allocators (including a debugging allocator that records all allocation locations for each buffer).
+    return new DirectBufferAllocator();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/DirectBufferAllocator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/DirectBufferAllocator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/DirectBufferAllocator.java
new file mode 100644
index 0000000..8c81dd6
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/DirectBufferAllocator.java
@@ -0,0 +1,47 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.PooledByteBufAllocator;
+
+public class DirectBufferAllocator extends BufferAllocator{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DirectBufferAllocator.class);
+
+  private final PooledByteBufAllocator buffer = new PooledByteBufAllocator(true);
+  
+  @Override
+  public ByteBuf buffer(int size) {
+    return buffer.directBuffer(size);
+  }
+
+  
+  @Override
+  public ByteBufAllocator getUnderlyingAllocator() {
+    return buffer;
+  }
+
+
+  @Override
+  public void close() {
+    // TODO: collect all outstanding buffers and release them using a weak hash map so we don't impact pool work.
+  }
+  
+  
+}
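
A usage sketch for the allocator pair above; the buffer is released explicitly because close() does not yet reclaim outstanding buffers (see the TODO):

    BufferAllocator allocator = new DirectBufferAllocator();
    ByteBuf buf = allocator.buffer(1024);   // pooled direct buffer
    try {
      buf.writeInt(42);                     // illustrative write
    } finally {
      buf.release();
      allocator.close();
    }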

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
new file mode 100644
index 0000000..d81870b
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -0,0 +1,31 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec;
+
+public interface ExecConstants {
+  public static final String ZK_RETRY_TIMES = "drill.exec.zk.retry.count";
+  public static final String ZK_RETRY_DELAY = "drill.exec.zk.retry.delay";
+  public static final String ZK_CONNECTION = "drill.exec.zk.connect";
+  public static final String ZK_TIMEOUT = "drill.exec.zk.timeout";
+  public static final String ZK_ROOT = "drill.exec.zk.root";
+  public static final String ZK_REFRESH = "drill.exec.zk.refresh";
+  public static final String STORAGE_ENGINE_SCAN_PACKAGES = "drill.exec.storage.packages";
+  public static final String SERVICE_NAME = "drill.exec.cluster-id";
+  public static final String INITIAL_BIT_PORT = "drill.exec.rpc.bit.port";
+  public static final String INITIAL_USER_PORT = "drill.exec.rpc.user.port";
+}
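
A minimal sketch of how these keys are consumed elsewhere in this patch: HazelCache and ZKClusterCoordinator read them through DrillConfig's getString/getInt accessors. The DrillConfig instance is assumed to be supplied by the caller; nothing beyond the accessors already used in this change is relied on.

  import org.apache.drill.common.config.DrillConfig;
  import org.apache.drill.exec.ExecConstants;

  public class ZkSettingsExample {
    // Reads the ZooKeeper-related settings used by ZKClusterCoordinator.
    public static void printZkSettings(DrillConfig config) {
      String connect = config.getString(ExecConstants.ZK_CONNECTION);
      String root = config.getString(ExecConstants.ZK_ROOT);
      int timeoutMs = config.getInt(ExecConstants.ZK_TIMEOUT);
      int retries = config.getInt(ExecConstants.ZK_RETRY_TIMES);
      System.out.println(connect + root + " timeout=" + timeoutMs + " retries=" + retries);
    }
  }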

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
new file mode 100644
index 0000000..1684960
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
@@ -0,0 +1,38 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.cache;
+
+import java.io.Closeable;
+import java.util.List;
+
+import org.apache.drill.exec.exception.DrillbitStartupException;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.proto.CoordinationProtos.WorkQueueStatus;
+
+
+public interface DistributedCache extends Closeable{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DistributedCache.class);
+  
+  public void run(DrillbitEndpoint endpoint) throws DrillbitStartupException;
+  
+  public void saveOptimizedPlan(TemplatizedLogicalPlan logical, TemplatizedPhysicalPlan physical);
+  public TemplatizedPhysicalPlan getOptimizedPlan(TemplatizedLogicalPlan logical);
+  
+  public void updateLocalQueueLength(int length);
+  public List<WorkQueueStatus> getQueueLengths(); 
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/HazelCache.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/HazelCache.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/HazelCache.java
new file mode 100644
index 0000000..cc73799
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/HazelCache.java
@@ -0,0 +1,133 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.cache;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.proto.CoordinationProtos.WorkQueueStatus;
+
+import com.beust.jcommander.internal.Lists;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import com.hazelcast.config.Config;
+import com.hazelcast.core.Hazelcast;
+import com.hazelcast.core.HazelcastInstance;
+import com.hazelcast.core.IMap;
+import com.hazelcast.core.ITopic;
+import com.hazelcast.core.Message;
+import com.hazelcast.core.MessageListener;
+import com.hazelcast.nio.DataSerializable;
+
+public class HazelCache implements DistributedCache {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HazelCache.class);
+
+  private final String instanceName;
+  private HazelcastInstance instance;
+  private ITopic<WrappedWorkQueueStatus> workQueueLengths;
+  private DrillbitEndpoint endpoint;
+  private Cache<WorkQueueStatus, Integer>  endpoints;
+  private IMap<TemplatizedLogicalPlan, TemplatizedPhysicalPlan> optimizedPlans;
+  
+  public HazelCache(DrillConfig config) {
+    this.instanceName = config.getString(ExecConstants.SERVICE_NAME);
+  }
+
+  private class Listener implements MessageListener<WrappedWorkQueueStatus>{
+
+    @Override
+    public void onMessage(Message<WrappedWorkQueueStatus> wrapped) {
+      logger.debug("Received new queue length message.");
+      endpoints.put(wrapped.getMessageObject().status, 0);
+    }
+    
+  }
+  
+  public void run(DrillbitEndpoint endpoint) {
+    Config c = new Config();
+    // TODO: utilize cluster membership to set up other nodes.
+    c.setInstanceName(instanceName);
+    instance = Hazelcast.newHazelcastInstance(c);
+    workQueueLengths = instance.getTopic("queue-length");
+    optimizedPlans = instance.getMap("plan-optimizations");
+    this.endpoint = endpoint;
+    endpoints = CacheBuilder.newBuilder().maximumSize(2000).build();
+    workQueueLengths.addMessageListener(new Listener());
+  }
+
+  @Override
+  public void saveOptimizedPlan(TemplatizedLogicalPlan logical, TemplatizedPhysicalPlan physical) {
+    optimizedPlans.put(logical, physical);
+  }
+
+  @Override
+  public TemplatizedPhysicalPlan getOptimizedPlan(TemplatizedLogicalPlan logical) {
+    return optimizedPlans.get(logical);
+  }
+
+  @Override
+  public void updateLocalQueueLength(int length) {
+    workQueueLengths.publish(new WrappedWorkQueueStatus(WorkQueueStatus.newBuilder().setEndpoint(endpoint)
+        .setQueueLength(length).setReportTime(System.currentTimeMillis()).build()));
+  }
+
+  @Override
+  public List<WorkQueueStatus> getQueueLengths() {
+    return Lists.newArrayList(endpoints.asMap().keySet());
+  }
+
+  public class WrappedWorkQueueStatus implements DataSerializable {
+
+    public WorkQueueStatus status;
+
+    public WrappedWorkQueueStatus(WorkQueueStatus status) {
+      this.status = status;
+    }
+
+    @Override
+    public void readData(DataInput arg0) throws IOException {
+      int len = arg0.readShort();
+      byte[] b = new byte[len];
+      arg0.readFully(b);
+      this.status = WorkQueueStatus.parseFrom(b);
+    }
+
+    @Override
+    public void writeData(DataOutput arg0) throws IOException {
+      byte[] b = status.toByteArray();
+      if (b.length > Short.MAX_VALUE) throw new IOException("Unexpectedly long value.");
+      arg0.writeShort(b.length);
+      arg0.write(b);
+    }
+
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.instance.getLifecycleService().shutdown();
+  }
+  
+
+  
+
+}
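
A usage sketch of the DistributedCache lifecycle as HazelCache implements it above. The DrillbitEndpoint is assumed to be built by the caller, since its protobuf builder is not part of this patch.

  import java.util.List;

  import org.apache.drill.common.config.DrillConfig;
  import org.apache.drill.exec.cache.HazelCache;
  import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
  import org.apache.drill.exec.proto.CoordinationProtos.WorkQueueStatus;

  public class HazelCacheExample {
    public static void reportQueue(DrillConfig config, DrillbitEndpoint endpoint) throws Exception {
      HazelCache cache = new HazelCache(config);
      cache.run(endpoint);                      // joins the Hazelcast cluster and subscribes to queue updates
      cache.updateLocalQueueLength(0);          // publish this node's queue length on the topic
      List<WorkQueueStatus> peers = cache.getQueueLengths();  // statuses received from other nodes so far
      System.out.println("known peers: " + peers.size());
      cache.close();
    }
  }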

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/TemplatizedLogicalPlan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/TemplatizedLogicalPlan.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/TemplatizedLogicalPlan.java
new file mode 100644
index 0000000..5ad9ef1
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/TemplatizedLogicalPlan.java
@@ -0,0 +1,22 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.cache;
+
+public class TemplatizedLogicalPlan {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TemplatizedLogicalPlan.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/TemplatizedPhysicalPlan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/TemplatizedPhysicalPlan.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/TemplatizedPhysicalPlan.java
new file mode 100644
index 0000000..643720c
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/cache/TemplatizedPhysicalPlan.java
@@ -0,0 +1,22 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.cache;
+
+public class TemplatizedPhysicalPlan {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TemplatizedPhysicalPlan.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ClusterCoordinator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ClusterCoordinator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ClusterCoordinator.java
new file mode 100644
index 0000000..9c7eab2
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ClusterCoordinator.java
@@ -0,0 +1,47 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.coord;
+
+import java.io.Closeable;
+import java.util.List;
+
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+
+/**
+ * Pluggable interface for managing cluster coordination. Allows a Drillbit or DrillClient to register its capabilities
+ * as well as discover other nodes' existence and capabilities.
+ **/
+public abstract class ClusterCoordinator implements Closeable {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ClusterCoordinator.class);
+
+  public abstract void start() throws Exception;
+
+  public abstract RegistrationHandle register(DrillbitEndpoint data);
+
+  public abstract void unregister(RegistrationHandle handle);
+
+  /**
+   * Get a list of available Drillbit endpoints.  Thread-safe.  May be slightly out of date depending on the refresh policy.
+   * @return A list of available endpoints.
+   */
+  public abstract List<DrillbitEndpoint> getAvailableEndpoints();
+
+  public interface RegistrationHandle {
+  }
+
+}
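
A sketch of the intended registration flow against this abstract API, using only the methods declared above. The concrete coordinator and endpoint are assumed to be provided by the caller; ZKClusterCoordinator below is one implementation.

  import java.util.List;

  import org.apache.drill.exec.coord.ClusterCoordinator;
  import org.apache.drill.exec.coord.ClusterCoordinator.RegistrationHandle;
  import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;

  public class CoordinatorExample {
    public static void registerAndList(ClusterCoordinator coordinator, DrillbitEndpoint me) throws Exception {
      coordinator.start();                                   // connect to the coordination service
      RegistrationHandle handle = coordinator.register(me);  // advertise this node's capabilities
      List<DrillbitEndpoint> peers = coordinator.getAvailableEndpoints();
      System.out.println("visible drillbits: " + peers.size());
      coordinator.unregister(handle);                        // withdraw before shutting down
      coordinator.close();
    }
  }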

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/DrillServiceInstanceHelper.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/DrillServiceInstanceHelper.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/DrillServiceInstanceHelper.java
new file mode 100644
index 0000000..289aa3c
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/DrillServiceInstanceHelper.java
@@ -0,0 +1,57 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.coord;
+
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillServiceInstance;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+
+import com.netflix.curator.x.discovery.ServiceInstance;
+import com.netflix.curator.x.discovery.ServiceInstanceBuilder;
+import com.netflix.curator.x.discovery.details.InstanceSerializer;
+
+public class DrillServiceInstanceHelper {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillServiceInstanceHelper.class);
+
+  
+  public static final InstanceSerializer<DrillbitEndpoint> SERIALIZER = new DrillServiceInstanceSerializer();
+  
+  private static class DrillServiceInstanceSerializer implements InstanceSerializer<DrillbitEndpoint>{
+
+    @Override
+    public byte[] serialize(ServiceInstance<DrillbitEndpoint> i) throws Exception {
+      DrillServiceInstance.Builder b = DrillServiceInstance.newBuilder();
+      b.setId(i.getId());
+      b.setRegistrationTimeUTC(i.getRegistrationTimeUTC());
+      b.setEndpoint(i.getPayload());
+      return b.build().toByteArray();
+    }
+
+    @Override
+    public ServiceInstance<DrillbitEndpoint> deserialize(byte[] bytes) throws Exception {
+      DrillServiceInstance i = DrillServiceInstance.parseFrom(bytes);
+      ServiceInstanceBuilder<DrillbitEndpoint> b = ServiceInstance.<DrillbitEndpoint>builder();
+      b.id(i.getId());
+      b.name(ExecConstants.SERVICE_NAME);
+      b.registrationTimeUTC(i.getRegistrationTimeUTC());
+      b.payload(i.getEndpoint());
+      return b.build();
+    }
+    
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ZKClusterCoordinator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ZKClusterCoordinator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ZKClusterCoordinator.java
new file mode 100644
index 0000000..b3cd27f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ZKClusterCoordinator.java
@@ -0,0 +1,145 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.coord;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+
+import com.google.common.base.Throwables;
+import com.netflix.curator.RetryPolicy;
+import com.netflix.curator.framework.CuratorFramework;
+import com.netflix.curator.framework.CuratorFrameworkFactory;
+import com.netflix.curator.framework.state.ConnectionState;
+import com.netflix.curator.retry.RetryNTimes;
+import com.netflix.curator.x.discovery.ServiceDiscovery;
+import com.netflix.curator.x.discovery.ServiceDiscoveryBuilder;
+import com.netflix.curator.x.discovery.ServiceInstance;
+import com.netflix.curator.x.discovery.details.ServiceCache;
+import com.netflix.curator.x.discovery.details.ServiceCacheListener;
+
+/** Manages cluster coordination using ZooKeeper. **/
+public class ZKClusterCoordinator extends ClusterCoordinator {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ZKClusterCoordinator.class);
+
+  private String basePath;
+  private CuratorFramework curator;
+  private ServiceDiscovery<DrillbitEndpoint> discovery;
+  private ServiceCache<DrillbitEndpoint> serviceCache;
+  private volatile List<DrillbitEndpoint> endpoints = Collections.emptyList();
+  private final String serviceName;
+  public ZKClusterCoordinator(DrillConfig config) throws IOException {
+    
+    this.basePath = config.getString(ExecConstants.ZK_ROOT);
+    this.serviceName =  config.getString(ExecConstants.SERVICE_NAME);
+    
+    RetryPolicy rp = new RetryNTimes(config.getInt(ExecConstants.ZK_RETRY_TIMES),
+        config.getInt(ExecConstants.ZK_RETRY_DELAY));
+    
+    curator = CuratorFrameworkFactory.builder()
+        .connectionTimeoutMs(config.getInt(ExecConstants.ZK_TIMEOUT))
+        .retryPolicy(rp)
+        .connectString(config.getString(ExecConstants.ZK_CONNECTION))
+        .build(); 
+    
+    discovery = getDiscovery();
+    serviceCache = discovery.serviceCacheBuilder().name(serviceName).refreshPaddingMs(config.getInt(ExecConstants.ZK_REFRESH)).build();
+  }
+
+  public void start() throws Exception {
+    logger.debug("Starting ZKClusterCoordination.");
+    curator.start();
+    discovery.start();
+    serviceCache.start();
+    serviceCache.addListener(new ZKListener());
+  }
+  
+  private class ZKListener implements ServiceCacheListener{
+    
+    @Override
+    public void stateChanged(CuratorFramework client, ConnectionState newState) {
+    }
+
+    @Override
+    public void cacheChanged() {
+      logger.debug("Cache changed, updating.");
+      try {
+        Collection<ServiceInstance<DrillbitEndpoint>> instances = discovery.queryForInstances(serviceName);
+        List<DrillbitEndpoint> newEndpoints = new ArrayList<DrillbitEndpoint>(instances.size());
+        for(ServiceInstance<DrillbitEndpoint> si : instances){
+          newEndpoints.add(si.getPayload());
+        }
+        endpoints = newEndpoints;
+      } catch (Exception e) {
+        logger.error("Failure while update Drillbit service location cache.", e);
+      }
+    }
+  }
+
+  public void close() throws IOException{
+    serviceCache.close();
+    discovery.close();
+    curator.close();
+  }
+  
+  @Override
+  public RegistrationHandle register(DrillbitEndpoint data) {
+    try {
+      ServiceInstance<DrillbitEndpoint> si = getSI(data);
+      discovery.registerService(si);
+      return new ZKRegistrationHandle(si.getId());
+    } catch (Exception e) {
+      Throwables.propagate(e);
+      return null;
+    }
+  }
+
+  @Override
+  public void unregister(RegistrationHandle handle) {
+    if( !( handle instanceof ZKRegistrationHandle)) throw new UnsupportedOperationException("Unknown handle type");
+    
+    ZKRegistrationHandle h = (ZKRegistrationHandle) handle;
+    try {
+      ServiceInstance<DrillbitEndpoint> si = ServiceInstance.<DrillbitEndpoint>builder().address("").port(0).id(h.id).name(ExecConstants.SERVICE_NAME).build();
+      discovery.unregisterService(si);
+    } catch (Exception e) {
+      Throwables.propagate(e);
+    }
+  }
+
+  @Override
+  public List<DrillbitEndpoint> getAvailableEndpoints() {
+    return this.endpoints;
+  }
+  
+  private ServiceInstance<DrillbitEndpoint> getSI(DrillbitEndpoint ep) throws Exception{
+    return ServiceInstance.<DrillbitEndpoint>builder().name(ExecConstants.SERVICE_NAME).payload(ep).build();
+  }
+  
+  
+
+  public ServiceDiscovery<DrillbitEndpoint> getDiscovery() {
+    return ServiceDiscoveryBuilder.builder(DrillbitEndpoint.class).basePath(basePath).client(curator).serializer(DrillServiceInstanceHelper.SERIALIZER).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ZKRegistrationHandle.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ZKRegistrationHandle.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ZKRegistrationHandle.java
new file mode 100644
index 0000000..b8a7648
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/coord/ZKRegistrationHandle.java
@@ -0,0 +1,32 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.coord;
+
+import org.apache.drill.exec.coord.ClusterCoordinator.RegistrationHandle;
+
+public class ZKRegistrationHandle implements RegistrationHandle {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ZKRegistrationHandle.class);
+  
+  public final String id;
+
+  public ZKRegistrationHandle(String id) {
+    super();
+    this.id = id;
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/disk/Spool.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/disk/Spool.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/disk/Spool.java
new file mode 100644
index 0000000..346b531
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/disk/Spool.java
@@ -0,0 +1,29 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.disk;
+
+import java.io.IOException;
+
+import org.apache.drill.exec.record.RecordBatch;
+
+public interface Spool {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Spool.class);
+  
+  public void write(RecordBatch batch) throws IOException;
+  public void read(RecordBatch batch) throws IOException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/DrillbitStartupException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/DrillbitStartupException.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/DrillbitStartupException.java
new file mode 100644
index 0000000..3c36171
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/DrillbitStartupException.java
@@ -0,0 +1,46 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.exception;
+
+import org.apache.drill.common.exceptions.DrillException;
+
+public class DrillbitStartupException extends DrillException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillbitStartupException.class);
+
+  public DrillbitStartupException() {
+    super();
+  }
+
+  public DrillbitStartupException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+
+  public DrillbitStartupException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public DrillbitStartupException(String message) {
+    super(message);
+  }
+
+  public DrillbitStartupException(Throwable cause) {
+    super(cause);
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/ExecutionSetupException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/ExecutionSetupException.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/ExecutionSetupException.java
new file mode 100644
index 0000000..a4899bd
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/ExecutionSetupException.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.exception;
+
+import org.apache.drill.common.exceptions.DrillException;
+
+public class ExecutionSetupException extends DrillException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExecutionSetupException.class);
+  
+  public ExecutionSetupException() {
+    super();
+  }
+
+  public ExecutionSetupException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+
+  public ExecutionSetupException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public ExecutionSetupException(String message) {
+    super(message);
+  }
+
+  public ExecutionSetupException(Throwable cause) {
+    super(cause);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/SchemaChangeException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/SchemaChangeException.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/SchemaChangeException.java
new file mode 100644
index 0000000..24883aa
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/SchemaChangeException.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.exception;
+
+import org.apache.drill.common.exceptions.DrillException;
+
+public class SchemaChangeException extends DrillException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SchemaChangeException.class);
+
+  public SchemaChangeException() {
+    super();
+  }
+
+  public SchemaChangeException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+
+  public SchemaChangeException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public SchemaChangeException(String message) {
+    super(message);
+  }
+
+  public SchemaChangeException(Throwable cause) {
+    super(cause);
+  }
+  
+  public SchemaChangeException(String message, Object...objects){
+    super(String.format(message, objects));
+  }
+
+  public SchemaChangeException(String message, Throwable cause, Object...objects){
+    super(String.format(message, objects), cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/SetupException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/SetupException.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/SetupException.java
new file mode 100644
index 0000000..f249f13
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/exception/SetupException.java
@@ -0,0 +1,46 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.exception;
+
+import org.apache.drill.common.exceptions.DrillException;
+
+public class SetupException extends DrillException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SetupException.class);
+
+  public SetupException() {
+    super();
+  }
+
+  public SetupException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+
+  public SetupException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public SetupException(String message) {
+    super(message);
+  }
+
+  public SetupException(Throwable cause) {
+    super(cause);
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BatchIterator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BatchIterator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BatchIterator.java
new file mode 100644
index 0000000..2ebbef5
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BatchIterator.java
@@ -0,0 +1,32 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ops;
+
+import org.apache.drill.exec.record.RecordBatch;
+
+import parquet.schema.MessageType;
+
+public interface BatchIterator {
+  static enum IterOutcome{NONE, FULL_NEW_SCHEMA, FULL, PARTIAL_NEW_SCHEMA, PARTIAL, STOP}
+  public RecordBatch getBatch();
+  public FragmentContext getContext();
+  public MessageType getSchema();
+  public void kill(QueryOutcome outcome);
+  public IterOutcome next();
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
new file mode 100644
index 0000000..be1081f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
@@ -0,0 +1,49 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ops;
+
+import org.apache.drill.common.logical.StorageEngineConfig;
+import org.apache.drill.exec.rpc.bit.BitCom;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.StorageEngine;
+
+public class FragmentContext {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FragmentContext.class);
+
+  private final DrillbitContext context;
+  
+  public FragmentContext(DrillbitContext context) {
+    this.context = context;
+  }
+
+  public void fail(Throwable cause) {
+
+  }
+
+  public DrillbitContext getDrillbitContext(){
+    return context;
+  }
+  
+  public StorageEngine getStorageEngine(StorageEngineConfig config){
+    return null;
+  }
+  
+  public BitCom getCommunicator(){
+    return null;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OutputMutator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OutputMutator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OutputMutator.java
new file mode 100644
index 0000000..59abdc4
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OutputMutator.java
@@ -0,0 +1,28 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ops;
+
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.vector.ValueVector;
+
+public interface OutputMutator {
+  public void removeField(int fieldId) throws SchemaChangeException;
+  public void addField(int fieldId, ValueVector<?> vector) throws SchemaChangeException ;
+  public void setNewSchema(BatchSchema schema) throws SchemaChangeException ;
+}
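
A sketch of how a record reader is expected to drive this mutator during setup, based on the way ScanBatch (below) wires readers to its internal Mutator. The vector and schema instances are assumed to be constructed elsewhere, since their builders are not part of this patch.

  import org.apache.drill.exec.exception.SchemaChangeException;
  import org.apache.drill.exec.ops.OutputMutator;
  import org.apache.drill.exec.record.BatchSchema;
  import org.apache.drill.exec.record.vector.ValueVector;

  public class MutatorExample {
    // Registers a single output vector and announces the resulting schema.
    public static void publishField(OutputMutator output, int fieldId, ValueVector<?> vector, BatchSchema schema)
        throws SchemaChangeException {
      output.addField(fieldId, vector);   // ScanBatch.Mutator marks the schema as changed here
      output.setNewSchema(schema);        // the next call to next() will then report a new-schema outcome
    }
  }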

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryOutcome.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryOutcome.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryOutcome.java
new file mode 100644
index 0000000..b737f7c
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryOutcome.java
@@ -0,0 +1,22 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ops;
+
+public class QueryOutcome {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(QueryOutcome.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ScanBatch.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ScanBatch.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ScanBatch.java
new file mode 100644
index 0000000..88b8af2
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ScanBatch.java
@@ -0,0 +1,157 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ops;
+
+import java.util.Iterator;
+
+import org.apache.drill.exec.exception.ExecutionSetupException;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.InvalidValueAccessor;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.record.vector.ValueVector;
+import org.apache.drill.exec.store.RecordReader;
+
+import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.procedures.IntObjectProcedure;
+
+/**
+ * Record batch used for a particular scan. Operates against one or more RecordReaders.
+ */
+public class ScanBatch implements RecordBatch {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ScanBatch.class);
+
+  private IntObjectOpenHashMap<ValueVector<?>> fields = new IntObjectOpenHashMap<ValueVector<?>>();
+  private BatchSchema schema;
+  private int recordCount;
+  private boolean schemaChanged = true;
+  private final FragmentContext context;
+  private Iterator<RecordReader> readers;
+  private RecordReader currentReader;
+  private final BatchSchema expectedSchema;
+  private final Mutator mutator = new Mutator();
+
+  public ScanBatch(BatchSchema expectedSchema, Iterator<RecordReader> readers, FragmentContext context)
+      throws ExecutionSetupException {
+    this.expectedSchema = expectedSchema;
+    this.context = context;
+    this.readers = readers;
+    if (!readers.hasNext()) throw new ExecutionSetupException("A scan batch must contain at least one reader.");
+    this.currentReader = readers.next();
+    this.currentReader.setup(expectedSchema, mutator);
+  }
+
+  private void schemaChanged() {
+    schema = null;
+    schemaChanged = true;
+  }
+
+  @Override
+  public FragmentContext getContext() {
+    return context;
+  }
+
+  @Override
+  public BatchSchema getSchema() {
+    return schema;
+  }
+
+  @Override
+  public int getRecordCount() {
+    return recordCount;
+  }
+
+  @Override
+  public void kill() {
+    releaseAssets();
+  }
+
+  private void releaseAssets() {
+    fields.forEach(new IntObjectProcedure<ValueVector<?>>() {
+      @Override
+      public void apply(int key, ValueVector<?> value) {
+        value.close();
+      }
+    });
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public <T extends ValueVector<T>> T getValueVector(int fieldId, Class<T> clazz) throws InvalidValueAccessor {
+    if (!fields.containsKey(fieldId))
+      throw new InvalidValueAccessor(String.format("Unknown value accessor for field id %d.", fieldId));
+    ValueVector<?> vector = this.fields.lget();
+    if (vector.getClass().isAssignableFrom(clazz)) {
+      return (T) vector;
+    } else {
+      throw new InvalidValueAccessor(String.format(
+          "You requested a field accessor of type %s for field id %d but the actual type was %s.",
+          clazz.getCanonicalName(), fieldId, vector.getClass().getCanonicalName()));
+    }
+  }
+
+  @Override
+  public IterOutcome next() {
+    while ((recordCount = currentReader.next()) == 0) {
+      try {
+        if (!readers.hasNext()) {
+          currentReader.cleanup();
+          releaseAssets();
+          return IterOutcome.NONE;
+        }
+        currentReader.cleanup();
+        currentReader = readers.next();
+        currentReader.setup(expectedSchema, mutator);
+      } catch (ExecutionSetupException e) {
+        this.context.fail(e);
+        releaseAssets();
+        return IterOutcome.STOP;
+      }
+    }
+
+    if (schemaChanged) {
+      schemaChanged = false;
+      return IterOutcome.OK_NEW_SCHEMA;
+    } else {
+      return IterOutcome.OK;
+    }
+  }
+
+  private class Mutator implements OutputMutator {
+
+    public void removeField(int fieldId) throws SchemaChangeException {
+      schemaChanged();
+      ValueVector<?> v = fields.remove(fieldId);
+      if (v == null) throw new SchemaChangeException("Failure attempting to remove an unknown field.");
+      v.close();
+    }
+
+    public void addField(int fieldId, ValueVector<?> vector) {
+      schemaChanged();
+      ValueVector<?> v = fields.put(fieldId, vector);
+      if (v != null) v.close();
+    }
+
+    @Override
+    public void setNewSchema(BatchSchema schema) {
+      ScanBatch.this.schema = schema;
+      ScanBatch.this.schemaChanged = true;
+    }
+
+  }
+}
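
A sketch of consuming a ScanBatch as the next() contract above implies. It assumes IterOutcome is the nested enum on RecordBatch that ScanBatch's unqualified references suggest, and uses only the outcomes ScanBatch itself returns (NONE, STOP, OK, OK_NEW_SCHEMA).

  import org.apache.drill.exec.record.RecordBatch;
  import org.apache.drill.exec.record.RecordBatch.IterOutcome;

  public class ScanDriverExample {
    // Pulls batches until the scan is exhausted (NONE) or fails (STOP, after context.fail() was called).
    public static void drain(RecordBatch batch) {
      IterOutcome outcome;
      while ((outcome = batch.next()) != IterOutcome.NONE && outcome != IterOutcome.STOP) {
        if (outcome == IterOutcome.OK_NEW_SCHEMA) {
          // schema changed; a downstream operator would rebuild its value vector bindings here
        }
        System.out.println("records in batch: " + batch.getRecordCount());
      }
    }
  }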

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/PartitioningSender.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/PartitioningSender.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/PartitioningSender.java
new file mode 100644
index 0000000..6640ef2
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/PartitioningSender.java
@@ -0,0 +1,23 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ops.exchange;
+
+
+public class PartitioningSender {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PartitioningSender.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/RandomReceiver.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/RandomReceiver.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/RandomReceiver.java
new file mode 100644
index 0000000..c9f8147
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/RandomReceiver.java
@@ -0,0 +1,24 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ops.exchange;
+
+public class RandomReceiver {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RandomReceiver.class);
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/RecordBatchSender.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/RecordBatchSender.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/RecordBatchSender.java
new file mode 100644
index 0000000..0e35932
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/ops/exchange/RecordBatchSender.java
@@ -0,0 +1,24 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ops.exchange;
+
+public class RecordBatchSender {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RecordBatchSender.class);
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/opt/IdentityOptimizer.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/opt/IdentityOptimizer.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/opt/IdentityOptimizer.java
new file mode 100644
index 0000000..70a42be
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/opt/IdentityOptimizer.java
@@ -0,0 +1,40 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.opt;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.logical.LogicalPlan;
+import org.apache.drill.common.optimize.Optimizer;
+import org.apache.drill.common.physical.PhysicalPlan;
+
+public class IdentityOptimizer extends Optimizer {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(IdentityOptimizer.class);
+
+  @Override
+  public void init(DrillConfig config) {
+  }
+
+  @Override
+  public PhysicalPlan optimize(OptimizationContext context, LogicalPlan plan) {
+    return null;
+  }
+
+  @Override
+  public void close() {
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/planner/ExecPlanner.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/planner/ExecPlanner.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/planner/ExecPlanner.java
new file mode 100644
index 0000000..9554bf3
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/planner/ExecPlanner.java
@@ -0,0 +1,27 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.planner;
+
+
+/**
+ * Decides the level of parallelization.
+ * Generates smaller physical plans.
+ */
+public class ExecPlanner {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExecPlanner.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
new file mode 100644
index 0000000..1d32340
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
@@ -0,0 +1,123 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.record;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.drill.common.expression.types.DataType;
+import org.apache.drill.common.physical.RecordField.ValueMode;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.record.vector.ValueVector;
+
+import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.google.common.collect.Lists;
+
+public class BatchSchema implements Iterable<MaterializedField>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BatchSchema.class);
+  
+  private List<MaterializedField> fields = Lists.newArrayList();
+  
+  private BatchSchema(List<MaterializedField> fields) {
+    this.fields = fields;
+  }
+
+  @Override
+  public Iterator<MaterializedField> iterator() {
+    return fields.iterator();
+  }
+
+  public void addAnyField(short fieldId, boolean nullable, ValueMode mode){
+    addTypedField(fieldId, DataType.LATEBIND, nullable, mode, Void.class);
+  }
+  
+  public void addTypedField(short fieldId, DataType type, boolean nullable, ValueMode mode, Class<?> valueClass){
+    fields.add(new MaterializedField(fieldId, type, nullable, mode, valueClass));
+  }
+  
+  
+  /**
+   * Builder to build a BatchSchema.  Can be given a supporting expected schema.  If an expected
+   * schema is defined, the builder will always check that the schema being built is an equally or
+   * more materialized version of the expected schema.
+   */
+  public class BatchSchemaBuilder{
+    private IntObjectOpenHashMap<MaterializedField> fields = new IntObjectOpenHashMap<MaterializedField>();
+    private IntObjectOpenHashMap<MaterializedField> expectedFields = new IntObjectOpenHashMap<MaterializedField>();
+    
+    public BatchSchemaBuilder(BatchSchema expected){
+      for(MaterializedField f: expected){
+        expectedFields.put(f.getFieldId(), f);
+      }
+    }
+    
+    public BatchSchemaBuilder(){
+    }
+    
+    
+    /**
+     * Add a field where we don't yet have type information.  In this case, DataType will be set to LATEBIND and valueClass will be set to Void.class.
+     * @param fieldId The desired fieldId.  Should be unique for this BatchSchema.
+     * @param nullable Whether this field supports nullability.
+     * @param mode The ValueMode of this field's values.
+     * @throws SchemaChangeException
+     */
+    public void addLateBindField(short fieldId, boolean nullable, ValueMode mode) throws SchemaChangeException{
+      addTypedField(fieldId, DataType.LATEBIND, nullable, mode, Void.class);
+    }
+    
+    private void setTypedField(short fieldId, DataType type, boolean nullable, ValueMode mode, Class<?> valueClass) throws SchemaChangeException{
+      MaterializedField f = new MaterializedField(fieldId, type, nullable, mode, valueClass);
+      // only validate against an expected schema when one was provided.
+      if(!expectedFields.isEmpty()){
+        if(!expectedFields.containsKey(f.getFieldId())) throw new SchemaChangeException(String.format("An attempt was made to add a field that is not part of the expected schema.  The offending fieldId was %d", fieldId));
+        f.checkMaterialization(expectedFields.lget());
+      }
+      fields.put(f.getFieldId(), f);
+    }
+    
+    public void addTypedField(short fieldId, DataType type, boolean nullable, ValueMode mode, Class<?> valueClass) throws SchemaChangeException{
+      if(fields.containsKey(fieldId)) throw new SchemaChangeException(String.format("An attempt was made to add a duplicate fieldId to the schema.  The offending fieldId was %d", fieldId));
+      setTypedField(fieldId, type, nullable, mode, valueClass);
+    }
+    
+    public void replaceTypedField(short fieldId, DataType type, boolean nullable, ValueMode mode, Class<?> valueClass) throws SchemaChangeException{
+      if(!fields.containsKey(fieldId)) throw new SchemaChangeException(String.format("An attempt was made to replace a field in the schema, however the schema does not currently contain that field id.  The offending fieldId was %d", fieldId));
+      setTypedField(fieldId, type, nullable, mode, valueClass);
+    }
+    
+    public void addVector(ValueVector<?> v){
+      
+    }
+    
+    public void replaceVector(ValueVector<?> oldVector, ValueVector<?> newVector){
+      
+    }
+    
+    
+    public BatchSchema buildAndClear() throws SchemaChangeException{
+      // check if any fields are unaccounted for.
+      
+      List<MaterializedField> fieldList = Lists.newArrayList();
+      for(MaterializedField f : fields.values){
+        if(f != null) fieldList.add(f);
+      }
+      Collections.sort(fieldList);
+      return new BatchSchema(fieldList);
+    }
+  }
+  
+}
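
The BatchSchemaBuilder javadoc above describes the expected-schema check; the following is a minimal usage sketch, not part of this patch. DataType, ValueMode and the value class are passed in as parameters rather than hard-coded, since the patch only shows DataType.LATEBIND, and note that BatchSchemaBuilder is currently a non-static inner class, so an enclosing BatchSchema instance is needed to construct one.

import org.apache.drill.common.expression.types.DataType;
import org.apache.drill.common.physical.RecordField.ValueMode;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.record.BatchSchema;

class BatchSchemaBuilderSketch {
  // Builds a two-field schema: one late-bound field and one typed field that is later
  // replaced with a nullable variant of itself.  buildAndClear() sorts the fields
  // before constructing the BatchSchema.
  static BatchSchema build(BatchSchema.BatchSchemaBuilder builder, DataType knownType,
                           ValueMode mode, Class<?> valueClass) throws SchemaChangeException {
    builder.addLateBindField((short) 1, true, mode);                          // type resolved later
    builder.addTypedField((short) 2, knownType, false, mode, valueClass);     // typed, non-nullable
    builder.replaceTypedField((short) 2, knownType, true, mode, valueClass);  // same id, now nullable
    return builder.buildAndClear();
  }
}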


[5/9] basic framework for physical plan. abstraction of graph classes.

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitComImpl.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitComImpl.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitComImpl.java
new file mode 100644
index 0000000..dd00e04
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitComImpl.java
@@ -0,0 +1,142 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.bit;
+
+import io.netty.channel.Channel;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.util.concurrent.Future;
+
+import java.util.Map;
+
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.exception.DrillbitStartupException;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
+import org.apache.drill.exec.proto.ExecProtos.FragmentStatus;
+import org.apache.drill.exec.proto.ExecProtos.PlanFragment;
+import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.rpc.DrillRpcFuture;
+import org.apache.drill.exec.rpc.RpcBus;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import com.google.common.collect.Maps;
+import com.google.common.io.Closeables;
+
+public class BitComImpl implements BitCom {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BitComImpl.class);
+
+  private Map<DrillbitEndpoint, BitTunnel> tunnels = Maps.newConcurrentMap();
+  private Map<SocketChannel, DrillbitEndpoint> endpoints = Maps.newConcurrentMap();
+  private Object lock = new Object();
+  private BitServer server;
+  private DrillbitContext context;
+
+  public BitComImpl(DrillbitContext context) {
+    this.context = context;
+  }
+
+  public int start() throws InterruptedException, DrillbitStartupException {
+    server = new BitServer(new BitComHandler(modifier), context.getAllocator().getUnderlyingAllocator(), context.getBitLoopGroup(), context);
+    int port = context.getConfig().getInt(ExecConstants.INITIAL_BIT_PORT);
+    return server.bind(port);
+  }
+
+  private Future<BitTunnel> getNode(DrillbitEndpoint endpoint) {
+    return null;
+    
+//    BitTunnel t = tunnels.get(endpoint);
+//    if (t == null) {
+//      synchronized (lock) {
+//        t = tunnels.get(endpoint);
+//        if (t != null) return t;
+//        BitClient c = new BitClient(new BitComHandler(modifier), context.getAllocator().getUnderlyingAllocator(),
+//            context.getBitLoopGroup(), context);
+//
+//        // need to figure what to do here with regards to waiting for handshake before returning. Probably need to add
+//        // future registry so that new endpoint registration ping the registry.
+//        throw new UnsupportedOperationException();
+//        c.connectAsClient(endpoint.getAddress(), endpoint.getBitPort()).await();
+//        t = new BitTunnel(c);
+//        tunnels.put(endpoint, t);
+//
+//      }
+//    }
+//    return null;
+  }
+
+  @Override
+  public DrillRpcFuture<Ack> sendRecordBatch(FragmentContext context, DrillbitEndpoint node, RecordBatch batch) {
+    return null;
+  }
+
+  @Override
+  public DrillRpcFuture<FragmentHandle> sendFragment(FragmentContext context, DrillbitEndpoint node,
+      PlanFragment fragment) {
+    return null;
+  }
+
+  @Override
+  public DrillRpcFuture<Ack> cancelFragment(FragmentContext context, DrillbitEndpoint node, FragmentHandle handle) {
+    return null;
+  }
+
+  @Override
+  public DrillRpcFuture<FragmentStatus> getFragmentStatus(FragmentContext context, DrillbitEndpoint node,
+      FragmentHandle handle) {
+    return null;
+  }
+
+  private final TunnelModifier modifier = new TunnelModifier();
+
+  /**
+   * Fully synchronized modifier. Contention should be low since endpoints shouldn't be constantly changing.
+   */
+  class TunnelModifier {
+    public BitTunnel remove(Channel ch) {
+      synchronized (this) {
+        DrillbitEndpoint endpoint = endpoints.remove(ch);
+        if (endpoint == null) {
+          logger
+              .warn("We attempted to find an endpoint for the provided channel and found none.  This suggests a race condition or a memory leak.");
+          return null;
+        }
+
+        BitTunnel tunnel = tunnels.remove(endpoint);
+        return tunnel;
+      }
+    }
+
+    public void create(SocketChannel channel, DrillbitEndpoint endpoint, RpcBus<?> bus) {
+      synchronized (this) {
+        endpoints.put(channel, endpoint);
+        tunnels.put(endpoint, new BitTunnel(bus));
+      }
+    }
+
+  }
+
+  public void close() {
+    Closeables.closeQuietly(server);
+    for (BitTunnel bt : tunnels.values()) {
+      bt.shutdownIfClient();
+    }
+  }
+
+}
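
The TunnelModifier javadoc above describes a fully synchronized helper that keeps the channel-to-endpoint and endpoint-to-tunnel maps consistent. The sketch below is a hypothetical cleanup path, not part of the patch: it sits in the same package so it can name the package-private BitComImpl.TunnelModifier type, but since the modifier field itself is private, the real wiring would live inside BitComImpl (for example, in a channel close handler).

package org.apache.drill.exec.rpc.bit;

import io.netty.channel.Channel;

class TunnelCleanupSketch {
  static void onChannelClosed(BitComImpl.TunnelModifier modifier, Channel channel) {
    BitTunnel tunnel = modifier.remove(channel);  // drops both map entries in one synchronized step
    if (tunnel != null) {
      tunnel.shutdownIfClient();                  // close the connection only if we opened it as a client
    }
  }
}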

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitServer.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitServer.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitServer.java
new file mode 100644
index 0000000..e17b25c
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitServer.java
@@ -0,0 +1,64 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.bit;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.util.concurrent.GenericFutureListener;
+
+import org.apache.drill.exec.proto.ExecProtos.RpcType;
+import org.apache.drill.exec.rpc.BasicServer;
+import org.apache.drill.exec.rpc.Response;
+import org.apache.drill.exec.rpc.RpcException;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import com.google.protobuf.MessageLite;
+
+public class BitServer extends BasicServer<RpcType>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BitServer.class);
+  
+  private final DrillbitContext context;
+  private final BitComHandler handler;
+  
+  public BitServer(BitComHandler handler, ByteBufAllocator alloc, EventLoopGroup eventLoopGroup, DrillbitContext context) {
+    super(alloc, eventLoopGroup);
+    this.context = context;
+    this.handler = handler;
+  }
+  
+  @Override
+  protected MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
+    return handler.getResponseDefaultInstance(rpcType);
+  }
+
+  @Override
+  protected Response handle(SocketChannel ch, int rpcType, ByteBuf pBody, ByteBuf dBody) throws RpcException {
+    return handler.handle(context, rpcType, pBody, dBody);
+  }
+
+  @Override
+  protected GenericFutureListener<ChannelFuture> getCloseHandler(SocketChannel ch) {
+    
+    return super.getCloseHandler(ch);
+  }
+
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitTunnel.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitTunnel.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitTunnel.java
new file mode 100644
index 0000000..02991ad
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/bit/BitTunnel.java
@@ -0,0 +1,63 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.bit;
+
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
+import org.apache.drill.exec.proto.ExecProtos.FragmentStatus;
+import org.apache.drill.exec.proto.ExecProtos.PlanFragment;
+import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.rpc.DrillRpcFuture;
+import org.apache.drill.exec.rpc.RpcBus;
+
+import com.google.common.io.Closeables;
+
+/**
+ * Provides communication between two bits.  Backed by either a server or a client RPC connection.
+ */
+public class BitTunnel {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BitTunnel.class);
+
+  final RpcBus<?> bus;
+
+  public BitTunnel(RpcBus<?> bus){
+    this.bus = bus;
+  }
+  
+  public DrillRpcFuture<Ack> sendRecordBatch(FragmentContext context, RecordBatch batch){
+    return null;
+  }
+  
+  public DrillRpcFuture<FragmentHandle> sendFragment(FragmentContext context, PlanFragment fragment){
+    return null;
+  }
+  
+  public DrillRpcFuture<Ack> cancelFragment(FragmentContext context, FragmentHandle handle){
+    return null;
+  }
+  
+  public DrillRpcFuture<FragmentStatus> getFragmentStatus(FragmentContext context, FragmentHandle handle){
+    return null;
+  }
+  
+  public void shutdownIfClient(){
+    if(bus.isClient()) Closeables.closeQuietly(bus);
+  }
+
+}
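
A sketch of how a tunnel might be driven once obtained from BitCom; it is not part of the patch. All four RPC methods are still stubs that return null here, and the DrillRpcFuture API is not shown in this diff, so the futures are only captured, not awaited.

import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.proto.ExecProtos.PlanFragment;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.rpc.DrillRpcFuture;
import org.apache.drill.exec.rpc.bit.BitTunnel;

class BitTunnelSketch {
  // Sends a fragment to the remote bit, later cancels it, then shuts the tunnel down
  // if this side owns the client connection.
  static void dispatchAndCancel(BitTunnel tunnel, FragmentContext context,
                                PlanFragment fragment, FragmentHandle handle) {
    DrillRpcFuture<FragmentHandle> sent = tunnel.sendFragment(context, fragment);
    DrillRpcFuture<Ack> cancelled = tunnel.cancelFragment(context, handle);
    tunnel.shutdownIfClient();
  }
}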

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
new file mode 100644
index 0000000..cd6e15d
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
@@ -0,0 +1,72 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.user;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
+import org.apache.drill.exec.proto.UserProtos.BitToUserHandshake;
+import org.apache.drill.exec.proto.UserProtos.QueryHandle;
+import org.apache.drill.exec.proto.UserProtos.QueryResult;
+import org.apache.drill.exec.proto.UserProtos.RpcType;
+import org.apache.drill.exec.proto.UserProtos.RunQuery;
+import org.apache.drill.exec.rpc.BasicClient;
+import org.apache.drill.exec.rpc.DrillRpcFuture;
+import org.apache.drill.exec.rpc.Response;
+import org.apache.drill.exec.rpc.RpcException;
+
+import com.google.protobuf.MessageLite;
+
+public class UserClient extends BasicClient<RpcType> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UserClient.class);
+  
+  public UserClient(ByteBufAllocator alloc, EventLoopGroup eventLoopGroup) {
+    super(alloc, eventLoopGroup);
+  }
+
+
+  public DrillRpcFuture<QueryHandle> submitQuery(RunQuery query, ByteBuf data) throws RpcException {
+    return this.send(RpcType.RUN_QUERY, query, QueryHandle.class, data);
+  }
+
+  @Override
+  protected MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
+    switch(rpcType){
+    case RpcType.ACK_VALUE:
+      return Ack.getDefaultInstance();
+    case RpcType.HANDSHAKE_VALUE:
+      return BitToUserHandshake.getDefaultInstance();
+    case RpcType.QUERY_HANDLE_VALUE:
+      return QueryHandle.getDefaultInstance();
+    case RpcType.QUERY_RESULT_VALUE:
+      return QueryResult.getDefaultInstance();
+    }
+    throw new RpcException(String.format("Unable to deal with RpcType of %d", rpcType));
+  }
+
+
+  @Override
+  protected Response handle(SocketChannel ch, int rpcType, ByteBuf pBody, ByteBuf dBody) throws RpcException {
+    logger.debug("Received a server > client message of type {}.", rpcType);
+    return new Response(RpcType.ACK, Ack.getDefaultInstance(), null);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
new file mode 100644
index 0000000..fe70c85
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
@@ -0,0 +1,90 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.rpc.user;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+
+import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
+import org.apache.drill.exec.proto.UserProtos.BitToUserHandshake;
+import org.apache.drill.exec.proto.UserProtos.QueryHandle;
+import org.apache.drill.exec.proto.UserProtos.QueryResult;
+import org.apache.drill.exec.proto.UserProtos.RpcType;
+import org.apache.drill.exec.proto.UserProtos.RunQuery;
+import org.apache.drill.exec.rpc.BasicServer;
+import org.apache.drill.exec.rpc.DrillRpcFuture;
+import org.apache.drill.exec.rpc.Response;
+import org.apache.drill.exec.rpc.RpcException;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import com.google.protobuf.MessageLite;
+
+public class UserServer extends BasicServer<RpcType> {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UserServer.class);
+  
+  final DrillbitContext context;
+  
+  public UserServer(ByteBufAllocator alloc, EventLoopGroup eventLoopGroup, DrillbitContext context) {
+    super(alloc, eventLoopGroup);
+    this.context = context;
+  }
+
+  @Override
+  protected MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
+    // a user server only expects acknowledgements on messages it creates.
+    switch (rpcType) {
+    case RpcType.ACK_VALUE:
+      return Ack.getDefaultInstance();
+    default:
+      throw new UnsupportedOperationException();
+    }
+
+  }
+
+  public DrillRpcFuture<QueryResult> sendResult(RunQuery query, ByteBuf data) throws RpcException {
+    return this.send(RpcType.QUERY_RESULT, query, QueryResult.class, data);
+  }
+  
+  
+  @Override
+  protected Response handle(SocketChannel channel, int rpcType, ByteBuf pBody, ByteBuf dBody) throws RpcException {
+    switch (rpcType) {
+    
+    case RpcType.HANDSHAKE_VALUE:
+//      logger.debug("Received handshake, responding in kind.");
+      return new Response(RpcType.HANDSHAKE, BitToUserHandshake.getDefaultInstance(), null);
+      
+    case RpcType.RUN_QUERY_VALUE:
+//      logger.debug("Received query to run.  Returning query handle.");
+      return new Response(RpcType.QUERY_HANDLE, QueryHandle.newBuilder().setQueryId(1).build(), null);
+      
+    case RpcType.REQUEST_RESULTS_VALUE:
+//      logger.debug("Received results requests.  Returning empty query result.");
+      return new Response(RpcType.QUERY_RESULT, QueryResult.getDefaultInstance(), null);
+      
+    default:
+      throw new UnsupportedOperationException();
+    }
+
+  }
+  
+  
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/BackedRecord.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/BackedRecord.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/BackedRecord.java
new file mode 100644
index 0000000..e71d381
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/BackedRecord.java
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+public class BackedRecord implements Record {
+    DiffSchema schema;
+    DataRecord record;
+
+    public BackedRecord(DiffSchema schema, DataRecord record) {
+        this.schema = schema;
+        this.record = record;
+    }
+
+    public void setBackend(DiffSchema schema, DataRecord record) {
+        this.record = record;
+        this.schema = schema;
+    }
+
+    @Override
+    public DiffSchema getSchemaChanges() {
+        return schema;
+    }
+
+    @Override
+    public Object getField(int fieldId) {
+        return record.getData(fieldId);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java
new file mode 100644
index 0000000..41738bb
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java
@@ -0,0 +1,56 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+import java.util.List;
+import java.util.Map;
+
+public class DataRecord {
+    private final Map<Integer, Object> dataMap;
+
+    public DataRecord() {
+        this.dataMap = Maps.newHashMap();
+    }
+
+    public void addData(int fieldId, Object data, boolean isList) {
+        //TODO: Rethink lists vs object data handling
+        if(!dataMap.containsKey(fieldId)) {
+            if(isList) {
+                dataMap.put(fieldId, Lists.newArrayList(data));
+            } else {
+                dataMap.put(fieldId, data);
+            }
+        } else {
+            if(isList) {
+                ((List)dataMap.get(fieldId)).add(data);
+            } else {
+                throw new IllegalStateException("Attempted to overwrite existing non-list data for field id " + fieldId);
+            }
+        }
+    }
+
+    public Object getData(int fieldId) {
+        Preconditions.checkArgument(dataMap.containsKey(fieldId));
+        return dataMap.get(fieldId);
+    }
+}
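
A small sketch, not in the patch, of the scalar-versus-list behaviour of addData(): repeated adds with isList=true append to a single list, while a second scalar add for the same fieldId throws IllegalStateException.

import org.apache.drill.exec.schema.DataRecord;

class DataRecordSketch {
  static void fill(DataRecord record) {
    record.addData(1, "first", true);      // creates a new list for field 1
    record.addData(1, "second", true);     // appends to that list
    record.addData(2, 42L, false);         // scalar value for field 2
    Object listValues = record.getData(1); // ["first", "second"]
    Object scalar = record.getData(2);     // 42L
  }
}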

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DiffSchema.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DiffSchema.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DiffSchema.java
new file mode 100644
index 0000000..016e097
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DiffSchema.java
@@ -0,0 +1,66 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import com.google.common.collect.Lists;
+
+import java.util.List;
+
+public class DiffSchema {
+    List<Field> addedFields;
+    List<Field> removedFields;
+
+    public DiffSchema() {
+        this.addedFields = Lists.newArrayList();
+        this.removedFields = Lists.newArrayList();
+    }
+
+    public void recordNewField(Field field) {
+        addedFields.add(field);
+    }
+
+    public boolean hasDiffFields() {
+        return !addedFields.isEmpty() || !removedFields.isEmpty();
+    }
+
+    public List<Field> getAddedFields() {
+        return addedFields;
+    }
+
+    public List<Field> getRemovedFields() {
+        return removedFields;
+    }
+
+    public void reset() {
+        addedFields.clear();
+        removedFields.clear();
+    }
+
+    public void addRemovedField(Field field) {
+        removedFields.add(field);
+    }
+
+    @Override
+    public String toString() {
+        return "DiffSchema{" +
+                "addedFields=" + addedFields +
+                ", removedFields=" + removedFields +
+                '}';
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java
new file mode 100644
index 0000000..e19c099
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java
@@ -0,0 +1,135 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import com.google.common.base.Objects;
+import com.google.common.base.Strings;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+public abstract class Field {
+    final FieldType fieldType;
+    int fieldId;
+    String prefixFieldName;
+    String fieldName;
+    RecordSchema schema;
+    RecordSchema parentSchema;
+    boolean read;
+
+
+    public Field(RecordSchema parentSchema, IdGenerator<Integer> generator, FieldType fieldType, String prefixFieldName) {
+        this.fieldId = generator.getNextId();
+        this.fieldType = fieldType;
+        this.prefixFieldName = prefixFieldName;
+        this.parentSchema = parentSchema;
+    }
+
+    public Field assignSchema(RecordSchema newSchema) {
+        checkState(schema == null, "Schema already assigned to field: %s", fieldName);
+        checkState(fieldType.isEmbedSchema(), "Schema cannot be assigned to non-embedded types: %s", fieldType);
+        schema = newSchema;
+        return this;
+    }
+
+    public String getFullFieldName() {
+        return Strings.isNullOrEmpty(prefixFieldName) ? fieldName : prefixFieldName + "." + fieldName;
+    }
+
+    public int getFieldId() {
+        return fieldId;
+    }
+
+    public String getFieldName() {
+        return fieldName;
+    }
+
+    public void setRead(boolean read) {
+        this.read = read;
+    }
+
+    protected abstract Objects.ToStringHelper addAttributesToHelper(Objects.ToStringHelper helper);
+
+    Objects.ToStringHelper getAttributesStringHelper() {
+        return Objects.toStringHelper(this).add("type", fieldType)
+                .add("id", fieldId)
+                .add("fullFieldName", getFullFieldName())
+                .add("schema", schema == null ? null : schema.toSchemaString()).omitNullValues();
+    }
+
+    @Override
+    public String toString() {
+        return addAttributesToHelper(getAttributesStringHelper()).toString();
+    }
+
+    public RecordSchema getParentSchema() {
+        return parentSchema;
+    }
+
+    public RecordSchema getAssignedSchema() {
+        return schema;
+    }
+
+    public FieldType getFieldType() {
+        return fieldType;
+    }
+
+    public void assignSchemaIfNull(RecordSchema newSchema) {
+        if (!hasSchema()) {
+            schema = newSchema;
+        }
+    }
+
+    public boolean isRead() {
+        return read;
+    }
+
+    public boolean hasSchema() {
+        return schema != null;
+    }
+
+    public static enum FieldType {
+        INTEGER(1),
+        FLOAT(2),
+        BOOLEAN(3),
+        STRING(4),
+        ARRAY(5, true),
+        MAP(6, true);
+
+        byte value;
+        boolean embedSchema;
+
+        FieldType(int value, boolean embedSchema) {
+            this.value = (byte) value;
+            this.embedSchema = embedSchema;
+        }
+
+        FieldType(int value) {
+            this(value, false);
+        }
+
+        public byte value() {
+            return value;
+        }
+
+        public boolean isEmbedSchema() {
+            return embedSchema;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/IdGenerator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/IdGenerator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/IdGenerator.java
new file mode 100644
index 0000000..728e8e1
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/IdGenerator.java
@@ -0,0 +1,13 @@
+package org.apache.drill.exec.schema;
+
+/**
+ * Supplies ids for newly created fields: getNextId() returns the next available id and
+ * reset() restarts the sequence.
+ */
+public interface IdGenerator<T> {
+    public T getNextId();
+    public void reset();
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/ListSchema.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/ListSchema.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/ListSchema.java
new file mode 100644
index 0000000..efdc8fd
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/ListSchema.java
@@ -0,0 +1,108 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
+import java.util.List;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+public class ListSchema implements RecordSchema {
+    private List<Field> fields;
+
+    public ListSchema() {
+        this.fields = Lists.newArrayList();
+    }
+
+    @Override
+    public void addField(Field field) {
+        // add the field unless the list is already single-typed with the same type; embedded-schema fields are always added.
+        if (field.fieldType.isEmbedSchema() || fields.isEmpty() || !isSingleTyped() ||
+                !Iterables.getOnlyElement(fields).getFieldType().equals(field.getFieldType())) {
+            fields.add(field);
+        }
+    }
+
+    @Override
+    public Field getField(String fieldName, int index) {
+        Field field;
+        if (isSingleTyped()) {
+            field = Iterables.getOnlyElement(fields, null);
+        } else {
+            field = index < fields.size() ? fields.get(index) : null;
+        }
+
+        return field;
+    }
+
+    @Override
+    public void removeField(Field field, int index) {
+        checkArgument(fields.size() > index);
+        checkArgument(checkNotNull(fields.get(index)).getFieldId() == field.getFieldId());
+        fields.remove(index);
+    }
+
+    @Override
+    public Iterable<? extends Field> getFields() {
+        return fields;
+    }
+
+    public boolean isSingleTyped() {
+        return fields.size() <= 1;
+    }
+
+    @Override
+    public String toSchemaString() {
+        StringBuilder builder = new StringBuilder("List_fields:[");
+        for (Field field : fields) {
+            builder.append(field.toString());
+        }
+        builder.append("]");
+        return builder.toString();
+    }
+
+    @Override
+    public void resetMarkedFields() {
+        for (Field field : fields) {
+            field.setRead(false);
+        }
+    }
+
+    @Override
+    public Iterable<? extends Field> removeUnreadFields() {
+        final List<Field> removedFields = Lists.newArrayList();
+        Iterables.removeIf(fields, new Predicate<Field>() {
+            @Override
+            public boolean apply(Field field) {
+                if (!field.isRead()) {
+                    removedFields.add(field);
+                    return true;
+                } else if(field.hasSchema()) {
+                    Iterables.addAll(removedFields, field.getAssignedSchema().removeUnreadFields());
+                }
+
+                return false;
+            }
+        });
+        return removedFields;
+    }
+}
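
A sketch, not in the patch, of the read-tracking cycle that resetMarkedFields() and removeUnreadFields() imply: clear the flags before each record, mark the fields that were actually read, then prune whatever was left unread (fields with an assigned schema are kept, but their own unread children are collected recursively).

import org.apache.drill.exec.schema.Field;
import org.apache.drill.exec.schema.ListSchema;

class ListSchemaSketch {
  static Iterable<? extends Field> pruneAfterRecord(ListSchema schema, Field fieldThatWasRead) {
    schema.resetMarkedFields();            // clear read flags carried over from the last record
    fieldThatWasRead.setRead(true);        // mark only what this record actually touched
    return schema.removeUnreadFields();    // returns the fields that were dropped
  }
}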

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/NamedField.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/NamedField.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/NamedField.java
new file mode 100644
index 0000000..aa0d6aa
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/NamedField.java
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import com.google.common.base.Objects;
+
+public class NamedField extends Field {
+    final FieldType keyType;
+
+    public NamedField(RecordSchema parentSchema, IdGenerator<Integer> generator, String prefixFieldName, String fieldName, Field.FieldType fieldType) {
+        this(parentSchema, generator, prefixFieldName, fieldName, fieldType, FieldType.STRING);
+    }
+
+    public NamedField(RecordSchema parentSchema, IdGenerator<Integer> generator, String prefixFieldName, String fieldName, Field.FieldType fieldType, FieldType keyType) {
+        super(parentSchema, generator, fieldType, prefixFieldName);
+        this.fieldName = fieldName;
+        this.keyType = keyType;
+    }
+
+    public String getFieldName() {
+        return fieldName;
+    }
+
+    @Override
+    protected Objects.ToStringHelper addAttributesToHelper(Objects.ToStringHelper helper) {
+        return helper.add("keyType", keyType);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/ObjectSchema.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/ObjectSchema.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/ObjectSchema.java
new file mode 100644
index 0000000..9cc30f6
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/ObjectSchema.java
@@ -0,0 +1,91 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+import java.util.List;
+import java.util.Map;
+
+public class ObjectSchema implements RecordSchema {
+    private final Map<String, Field> fields;
+
+    public ObjectSchema() {
+        fields = Maps.newHashMap();
+    }
+
+    @Override
+    public void addField(Field field) {
+        fields.put(field.getFieldName(), field);
+    }
+
+    @Override
+    public Field getField(String fieldName, int index) {
+        return fields.get(fieldName);
+    }
+
+    @Override
+    public void removeField(Field field, int index) {
+        fields.remove(field.getFieldName());
+    }
+
+    @Override
+    public Iterable<? extends Field> getFields() {
+        return fields.values();
+    }
+
+    @Override
+    public String toSchemaString() {
+        StringBuilder builder = new StringBuilder("Object_fields:[");
+        for (Field field : fields.values()) {
+            builder.append(field.toString()).append(" ");
+        }
+        builder.append("]");
+        return builder.toString();
+    }
+
+    @Override
+    public void resetMarkedFields() {
+        for (Field field : fields.values()) {
+            field.setRead(false);
+        }
+    }
+
+    @Override
+    public Iterable<? extends Field> removeUnreadFields() {
+        final List<Field> removedFields = Lists.newArrayList();
+        Iterables.removeIf(fields.values(), new Predicate<Field>() {
+            @Override
+            public boolean apply(Field field) {
+                if (!field.isRead()) {
+                    removedFields.add(field);
+                    return true;
+                } else if (field.hasSchema()) {
+                    Iterables.addAll(removedFields, field.getAssignedSchema().removeUnreadFields());
+                }
+
+                return false;
+            }
+        });
+        return removedFields;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/OrderedField.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/OrderedField.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/OrderedField.java
new file mode 100644
index 0000000..67fd2fa
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/OrderedField.java
@@ -0,0 +1,33 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import com.google.common.base.Objects;
+
+public class OrderedField extends Field {
+    public OrderedField(RecordSchema parentSchema, IdGenerator<Integer> generator, FieldType fieldType, String prefixFieldName, int index) {
+        super(parentSchema, generator, fieldType, prefixFieldName);
+        this.fieldName = "[" + index + "]";
+    }
+
+    @Override
+    protected Objects.ToStringHelper addAttributesToHelper(Objects.ToStringHelper helper) {
+        return helper;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Record.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Record.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Record.java
new file mode 100644
index 0000000..16e83dc
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Record.java
@@ -0,0 +1,29 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.expression.SchemaPath;
+
+import java.io.IOException;
+
+public interface Record {
+    public DiffSchema getSchemaChanges();
+    public Object getField(int fieldId);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/RecordSchema.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/RecordSchema.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/RecordSchema.java
new file mode 100644
index 0000000..db1f0ed
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/RecordSchema.java
@@ -0,0 +1,29 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+public interface RecordSchema {
+    public void addField(Field field);
+    public Field getField(String fieldName, int index);
+    public void removeField(Field field, int index);
+    public Iterable<? extends Field> getFields();
+    public String toSchemaString();
+    void resetMarkedFields();
+    Iterable<? extends Field> removeUnreadFields();
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/SchemaIdGenerator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/SchemaIdGenerator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/SchemaIdGenerator.java
new file mode 100644
index 0000000..27ed2d8
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/SchemaIdGenerator.java
@@ -0,0 +1,36 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+public class SchemaIdGenerator implements IdGenerator<Integer> {
+    private int nextId;
+
+    public SchemaIdGenerator() {
+        nextId = 1;
+    }
+
+    public Integer getNextId() {
+        return nextId++;
+    }
+
+    @Override
+    public void reset() {
+        nextId = 1;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/SchemaRecorder.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/SchemaRecorder.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/SchemaRecorder.java
new file mode 100644
index 0000000..54a4e0e
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/SchemaRecorder.java
@@ -0,0 +1,122 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import org.apache.drill.exec.schema.json.jackson.JacksonHelper;
+import org.apache.drill.exec.schema.json.jackson.ScanJson;
+
+import java.io.IOException;
+import java.util.List;
+
+public class SchemaRecorder {
+    DiffSchema diffSchema;
+    RecordSchema currentSchema;
+    List<Field> removedFields;
+
+    public SchemaRecorder() {
+        currentSchema = new ObjectSchema();
+        diffSchema = new DiffSchema();
+        removedFields = Lists.newArrayList();
+    }
+
+    public RecordSchema getCurrentSchema() {
+        return currentSchema;
+    }
+
+    public void recordData(ScanJson.ReadType currentReadType, ScanJson.ReadType readType, JsonParser parser, IdGenerator generator, DataRecord record, Field.FieldType fieldType, String prefixFieldName, String fieldName, int index) throws IOException {
+        Field field = currentSchema.getField(fieldName, index);
+
+        if (field == null || field.getFieldType() != fieldType) {
+            if (field != null) {
+                removeStaleField(index, field);
+            }
+            field = currentReadType.createField(currentSchema, generator, prefixFieldName, fieldName, fieldType, index);
+            field.setRead(true);
+            diffSchema.recordNewField(field);
+            currentSchema.addField(field);
+        } else {
+            field.setRead(true);
+        }
+
+        if (readType != null) {
+            RecordSchema origSchema = currentSchema;
+            if (field != null) {
+                currentSchema = field.getAssignedSchema();
+            }
+
+            RecordSchema newSchema = readType.createSchema();
+            field.assignSchemaIfNull(newSchema);
+            setCurrentSchemaIfNull(newSchema);
+            readType.readRecord(parser, generator, this, record, field.getFullFieldName());
+
+            currentSchema = origSchema;
+        } else {
+            RecordSchema schema = field.getParentSchema();
+            record.addData(field.getFieldId(), JacksonHelper.getValueFromFieldType(parser, fieldType), schema != null && schema instanceof ListSchema);
+        }
+    }
+
+    private void removeStaleField(int index, Field field) {
+        if (field.hasSchema()) {
+            removeChildFields(field);
+        }
+        removedFields.add(field);
+        currentSchema.removeField(field, index);
+    }
+
+    private void removeChildFields(Field field) {
+        RecordSchema schema = field.getAssignedSchema();
+        if(schema == null) { return; }
+        for (Field childField : schema.getFields()) {
+            removedFields.add(childField);
+            if (childField.hasSchema()) {
+                removeChildFields(childField);
+            }
+        }
+    }
+
+    public boolean hasDiffs() {
+        return diffSchema.hasDiffFields();
+    }
+
+    public DiffSchema getDiffSchema() {
+        return hasDiffs() ? diffSchema : null;
+    }
+
+    public void setCurrentSchemaIfNull(RecordSchema newSchema) {
+        if (currentSchema == null) {
+            currentSchema = newSchema;
+        }
+    }
+
+    public void reset() {
+        currentSchema.resetMarkedFields();
+        diffSchema.reset();
+        removedFields.clear();
+    }
+
+    public void addMissingFields() {
+        for (Field field : Iterables.concat(currentSchema.removeUnreadFields(), removedFields)) {
+            diffSchema.addRemovedField(field);
+        }
+    }
+}
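
The intended per-record sequence for SchemaRecorder is reset(), one readRecord() pass that calls back into recordData(), then addMissingFields() to fold unread and removed fields into the diff. The sketch below condenses that loop over an inline two-record input; it mirrors what ScanJsonIterator.next() (further down in this patch) does, and the sample JSON is made up:

    // Sketch: per-record SchemaRecorder lifecycle over an inline two-record input.
    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;

    import org.apache.drill.exec.schema.DataRecord;
    import org.apache.drill.exec.schema.IdGenerator;
    import org.apache.drill.exec.schema.SchemaIdGenerator;
    import org.apache.drill.exec.schema.SchemaRecorder;
    import org.apache.drill.exec.schema.json.jackson.ScanJson;

    import java.io.IOException;
    import java.io.StringReader;

    public class RecorderLifecycleSketch {
        public static void main(String[] args) throws IOException {
            JsonParser parser = new JsonFactory().createJsonParser(
                    new StringReader("{\"a\": 1, \"b\": \"x\"}\n{\"a\": 2}"));
            parser.nextToken();                      // position on the first START_OBJECT

            SchemaRecorder recorder = new SchemaRecorder();
            IdGenerator<Integer> generator = new SchemaIdGenerator();

            while (parser.hasCurrentToken()) {
                recorder.reset();                    // unmark fields, clear the previous diff
                DataRecord data = new DataRecord();
                ScanJson.ReadType.OBJECT.readRecord(parser, generator, recorder, data, null);
                parser.nextToken();                  // advance to the next root object, if any
                recorder.addMissingFields();         // fold unread/removed fields into the diff
                System.out.println(recorder.getCurrentSchema().toSchemaString());
            }
        }
    }

On the second record the diff should report field "b" as removed, which is the signal BackedRecord then carries forward to consumers.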

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/JacksonHelper.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/JacksonHelper.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/JacksonHelper.java
new file mode 100644
index 0000000..0643710
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/JacksonHelper.java
@@ -0,0 +1,63 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema.json.jackson;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.google.common.collect.Maps;
+
+import org.apache.drill.exec.schema.Field;
+
+import java.io.IOException;
+import java.util.EnumMap;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+public class JacksonHelper {
+    private static final EnumMap<JsonToken, Field.FieldType> TYPE_LOOKUP = Maps.newEnumMap(JsonToken.class);
+
+    static {
+        TYPE_LOOKUP.put(JsonToken.VALUE_STRING, Field.FieldType.STRING);
+        TYPE_LOOKUP.put(JsonToken.VALUE_FALSE, Field.FieldType.BOOLEAN);
+        TYPE_LOOKUP.put(JsonToken.VALUE_TRUE, Field.FieldType.BOOLEAN);
+        TYPE_LOOKUP.put(JsonToken.START_ARRAY, Field.FieldType.ARRAY);
+        TYPE_LOOKUP.put(JsonToken.START_OBJECT, Field.FieldType.MAP);
+        TYPE_LOOKUP.put(JsonToken.VALUE_NUMBER_INT, Field.FieldType.INTEGER);
+        TYPE_LOOKUP.put(JsonToken.VALUE_NUMBER_FLOAT, Field.FieldType.FLOAT);
+    }
+
+    public static Field.FieldType getFieldType(JsonToken token) {
+        return TYPE_LOOKUP.get(token);
+    }
+
+    public static Object getValueFromFieldType(JsonParser parser, Field.FieldType fieldType) throws IOException {
+        switch(fieldType) {
+            case INTEGER:
+                return parser.getIntValue();
+            case STRING:
+                return parser.getValueAsString();
+            case FLOAT:
+                return parser.getFloatValue();
+            case BOOLEAN:
+                return parser.getBooleanValue();
+            default:
+                throw new RuntimeException("Unexpected Field type to return value: " + fieldType.toString());
+        }
+    }
+}
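
JacksonHelper is a thin token-to-type bridge: TYPE_LOOKUP classifies the current JsonToken, and getValueFromFieldType() pulls the matching Java value off the parser. A small stand-alone illustration of that pairing; the sample document and the JacksonHelperSketch class are invented for the example:

    // Sketch: classify JSON tokens and extract scalar values via JacksonHelper.
    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.core.JsonToken;

    import org.apache.drill.exec.schema.Field;
    import org.apache.drill.exec.schema.json.jackson.JacksonHelper;

    import java.io.IOException;
    import java.io.StringReader;

    public class JacksonHelperSketch {
        public static void main(String[] args) throws IOException {
            JsonParser parser = new JsonFactory().createJsonParser(
                    new StringReader("{\"name\": \"drill\", \"bits\": 3, \"ratio\": 0.5, \"up\": true}"));
            for (JsonToken token = parser.nextToken(); token != null; token = parser.nextToken()) {
                Field.FieldType type = JacksonHelper.getFieldType(token);
                // Only scalars have a value to pull; MAP/ARRAY mark structural tokens.
                if (type != null && type != Field.FieldType.MAP && type != Field.FieldType.ARRAY) {
                    System.out.println(parser.getCurrentName() + " -> " + type + " = "
                            + JacksonHelper.getValueFromFieldType(parser, type));
                }
            }
        }
    }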

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/PhysicalOperator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/PhysicalOperator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/PhysicalOperator.java
new file mode 100644
index 0000000..e450ee9
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/PhysicalOperator.java
@@ -0,0 +1,36 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema.json.jackson;
+
+import com.google.common.collect.Lists;
+
+import org.apache.drill.exec.schema.Record;
+
+import java.io.IOException;
+import java.util.List;
+
+public abstract class PhysicalOperator {
+    List<PhysicalOperatorIterator> parents;
+
+    public PhysicalOperator(PhysicalOperatorIterator... parents) {
+        this.parents = Lists.newArrayList(parents);
+    }
+
+    public abstract PhysicalOperatorIterator getIterator();
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/PhysicalOperatorIterator.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/PhysicalOperatorIterator.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/PhysicalOperatorIterator.java
new file mode 100644
index 0000000..bf4053e
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/PhysicalOperatorIterator.java
@@ -0,0 +1,31 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema.json.jackson;
+
+import org.apache.drill.exec.schema.Record;
+
+import java.io.IOException;
+
+public interface PhysicalOperatorIterator {
+    public enum NextOutcome {NONE_LEFT, INCREMENTED_SCHEMA_UNCHANGED, INCREMENTED_SCHEMA_CHANGED}
+    public Record getRecord();
+    public NextOutcome next() throws IOException;
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/ScanJson.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/ScanJson.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/ScanJson.java
new file mode 100644
index 0000000..a1c30e9
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/json/jackson/ScanJson.java
@@ -0,0 +1,203 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema.json.jackson;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.google.common.base.Charsets;
+import com.google.common.collect.Maps;
+import com.google.common.io.Files;
+import com.google.common.io.InputSupplier;
+import com.google.common.io.Resources;
+
+import org.apache.drill.exec.schema.*;
+
+import java.io.*;
+import java.util.Map;
+
+public class ScanJson extends PhysicalOperator {
+    private ScanJsonIterator iterator;
+
+    private static final Map<JsonToken, ReadType> READ_TYPES = Maps.newHashMap();
+
+    static {
+        READ_TYPES.put(JsonToken.START_ARRAY, ReadType.ARRAY);
+        READ_TYPES.put(JsonToken.START_OBJECT, ReadType.OBJECT);
+    }
+
+    public ScanJson(String inputName) throws IOException {
+        super();
+        this.iterator = new ScanJsonIterator(inputName);
+    }
+
+    @Override
+    public PhysicalOperatorIterator getIterator() {
+        return iterator;
+    }
+
+    class ScanJsonIterator implements PhysicalOperatorIterator {
+        private JsonParser parser;
+        private SchemaRecorder recorder;
+        private BackedRecord record;
+        private IdGenerator generator;
+
+        private ScanJsonIterator(String inputName) throws IOException {
+            InputSupplier<InputStreamReader> input;
+            if (inputName.startsWith("resource:")) {
+                input = Resources.newReaderSupplier(Resources.getResource(inputName.substring(inputName.indexOf(':') + 1)), Charsets.UTF_8);
+            } else {
+                input = Files.newReaderSupplier(new File(inputName), Charsets.UTF_8);
+            }
+
+            JsonFactory factory = new JsonFactory();
+            parser = factory.createJsonParser(input.getInput());
+            parser.nextToken(); // Read to the first START_OBJECT token
+            recorder = new SchemaRecorder();
+            generator = new SchemaIdGenerator();
+        }
+
+        @Override
+        public Record getRecord() {
+            return record;
+        }
+
+        @Override
+        public NextOutcome next() throws IOException {
+            if (parser.isClosed() || !parser.hasCurrentToken()) {
+                return NextOutcome.NONE_LEFT;
+            }
+
+            recorder.reset();
+
+            DataRecord dataRecord = new DataRecord();
+            ReadType.OBJECT.readRecord(parser, generator, recorder, dataRecord, null);
+
+            parser.nextToken(); // Read to START_OBJECT token
+
+            if (!parser.hasCurrentToken()) {
+                parser.close();
+            }
+
+            recorder.addMissingFields();
+            if (record == null) {
+                record = new BackedRecord(recorder.getDiffSchema(), dataRecord);
+            } else {
+                record.setBackend(recorder.getDiffSchema(), dataRecord);
+            }
+            return recorder.hasDiffs() ? NextOutcome.INCREMENTED_SCHEMA_CHANGED : NextOutcome.INCREMENTED_SCHEMA_UNCHANGED;
+        }
+
+        public RecordSchema getCurrentSchema() {
+            return recorder.getCurrentSchema();
+        }
+    }
+
+    public static enum ReadType {
+        ARRAY(JsonToken.END_ARRAY) {
+            @Override
+            public Field createField(RecordSchema parentSchema, IdGenerator<Integer> generator, String prefixFieldName, String fieldName, Field.FieldType fieldType, int index) {
+                return new OrderedField(parentSchema, generator, fieldType, prefixFieldName, index);
+            }
+
+            @Override
+            public RecordSchema createSchema() throws IOException {
+                return new ListSchema();
+            }
+        },
+        OBJECT(JsonToken.END_OBJECT) {
+            @Override
+            public Field createField(RecordSchema parentSchema, IdGenerator<Integer> generator, String prefixFieldName, String fieldName, Field.FieldType fieldType, int index) {
+                return new NamedField(parentSchema, generator, prefixFieldName, fieldName, fieldType);
+            }
+
+            @Override
+            public RecordSchema createSchema() throws IOException {
+                return new ObjectSchema();
+            }
+        };
+
+        private final JsonToken endObject;
+
+        ReadType(JsonToken endObject) {
+            this.endObject = endObject;
+        }
+
+        public JsonToken getEndObject() {
+            return endObject;
+        }
+
+        public void readRecord(JsonParser parser, IdGenerator generator, SchemaRecorder recorder, DataRecord record, String prefixFieldName) throws IOException {
+            JsonToken token = parser.nextToken();
+            JsonToken endObject = getEndObject();
+            int index = 0;
+            while (token != endObject) {
+                if (token == JsonToken.FIELD_NAME) {
+                    token = parser.nextToken();
+                    continue;
+                }
+
+                String fieldName = parser.getCurrentName();
+                Field.FieldType fieldType = JacksonHelper.getFieldType(token);
+                ReadType readType = READ_TYPES.get(token);
+                if (fieldType != null) { // skip tokens with no mapped type (e.g. nulls)
+                    recorder.recordData(this, readType, parser, generator, record, fieldType, prefixFieldName, fieldName, index);
+                }
+                token = parser.nextToken();
+                ++index;
+            }
+        }
+
+        public abstract RecordSchema createSchema() throws IOException;
+
+        public abstract Field createField(RecordSchema parentSchema, IdGenerator<Integer> generator, String prefixFieldName, String fieldName, Field.FieldType fieldType, int index);
+    }
+
+    public static void main(String[] args) throws IOException {
+        if (args.length != 1) {
+            System.err.println("Requires json path: ScanJson <json_path>");
+            return;
+        }
+
+        String jsonPath = args[0];
+
+        System.out.println("Reading json input...");
+        ScanJson sj = new ScanJson(jsonPath);
+        ScanJsonIterator iterator = (ScanJsonIterator) sj.getIterator();
+        long count = 0;
+
+        while (iterator.next() != PhysicalOperatorIterator.NextOutcome.NONE_LEFT) {
+            Record record = iterator.getRecord();
+            System.out.println("Record " + ++count);
+            System.out.println("Schema: ");
+            System.out.println(iterator.getCurrentSchema().toSchemaString());
+            System.out.println();
+            System.out.println("Changes since last record: ");
+            System.out.println();
+            System.out.println(record.getSchemaChanges());
+            System.out.println();
+        }
+    }
+}
+
+
+
+
+
+
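
Besides the main() driver above, the operator can be consumed directly through its iterator, branching on NextOutcome to react to schema changes. A hedged sketch of that pattern; the "resource:donuts.json" classpath resource is an assumption, not a file shipped with this commit:

    // Sketch: consume ScanJson through its iterator and branch on schema changes.
    import org.apache.drill.exec.schema.Record;
    import org.apache.drill.exec.schema.json.jackson.PhysicalOperatorIterator;
    import org.apache.drill.exec.schema.json.jackson.PhysicalOperatorIterator.NextOutcome;
    import org.apache.drill.exec.schema.json.jackson.ScanJson;

    import java.io.IOException;

    public class ScanJsonSketch {
        public static void main(String[] args) throws IOException {
            ScanJson scan = new ScanJson("resource:donuts.json");   // assumed classpath resource
            PhysicalOperatorIterator it = scan.getIterator();
            NextOutcome outcome;
            while ((outcome = it.next()) != NextOutcome.NONE_LEFT) {
                Record record = it.getRecord();
                if (outcome == NextOutcome.INCREMENTED_SCHEMA_CHANGED) {
                    System.out.println("Schema changed: " + record.getSchemaChanges());
                }
            }
        }
    }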

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/transform/ProtobufSchemaTransformer.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/transform/ProtobufSchemaTransformer.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/transform/ProtobufSchemaTransformer.java
new file mode 100644
index 0000000..a81a9d9
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/transform/ProtobufSchemaTransformer.java
@@ -0,0 +1,109 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema.transform;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.io.ByteArrayDataOutput;
+import com.google.common.io.ByteStreams;
+import com.google.protobuf.DescriptorProtos;
+
+import org.apache.drill.exec.schema.Field;
+import org.apache.drill.exec.schema.ListSchema;
+import org.apache.drill.exec.schema.ObjectSchema;
+import org.apache.drill.exec.schema.RecordSchema;
+
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+public class ProtobufSchemaTransformer implements SchemaTransformer<DescriptorProtos.DescriptorProto> {
+    private static final Map<Field.FieldType, Function<Field, Object>> FIELD_MAP = Maps.newEnumMap(Field.FieldType.class);
+    private static final Map<Field.FieldType, DescriptorProtos.FieldDescriptorProto.Type> TYPE_MAP = Maps.newEnumMap(Field.FieldType.class);
+    private int fieldIndex = 0;
+    public static final String LIST_SCHEMA_NAME = "_EmbeddedList"; //Read from config?
+
+    static {
+        TYPE_MAP.put(Field.FieldType.BOOLEAN, DescriptorProtos.FieldDescriptorProto.Type.TYPE_BOOL);
+        TYPE_MAP.put(Field.FieldType.STRING, DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING);
+        TYPE_MAP.put(Field.FieldType.FLOAT, DescriptorProtos.FieldDescriptorProto.Type.TYPE_FLOAT);
+        TYPE_MAP.put(Field.FieldType.INTEGER, DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32);
+    }
+
+    private DescriptorProtos.DescriptorProto.Builder transformSchema(String name, DescriptorProtos.DescriptorProto.Builder parentBuilder, RecordSchema schema) {
+        if (schema instanceof ObjectSchema) {
+            return addObjectSchema(name, parentBuilder, ObjectSchema.class.cast(schema));
+        } else if (schema instanceof ListSchema) {
+            return addListSchema(name, ListSchema.class.cast(schema));
+        } else {
+            throw new RuntimeException("Unknown schema passed to transformer: " + schema);
+        }
+    }
+
+    public DescriptorProtos.DescriptorProto transformSchema(String name, RecordSchema schema) {
+        return transformSchema(name, null, schema).build();
+    }
+
+    private DescriptorProtos.DescriptorProto.Builder addListSchema(String name, ListSchema schema) {
+        DescriptorProtos.DescriptorProto.Builder builder = DescriptorProtos.DescriptorProto.newBuilder().setName(name);
+        DescriptorProtos.FieldDescriptorProto.Builder builderForValue = DescriptorProtos.FieldDescriptorProto.newBuilder();
+        builderForValue.setTypeName(LIST_SCHEMA_NAME).setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE);
+        builder.addField(builderForValue);
+        return builder;
+    }
+
+    private DescriptorProtos.DescriptorProto.Builder addObjectSchema(String name,
+                                                                     DescriptorProtos.DescriptorProto.Builder parentBuilder,
+                                                                     ObjectSchema schema) {
+        DescriptorProtos.DescriptorProto.Builder builder = DescriptorProtos.DescriptorProto.newBuilder().setName(name);
+        for (Field field : schema.getFields()) {
+            DescriptorProtos.FieldDescriptorProto.Builder builderForValue = DescriptorProtos.FieldDescriptorProto.newBuilder();
+            String fieldName = field.getFieldName();
+            builderForValue.setName(fieldName).setLabel(DescriptorProtos.FieldDescriptorProto.Label.LABEL_OPTIONAL).setNumber(++fieldIndex);
+            if (field.hasSchema()) {
+                RecordSchema innerSchema = field.getAssignedSchema();
+                if (innerSchema instanceof ObjectSchema) {
+                    addObjectSchema(fieldName, builder, (ObjectSchema) innerSchema);
+                    DescriptorProtos.DescriptorProto innerProto = Iterables.getLast(builder.getNestedTypeList());
+                    builderForValue.setTypeName(innerProto.getName()).setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE);
+                } else if (innerSchema instanceof ListSchema) {
+                    builderForValue.setTypeName(LIST_SCHEMA_NAME).setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE);
+                }
+            } else {
+                builderForValue.setType(getProtoType(field.getFieldType()));
+            }
+            builder.addField(builderForValue);
+        }
+
+        if (parentBuilder != null) {
+            parentBuilder.addNestedType(builder);
+        }
+
+        return builder;
+    }
+
+    private DescriptorProtos.FieldDescriptorProto.Type getProtoType(Field.FieldType fieldType) {
+        return checkNotNull(TYPE_MAP.get(fieldType));
+    }
+}
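
The transformer walks a RecordSchema and emits a protobuf DescriptorProto, nesting one message type per object-valued field and typing scalars through TYPE_MAP. A short usage sketch; the TransformSketch class and the "ScannedRecord" message name are illustrative only:

    // Sketch: turn a discovered RecordSchema into a protobuf descriptor and print it.
    import com.google.protobuf.DescriptorProtos;

    import org.apache.drill.exec.schema.RecordSchema;
    import org.apache.drill.exec.schema.transform.ProtobufSchemaTransformer;

    public class TransformSketch {
        public static DescriptorProtos.DescriptorProto describe(RecordSchema schema) {
            ProtobufSchemaTransformer transformer = new ProtobufSchemaTransformer();
            DescriptorProtos.DescriptorProto proto = transformer.transformSchema("ScannedRecord", schema);
            System.out.println(proto);   // text-format dump of the generated message type
            return proto;
        }
    }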

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/transform/SchemaTransformer.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/transform/SchemaTransformer.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/transform/SchemaTransformer.java
new file mode 100644
index 0000000..54d851f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/schema/transform/SchemaTransformer.java
@@ -0,0 +1,30 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.exec.schema.transform;
+
+import org.apache.drill.exec.schema.Field;
+import org.apache.drill.exec.schema.ListSchema;
+import org.apache.drill.exec.schema.ObjectSchema;
+import org.apache.drill.exec.schema.RecordSchema;
+
+import java.util.List;
+
+public interface SchemaTransformer<T> {
+    public T transformSchema(String name, RecordSchema schema);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
new file mode 100644
index 0000000..6cc35e2
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
@@ -0,0 +1,116 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.server;
+
+import java.net.InetAddress;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.BufferAllocator;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.cache.DistributedCache;
+import org.apache.drill.exec.cache.HazelCache;
+import org.apache.drill.exec.coord.ClusterCoordinator;
+import org.apache.drill.exec.coord.ClusterCoordinator.RegistrationHandle;
+import org.apache.drill.exec.coord.ZKClusterCoordinator;
+import org.apache.drill.exec.exception.DrillbitStartupException;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.service.ServiceEngine;
+
+import com.google.common.io.Closeables;
+
+public class Drillbit {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Drillbit.class);
+
+  public static void main(String[] cli) throws DrillbitStartupException, InterruptedException {
+    Drillbit bit = null;
+    try {
+      logger.debug("Setting up Drillbit.");
+      StartupOptions options = StartupOptions.parse(cli);
+      DrillConfig config = DrillConfig.create(options.getConfigLocation());
+      bit = new Drillbit(config);
+    } catch (Exception ex) {
+      throw new DrillbitStartupException("Failure while initializing values in Drillbit.", ex);
+    }
+
+    
+    try {
+      logger.debug("Starting Drillbit.");
+      bit.run();
+    } catch (Exception e) {
+      throw new DrillbitStartupException("Failure during initial startup of Drillbit.", e);
+    }
+    Thread.sleep(10000);
+    // at this point, the main thread can terminate as we have started all our working threads.
+  }
+
+  private final DrillbitContext context;
+  final BufferAllocator pool;
+  final ClusterCoordinator coord;
+  final ServiceEngine engine;
+  final DistributedCache cache;
+  private RegistrationHandle handle;
+
+  public Drillbit(DrillConfig config) throws Exception {
+    final DrillbitContext context = new DrillbitContext(config, this);
+    Runtime.getRuntime().addShutdownHook(new ShutdownThread(config));
+    this.context = context;
+    this.pool = BufferAllocator.getAllocator(context);
+    this.coord = new ZKClusterCoordinator(config);
+    this.engine = new ServiceEngine(context);
+    this.cache = new HazelCache(context.getConfig());
+  }
+
+  public void run() throws Exception {
+    coord.start();
+    engine.start();
+    
+    DrillbitEndpoint md = DrillbitEndpoint.newBuilder().setAddress(InetAddress.getLocalHost().getHostAddress())
+        .setBitPort(engine.getBitPort()).setUserPort(engine.getUserPort()).build();
+    handle = coord.register(md);
+    cache.run(md);
+  }
+
+  public void close() {
+    if (coord != null) coord.unregister(handle);
+
+    try {
+      Thread.sleep(context.getConfig().getInt(ExecConstants.ZK_REFRESH) * 2);
+    } catch (InterruptedException e) {
+      logger.warn("Interrupted while sleeping during coordination deregistration.");
+    }
+
+    Closeables.closeQuietly(engine);
+    Closeables.closeQuietly(coord);
+    Closeables.closeQuietly(pool);
+    logger.info("Shutdown completed.");
+  }
+
+  private class ShutdownThread extends Thread {
+    ShutdownThread(DrillConfig config) {
+      this.setName("ShutdownHook");
+    }
+
+    @Override
+    public void run() {
+      logger.info("Received shutdown request.");
+      close();
+    }
+
+  }
+
+}
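
A Drillbit can also be brought up in-process rather than through main(): construct it with a DrillConfig, call run() to register with the cluster coordinator and start the RPC endpoints, and let the shutdown hook installed by the constructor handle close(). A sketch of that, using the create(String) overload added to DrillConfig further down in this commit; the override file name is an assumption:

    // Sketch: bring a Drillbit up in-process against an assumed override file.
    import org.apache.drill.common.config.DrillConfig;
    import org.apache.drill.exec.server.Drillbit;

    public class EmbeddedBitSketch {
        public static void main(String[] args) throws Exception {
            DrillConfig config = DrillConfig.create("drill-testing-override.conf");  // assumed file name
            Drillbit bit = new Drillbit(config);  // constructor also installs a shutdown hook calling close()
            bit.run();                            // register with ZooKeeper, start bit/user RPC, join the cache
            Thread.sleep(10000);                  // keep the example alive briefly; the hook tears it down on exit
        }
    }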

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitContext.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitContext.java b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitContext.java
new file mode 100644
index 0000000..94c8207
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitContext.java
@@ -0,0 +1,65 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.server;
+
+import io.netty.channel.nio.NioEventLoopGroup;
+
+import java.util.List;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.BufferAllocator;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.rpc.NamedThreadFactory;
+import org.apache.drill.exec.rpc.bit.BitCom;
+
+public class DrillbitContext {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillbitContext.class);
+  
+  private final DrillConfig config;
+  private final Drillbit underlyingBit;
+  private final NioEventLoopGroup loop;
+
+  public DrillbitContext(DrillConfig config, Drillbit underlyingBit) {
+    super();
+    this.config = config;
+    this.underlyingBit = underlyingBit;
+    this.loop = new NioEventLoopGroup(1, new NamedThreadFactory("BitServer-"));
+  }
+  
+  public DrillConfig getConfig() {
+    return config;
+  }
+  
+  public List<DrillbitEndpoint> getBits(){
+    return underlyingBit.coord.getAvailableEndpoints();
+  }
+
+  public BufferAllocator getAllocator(){
+    return underlyingBit.pool;
+  }
+  
+  
+  public NioEventLoopGroup getBitLoopGroup(){
+    return loop;
+  }
+  
+  public BitCom getBitCom(){
+    return underlyingBit.engine.getBitCom();
+  }
+  
+}


[9/9] git commit: basic framework for physical plan. abstraction of graph classes.

Posted by ja...@apache.org.
basic framework for physical plan.  abstraction of graph classes.

Initial Work on Java Exec

WIP commit of java-exec


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/b53933f2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/b53933f2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/b53933f2

Branch: refs/heads/execwork
Commit: b53933f225e21b890a9cc25545be7ce4223ba0ce
Parents: 2a6e1b3
Author: Jacques Nadeau <ja...@apache.org>
Authored: Sat Mar 16 17:56:35 2013 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Sat Apr 13 03:01:00 2013 -0700

----------------------------------------------------------------------
 .../apache/drill/common/config/DrillConfig.java    |   51 +-
 .../exceptions/DrillConfigurationException.java    |   43 +
 .../drill/common/exceptions/DrillIOException.java  |   42 +
 .../drill/common/expression/types/DataType.java    |    2 +
 .../apache/drill/common/optimize/Optimizer.java    |   45 +
 .../drill/common/physical/DataValidationMode.java  |   24 +
 .../org/apache/drill/common/physical/FieldSet.java |   23 +-
 .../apache/drill/common/physical/RecordField.java  |   43 +-
 .../apache/drill/common/physical/ParsePlan.java    |   36 +
 .../common/src/test/resources/basic_physical.json  |   42 +
 .../common/src/test/resources/dsort-physical.json  |   76 --
 .../common/src/test/resources/dsort_logical.json   |   40 +
 .../common/src/test/resources/dsort_physical.json  |   72 ++
 .../common/src/test/resources/simple_plan.json     |  133 +++
 sandbox/prototype/exec/java-exec/pom.xml           |  165 +++-
 .../prototype/exec/java-exec/rse/ClasspathRSE.java |   88 ++
 .../prototype/exec/java-exec/rse/ConsoleRSE.java   |   60 +
 .../exec/java-exec/rse/FileSystemRSE.java          |  144 +++
 .../exec/java-exec/rse/JSONDataWriter.java         |  142 +++
 .../exec/java-exec/rse/JSONRecordReader.java       |  183 ++++
 .../exec/java-exec/rse/OutputStreamWriter.java     |   78 ++
 sandbox/prototype/exec/java-exec/rse/QueueRSE.java |  100 ++
 sandbox/prototype/exec/java-exec/rse/RSEBase.java  |   71 ++
 .../prototype/exec/java-exec/rse/RSERegistry.java  |   85 ++
 .../prototype/exec/java-exec/rse/RecordReader.java |   28 +
 .../exec/java-exec/rse/RecordRecorder.java         |   32 +
 .../exec/java-exec/rse/ReferenceStorageEngine.java |   45 +
 .../org/apache/drill/exec/BufferAllocator.java     |   52 +
 .../apache/drill/exec/DirectBufferAllocator.java   |   47 +
 .../java/org/apache/drill/exec/ExecConstants.java  |   31 +
 .../apache/drill/exec/cache/DistributedCache.java  |   38 +
 .../org/apache/drill/exec/cache/HazelCache.java    |  133 +++
 .../drill/exec/cache/TemplatizedLogicalPlan.java   |   22 +
 .../drill/exec/cache/TemplatizedPhysicalPlan.java  |   22 +
 .../drill/exec/coord/ClusterCoordinator.java       |   47 +
 .../exec/coord/DrillServiceInstanceHelper.java     |   57 +
 .../drill/exec/coord/ZKClusterCoordinator.java     |  145 +++
 .../drill/exec/coord/ZKRegistrationHandle.java     |   32 +
 .../java/org/apache/drill/exec/disk/Spool.java     |   29 +
 .../exec/exception/DrillbitStartupException.java   |   46 +
 .../exec/exception/ExecutionSetupException.java    |   45 +
 .../exec/exception/SchemaChangeException.java      |   52 +
 .../drill/exec/exception/SetupException.java       |   46 +
 .../org/apache/drill/exec/ops/BatchIterator.java   |   32 +
 .../org/apache/drill/exec/ops/FragmentContext.java |   49 +
 .../org/apache/drill/exec/ops/OutputMutator.java   |   28 +
 .../org/apache/drill/exec/ops/QueryOutcome.java    |   22 +
 .../java/org/apache/drill/exec/ops/ScanBatch.java  |  157 +++
 .../exec/ops/exchange/PartitioningSender.java      |   23 +
 .../drill/exec/ops/exchange/RandomReceiver.java    |   24 +
 .../drill/exec/ops/exchange/RecordBatchSender.java |   24 +
 .../apache/drill/exec/opt/IdentityOptimizer.java   |   40 +
 .../org/apache/drill/exec/planner/ExecPlanner.java |   27 +
 .../org/apache/drill/exec/record/BatchSchema.java  |  123 +++
 .../java/org/apache/drill/exec/record/DeadBuf.java |  848 +++++++++++++++
 .../drill/exec/record/InvalidValueAccessor.java    |   46 +
 .../drill/exec/record/MaterializedField.java       |   79 ++
 .../org/apache/drill/exec/record/RecordBatch.java  |   85 ++
 .../org/apache/drill/exec/record/RecordMaker.java  |   22 +
 .../record/vector/AbstractFixedValueVector.java    |   60 +
 .../apache/drill/exec/record/vector/AnyVector.java |   30 +
 .../drill/exec/record/vector/BaseValueVector.java  |  104 ++
 .../apache/drill/exec/record/vector/BitUtil.java   |  108 ++
 .../apache/drill/exec/record/vector/BitVector.java |  118 ++
 .../apache/drill/exec/record/vector/BufBitSet.java |  847 ++++++++++++++
 .../drill/exec/record/vector/ByteVector.java       |   48 +
 .../drill/exec/record/vector/Int32Vector.java      |   52 +
 .../exec/record/vector/NullableValueVector.java    |   70 ++
 .../drill/exec/record/vector/ValueVector.java      |   81 ++
 .../drill/exec/record/vector/VariableVector.java   |   78 ++
 .../org/apache/drill/exec/rpc/BasicClient.java     |   81 ++
 .../org/apache/drill/exec/rpc/BasicServer.java     |  106 ++
 .../drill/exec/rpc/ChannelClosedException.java     |   39 +
 .../apache/drill/exec/rpc/CoordinationQueue.java   |   87 ++
 .../org/apache/drill/exec/rpc/DrillRpcFuture.java  |   92 ++
 .../apache/drill/exec/rpc/InboundRpcMessage.java   |   50 +
 .../apache/drill/exec/rpc/NamedThreadFactory.java  |   48 +
 .../apache/drill/exec/rpc/OutboundRpcMessage.java  |   50 +
 .../drill/exec/rpc/PositiveAtomicInteger.java      |   39 +
 .../apache/drill/exec/rpc/RemoteRpcException.java  |   38 +
 .../java/org/apache/drill/exec/rpc/Response.java   |   41 +
 .../java/org/apache/drill/exec/rpc/RpcBus.java     |  172 +++
 .../org/apache/drill/exec/rpc/RpcConstants.java    |   26 +
 .../java/org/apache/drill/exec/rpc/RpcDecoder.java |  142 +++
 .../java/org/apache/drill/exec/rpc/RpcEncoder.java |  127 +++
 .../org/apache/drill/exec/rpc/RpcException.java    |   45 +
 .../apache/drill/exec/rpc/RpcExceptionHandler.java |   52 +
 .../java/org/apache/drill/exec/rpc/RpcMessage.java |   45 +
 .../exec/rpc/ZeroCopyProtobufLengthDecoder.java    |   80 ++
 .../org/apache/drill/exec/rpc/bit/BitClient.java   |   62 ++
 .../java/org/apache/drill/exec/rpc/bit/BitCom.java |   69 ++
 .../apache/drill/exec/rpc/bit/BitComHandler.java   |  136 +++
 .../org/apache/drill/exec/rpc/bit/BitComImpl.java  |  142 +++
 .../org/apache/drill/exec/rpc/bit/BitServer.java   |   64 ++
 .../org/apache/drill/exec/rpc/bit/BitTunnel.java   |   63 ++
 .../org/apache/drill/exec/rpc/user/UserClient.java |   72 ++
 .../org/apache/drill/exec/rpc/user/UserServer.java |   90 ++
 .../org/apache/drill/exec/schema/BackedRecord.java |   44 +
 .../org/apache/drill/exec/schema/DataRecord.java   |   56 +
 .../org/apache/drill/exec/schema/DiffSchema.java   |   66 ++
 .../java/org/apache/drill/exec/schema/Field.java   |  135 +++
 .../org/apache/drill/exec/schema/IdGenerator.java  |   13 +
 .../org/apache/drill/exec/schema/ListSchema.java   |  108 ++
 .../org/apache/drill/exec/schema/NamedField.java   |   44 +
 .../org/apache/drill/exec/schema/ObjectSchema.java |   91 ++
 .../org/apache/drill/exec/schema/OrderedField.java |   33 +
 .../java/org/apache/drill/exec/schema/Record.java  |   29 +
 .../org/apache/drill/exec/schema/RecordSchema.java |   29 +
 .../drill/exec/schema/SchemaIdGenerator.java       |   36 +
 .../apache/drill/exec/schema/SchemaRecorder.java   |  122 +++
 .../exec/schema/json/jackson/JacksonHelper.java    |   63 ++
 .../exec/schema/json/jackson/PhysicalOperator.java |   36 +
 .../json/jackson/PhysicalOperatorIterator.java     |   31 +
 .../drill/exec/schema/json/jackson/ScanJson.java   |  203 ++++
 .../transform/ProtobufSchemaTransformer.java       |  109 ++
 .../exec/schema/transform/SchemaTransformer.java   |   30 +
 .../org/apache/drill/exec/server/Drillbit.java     |  116 ++
 .../apache/drill/exec/server/DrillbitContext.java  |   65 ++
 .../apache/drill/exec/server/StartupOptions.java   |   66 ++
 .../apache/drill/exec/service/ServiceEngine.java   |   73 ++
 .../drill/exec/store/QueryOptimizerRule.java       |   21 +
 .../org/apache/drill/exec/store/RecordReader.java  |   49 +
 .../apache/drill/exec/store/RecordRecorder.java    |   36 +
 .../org/apache/drill/exec/store/StorageEngine.java |   92 ++
 .../drill/exec/store/StorageEngineRegistry.java    |   82 ++
 .../java-exec/src/main/protobuf/Coordination.proto |   32 +
 .../src/main/protobuf/ExecutionProtos.proto        |   65 ++
 .../java-exec/src/main/protobuf/GeneralRPC.proto   |   35 +
 .../java-exec/src/main/protobuf/SchemaDef.proto    |   37 +
 .../exec/java-exec/src/main/protobuf/User.proto    |   93 ++
 .../java-exec/src/main/resources/drill-module.conf |   28 +
 .../java-exec/src/test/java/BBOutputStream.java    |   38 +
 .../src/test/java/CompressingBytesColumn.java      |   46 +
 .../exec/java-exec/src/test/java/ExternalSort.java |   21 +
 .../src/test/java/GenerateExternalSortData.java    |  124 +++
 .../drill/exec/record/column/SimpleExec.java       |   30 +
 .../drill/exec/record/vector/TestOpenBitSet.java   |  361 ++++++
 .../apache/drill/exec/rpc/user/UserRpcTest.java    |  107 ++
 .../apache/drill/exec/server/StartDrillbit.java    |   31 +
 .../exec/java-exec/src/test/resources/logback.xml  |   45 +
 sandbox/prototype/exec/ref/pom.xml                 |    7 +-
 141 files changed, 10854 insertions(+), 144 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
index a775867..b738002 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Queue;
 import java.util.concurrent.CopyOnWriteArrayList;
 
+import org.apache.drill.common.exceptions.DrillConfigurationException;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.logical.StorageEngineConfigBase;
 import org.apache.drill.common.logical.data.LogicalOperatorBase;
@@ -59,6 +60,14 @@ public final class DrillConfig extends NestedConfig{
   };
   
   /**
+   * Create a DrillConfig object using the default config file name 
+   * @return The new DrillConfig object.
+   */
+  public static DrillConfig create() {
+    return create(null);
+  }
+  
+  /**
    * <p>
    * DrillConfig loads up Drill configuration information. It does this utilizing a combination of classpath scanning
    * and Configuration fallbacks provided by the TypeSafe configuration library. The order of precedence is as
@@ -68,16 +77,20 @@ public final class DrillConfig extends NestedConfig{
    * Configuration values are retrieved as follows:
    * <ul>
    * <li>Check a single copy of "drill-override.conf". If multiple copies are on the classpath, behavior is
-   * indeterminate.</li>
+   * indeterminate.  If a non-null value for overrideFileName is provided, this is utilized instead of drill-override.conf.</li>
    * <li>Check all copies of "drill-module.conf". Loading order is indeterminate.</li>
    * <li>Check a single copy of "drill-default.conf". If multiple copies are on the classpath, behavior is
    * indeterminate.</li>
    * </ul>
    * 
    * </p>
-   * * @return A merged Config object.
+   *  @param overrideFileName The name of the file to use for override purposes.
+   *  @return A merged Config object.
    */
-  public static DrillConfig create() {
+  public static DrillConfig create(String overrideFileName) {
+    
+    overrideFileName = overrideFileName == null ? CommonConstants.CONFIG_OVERRIDE : overrideFileName;
+    
     // first we load defaults.
     Config fallback = ConfigFactory.load(CommonConstants.CONFIG_DEFAULT);
     Collection<URL> urls = PathScanner.getConfigURLs();
@@ -86,10 +99,40 @@ public final class DrillConfig extends NestedConfig{
       fallback = ConfigFactory.parseURL(url).withFallback(fallback);
     }
 
-    Config c = ConfigFactory.load(CommonConstants.CONFIG_OVERRIDE).withFallback(fallback).resolve();
+    Config c = ConfigFactory.load(overrideFileName).withFallback(fallback).resolve();
     return new DrillConfig(c);
   }
   
+  public <T> Class<T> getClassAt(String location, Class<T> clazz) throws DrillConfigurationException{
+    String className = this.getString(location);
+    if(className == null) throw new DrillConfigurationException(String.format("No class defined at location '%s'.  Expected a definition of the class [%s].", location, clazz.getCanonicalName()));
+    try{
+      Class<?> c = Class.forName(className);
+      if(clazz.isAssignableFrom(c)){
+        @SuppressWarnings("unchecked") Class<T> t = (Class<T>) c;
+        return t;
+      }else{
+        throw new DrillConfigurationException(String.format("The class [%s] listed at location '%s' should be of type [%s].  It isn't.", className, location, clazz.getCanonicalName()));
+      }
+    }catch(Exception ex){
+      if(ex instanceof DrillConfigurationException) throw (DrillConfigurationException) ex;
+      throw new DrillConfigurationException(String.format("Failure while initializing class [%s] described at configuration value '%s'.", className, location), ex);
+    }
+    
+  }
+  
+  public <T> T getInstanceOf(String location, Class<T> clazz) throws DrillConfigurationException{
+    Class<T> c = getClassAt(location, clazz);
+    try{
+      T t = c.newInstance();
+      return t;
+    }catch(Exception ex){
+      throw new DrillConfigurationException(String.format("Failure while instantiating class [%s] located at '%s.", clazz.getCanonicalName(), location), ex);
+    }
+  }
+  
+
+  
   public void setSinkQueues(int number, Queue<Object> queue){
     sinkQueues.set(number, queue);
   }
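As a rough usage sketch (not part of the patch), the new create(String) overload and the class-lookup helpers are intended to be used along these lines; the override file name and the "drill.example.handler" key below are made up purely for illustration:

    // Hedged sketch only: illustrates the call pattern for DrillConfig.create(),
    // DrillConfig.create(String) and getInstanceOf(). The config key and file name
    // used here are hypothetical.
    import org.apache.drill.common.config.DrillConfig;
    import org.apache.drill.common.exceptions.DrillConfigurationException;

    public class DrillConfigUsageSketch {
      public static void main(String[] args) throws DrillConfigurationException {
        // No argument: falls back to the default override file, drill-override.conf.
        DrillConfig config = DrillConfig.create();

        // Explicit argument: load a specific override file from the classpath instead.
        DrillConfig testConfig = DrillConfig.create("drill-test-override.conf");

        // Resolve the class named at a configuration location and instantiate it.
        // The located class must be assignable to the requested type and have a
        // no-argument constructor.
        Runnable handler = config.getInstanceOf("drill.example.handler", Runnable.class);
        handler.run();
      }
    }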

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/main/java/org/apache/drill/common/exceptions/DrillConfigurationException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/exceptions/DrillConfigurationException.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/exceptions/DrillConfigurationException.java
new file mode 100644
index 0000000..aa83758
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/exceptions/DrillConfigurationException.java
@@ -0,0 +1,43 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.exceptions;
+
+public class DrillConfigurationException extends DrillException {
+  public DrillConfigurationException() {
+    super();
+  }
+
+  public DrillConfigurationException(String message, Throwable cause, boolean enableSuppression,
+      boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+
+  public DrillConfigurationException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public DrillConfigurationException(String message) {
+    super(message);
+  }
+
+  public DrillConfigurationException(Throwable cause) {
+    super(cause);
+  }
+
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillConfigurationException.class);
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/main/java/org/apache/drill/common/exceptions/DrillIOException.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/exceptions/DrillIOException.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/exceptions/DrillIOException.java
new file mode 100644
index 0000000..cd7d4ab
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/exceptions/DrillIOException.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.exceptions;
+
+import java.io.IOException;
+
+public class DrillIOException extends IOException{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillIOException.class);
+
+  public DrillIOException() {
+    super();
+  }
+
+  public DrillIOException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public DrillIOException(String message) {
+    super(message);
+  }
+
+  public DrillIOException(Throwable cause) {
+    super(cause);
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java
index 776a9e8..60d26dc 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/expression/types/DataType.java
@@ -54,6 +54,8 @@ public abstract class DataType {
   public static final DataType LATEBIND = new LateBindType();
   public static final DataType BOOLEAN = new AtomType("BOOLEAN", Comparability.EQUAL, false);
   public static final DataType BYTES = new AtomType("BYTES", Comparability.ORDERED, false);
+  public static final DataType SIGNED_BYTE = new AtomType("SIGNED_BYTE", Comparability.ORDERED, true);
+  public static final DataType SIGNED_INT16 = new AtomType("SIGNED_INT16", Comparability.ORDERED, true);
   public static final DataType NVARCHAR = new AtomType("VARCHAR", Comparability.ORDERED, false);
   public static final DataType FLOAT32 = new AtomType("FLOAT32", Comparability.ORDERED, true);
   public static final DataType FLOAT64 = new AtomType("FLOAT64", Comparability.ORDERED, true);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/main/java/org/apache/drill/common/optimize/Optimizer.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/optimize/Optimizer.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/optimize/Optimizer.java
new file mode 100644
index 0000000..4b2037c
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/optimize/Optimizer.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.optimize;
+
+import java.io.Closeable;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.exceptions.DrillConfigurationException;
+import org.apache.drill.common.logical.LogicalPlan;
+import org.apache.drill.common.physical.PhysicalPlan;
+
+public abstract class Optimizer implements Closeable{
+  
+  public static String OPTIMIZER_IMPL_KEY = "drill.exec.optimizer.implementation";
+  
+  public abstract void init(DrillConfig config);
+  
+  public abstract PhysicalPlan optimize(OptimizationContext context, LogicalPlan plan);
+  public abstract void close();
+  
+  public static Optimizer getOptimizer(DrillConfig config) throws DrillConfigurationException{
+    Optimizer o = config.getInstanceOf(OPTIMIZER_IMPL_KEY, Optimizer.class);
+    o.init(config);
+    return o;
+  }
+  
+  public interface OptimizationContext{
+    public int getPriority();
+  }
+}
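For illustration only, an optimizer implementation would be selected through the OPTIMIZER_IMPL_KEY entry above; the class name and config snippet below are assumptions, not part of this change:

    // Hedged sketch: a stub Optimizer and the (illustrative) config entry that would
    // select it, e.g. in drill-override.conf:
    //   drill.exec.optimizer.implementation: "org.example.NoOpOptimizer"
    package org.example;

    import org.apache.drill.common.config.DrillConfig;
    import org.apache.drill.common.logical.LogicalPlan;
    import org.apache.drill.common.optimize.Optimizer;
    import org.apache.drill.common.physical.PhysicalPlan;

    public class NoOpOptimizer extends Optimizer {
      @Override
      public void init(DrillConfig config) {
        // Nothing to configure in this stub.
      }

      @Override
      public PhysicalPlan optimize(OptimizationContext context, LogicalPlan plan) {
        // A real implementation would translate the logical plan into a physical plan.
        throw new UnsupportedOperationException("illustrative stub only");
      }

      @Override
      public void close() {
      }
    }
    // Callers then obtain and initialize it via Optimizer.getOptimizer(config).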

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/DataValidationMode.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/DataValidationMode.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/DataValidationMode.java
new file mode 100644
index 0000000..6de2cfd
--- /dev/null
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/DataValidationMode.java
@@ -0,0 +1,24 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+public enum DataValidationMode {
+  TERMINATE, // terminate the query if the data doesn't match expected.
+  DROP_RECORD, // drop the record that doesn't match the expected situation.
+  SINK_RECORD // record the failed record along with the rule violation in a secondary location.
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java
index 05fc49d..c76098d 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/FieldSet.java
@@ -18,7 +18,6 @@
 package org.apache.drill.common.physical;
 
 import java.io.IOException;
-import java.util.HashSet;
 import java.util.List;
 
 import org.apache.drill.common.physical.FieldSet.De;
@@ -42,19 +41,10 @@ import com.google.common.collect.Lists;
 public class FieldSet {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FieldSet.class);
   
-  private List<RecordField> incoming = Lists.newArrayList();
-  private List<RecordField> outgoing = Lists.newArrayList();
+  private List<RecordField> fields;
   
   public FieldSet(Iterable<RecordField> fields){
-    for(RecordField f : fields){
-      if(f.getRoute().isIn()){
-        incoming.add(f);
-      }
-      
-      if(f.getRoute().isOut()){
-        outgoing.add(f);
-      }
-    }
+    this.fields = Lists.newArrayList(fields);
   }
   
 
@@ -83,14 +73,7 @@ public class FieldSet {
     @Override
     public void serialize(FieldSet value, JsonGenerator jgen, SerializerProvider provider) throws IOException,
         JsonGenerationException {
-      HashSet<RecordField> fields = new HashSet<RecordField>();
-      for(RecordField f: value.incoming){
-        fields.add(f);
-      }
-      for(RecordField f: value.outgoing){
-        fields.add(f);
-      }
-      jgen.writeObject(Lists.newArrayList(fields));
+      jgen.writeObject(value.fields);
     }
 
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java
index 821f286..2867084 100644
--- a/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java
+++ b/sandbox/prototype/common/src/main/java/org/apache/drill/common/physical/RecordField.java
@@ -28,14 +28,14 @@ public class RecordField {
   
   private String name;
   private DataType type;
-  private Route route;
+  private ValueMode mode;
   
   @JsonCreator
-  public RecordField(@JsonProperty("name") String name, @JsonProperty("type") DataType type, @JsonProperty("route") Route route) {
+  public RecordField(@JsonProperty("name") String name, @JsonProperty("type") DataType type, @JsonProperty("mode") ValueMode mode) {
     super();
     this.name = name;
     this.type = type;
-    this.route = route;
+    this.mode = mode;
   }
 
   public String getName() {
@@ -46,34 +46,21 @@ public class RecordField {
     return type;
   }
 
-  public Route getRoute() {
-    return route;
+  public ValueMode getMode() {
+    return mode;
   }
   
+  public static enum ValueMode {
+    VECTOR,
+    DICT,
+    RLE
+  }
   
+  public static enum ValueType {
+    OPTIONAL,
+    REQUIRED, 
+    REPEATED
+  }
   
-  public static enum Route {
-    IN(true, false), 
-    OUT(false, true), 
-    THROUGH(true, true), 
-    OPAQUE(true, true);
-    
-    final boolean in;
-    final boolean out;
-    
-    Route(boolean in, boolean out){
-      this.in = in;
-      this.out = out;
-    }
-
-    public boolean isIn() {
-      return in;
-    }
-
-    public boolean isOut() {
-      return out;
-    }
-    
-  }  
   
 }
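A small sketch (field names and types picked only for illustration) of how the reworked RecordField and FieldSet fit together, mirroring the mode/type pairs used in the physical plan test resources below:

    // Hedged sketch: RecordFields now carry a ValueMode instead of a Route, and
    // FieldSet simply keeps the full list rather than splitting incoming/outgoing.
    import java.util.List;

    import org.apache.drill.common.expression.types.DataType;
    import org.apache.drill.common.physical.FieldSet;
    import org.apache.drill.common.physical.RecordField;
    import org.apache.drill.common.physical.RecordField.ValueMode;

    import com.google.common.collect.Lists;

    public class FieldSetSketch {
      public static void main(String[] args) {
        List<RecordField> fields = Lists.newArrayList(
            new RecordField("key", DataType.LATEBIND, ValueMode.VECTOR),
            new RecordField("value", DataType.LATEBIND, ValueMode.VECTOR));
        FieldSet fieldSet = new FieldSet(fields);
        System.out.println(fieldSet);
      }
    }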

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/test/java/org/apache/drill/common/physical/ParsePlan.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/java/org/apache/drill/common/physical/ParsePlan.java b/sandbox/prototype/common/src/test/java/org/apache/drill/common/physical/ParsePlan.java
new file mode 100644
index 0000000..e3e43aa
--- /dev/null
+++ b/sandbox/prototype/common/src/test/java/org/apache/drill/common/physical/ParsePlan.java
@@ -0,0 +1,36 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.common.physical;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.util.FileUtils;
+import org.junit.Test;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
+
+public class ParsePlan {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParsePlan.class);
+  
+  
+  @Test public void parseSimplePlan() throws Exception{
+    DrillConfig c = DrillConfig.create();
+    PhysicalPlan plan = PhysicalPlan.parse(c, Files.toString(FileUtils.getResourceAsFile("/dsort_physical.json"), Charsets.UTF_8));
+    System.out.println(plan.unparse(c));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/test/resources/basic_physical.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/basic_physical.json b/sandbox/prototype/common/src/test/resources/basic_physical.json
new file mode 100644
index 0000000..4d1d329
--- /dev/null
+++ b/sandbox/prototype/common/src/test/resources/basic_physical.json
@@ -0,0 +1,42 @@
+{
+    head:{
+        type:"APACHE_DRILL_PHYSICAL",
+        version:"1",
+        generator:{
+            type:"manual"
+        }
+    },
+    storage:{
+        fs1:{
+            type:"mock"
+        }
+    },
+    graph:[
+        {
+            @id:1,
+            pop:"scan",
+            storageengine:"fs1",
+            entries:[
+            	{"test1"}
+           	],
+            output:[
+                { "name":"key", mode: "VECTOR", type:"SINT32"},
+                { "name":"value", mode: "VECTOR", type:"SINT32"}
+            ]
+        },
+        {
+            @id:2,
+            child:1,
+            pop: "store",
+            mode: "SYSTEM_CHOICE",
+            storageengine: "fs1",
+            entries:[
+                {
+                    path:"/sort/sorted/${partition_number}.seq",
+                    key:"Text",
+                    type:"JAVA_SEQUENCE"
+                }
+            ] 
+        }           
+    ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/test/resources/dsort-physical.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/dsort-physical.json b/sandbox/prototype/common/src/test/resources/dsort-physical.json
deleted file mode 100644
index 3c57a0a..0000000
--- a/sandbox/prototype/common/src/test/resources/dsort-physical.json
+++ /dev/null
@@ -1,76 +0,0 @@
-{
-    head:{
-        type:"APACHE_DRILL_PHYSICAL",
-        version:"1",
-        generator:{
-            type:"manual"
-        }
-    },
-    storage:{
-        fs1:{
-            type:"mock"
-        }
-    },
-    graph:[
-        {
-            @id:1,
-            pop:"scan",
-            storageengine:"fs1",
-            entries:[{}],
-            fields:[
-                { "name":"key", route: "OUT", type:"LATE"},
-                { "name":"value", route: "OUT", type:"LATE"}
-            ]
-        },
-        {
-            @id:2,
-            child: 1,
-            pop:"quicknwaysort",
-            orderings:[
-                {
-                    order: "DESC",
-                    expr: "data.key"
-                }
-            ],
-            fields:[
-                { "name":"key", route: "THROUGH", type:"LATE"},
-                { "name":"value", route: "OPAQUE", type:"LATE"}
-            ]
-
-        },
-        {
-            @id:3,
-            child: 2,
-            pop:"exchange",
-            partition:{
-                mode:"RANGE",
-                exprs:["key"]
-            },
-            stitch:{
-                mode:"RANDOM"
-            },
-            fields:[
-                { "name":"key", route: "THROUGH", type:"LATE"},
-                { "name":"value", route: "OPAQUE", type:"LATE"}
-            ]
-        },
-        {
-            @id:4,
-            child:3,
-            pop: "store",
-            mode: "SYSTEM_CHOICE",
-            storageengine: "fs1",
-            entries:[
-                {
-                    path:"/sort/sorted/${partition_number}.seq",
-                    key:"Text",
-                    type:"JAVA_SEQUENCE"
-                }
-            ],
-            fields:[
-                { "name":"key", route: "IN", type:"LATE"},
-                { "name":"value", route: "IN", type:"LATE"}
-            ] 
-        }           
-    ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/test/resources/dsort_logical.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/dsort_logical.json b/sandbox/prototype/common/src/test/resources/dsort_logical.json
new file mode 100644
index 0000000..83d30e8
--- /dev/null
+++ b/sandbox/prototype/common/src/test/resources/dsort_logical.json
@@ -0,0 +1,40 @@
+{
+  head:{ type:"apache_drill_logical_plan", version:"1", generator:{ type:"manual", info:"na"}}},
+  storage:[ { type:"fs", name:"fs1", root:"file:///" }],
+  query:[ { op: "sequence", sequence: [
+    {
+      op:"scan",
+      storageengine:"fs1",
+      ref: "data",
+      selection: {
+        path: "/sort/unsorted/*.seq",
+        type: "JAVA_SEQUENCE"
+      }
+    },
+    {
+      op: "order",
+      orderings: [
+        {order: "desc", expr: "data.key" }
+      ]
+    }, 
+    {
+      op: "project",
+      projections: [
+        { ref: "output.key", expr: "data.key" },
+        { ref: "output.value", expr: "data.value" }
+      ]
+    },
+	{
+      op: "store",
+      storageengine: "fs1",
+      target: {
+        path: "/sort/sorted/${partition}.seq",
+        type: "JAVA_SEQUENCE",
+        partition: {
+          type: "ORDERED",
+          exprs: ["key"]
+        }
+      }
+    }
+  ]}]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/test/resources/dsort_physical.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/dsort_physical.json b/sandbox/prototype/common/src/test/resources/dsort_physical.json
new file mode 100644
index 0000000..7c31df2
--- /dev/null
+++ b/sandbox/prototype/common/src/test/resources/dsort_physical.json
@@ -0,0 +1,72 @@
+{
+    head:{
+        type:"APACHE_DRILL_PHYSICAL",
+        version:"1",
+        generator:{
+            type:"manual"
+        }
+    },
+    storage:{
+        fs1:{
+            type:"mock"
+        }
+    },
+    graph:[
+        {
+            @id:1,
+            pop:"scan",
+            storageengine:"fs1",
+            entries:[{}],
+            output:[
+                { "name":"key", mode: "VECTOR", type:"LATE"},
+                { "name":"value", mode: "VECTOR", type:"LATE"}
+            ]
+        },
+        {
+            @id:2,
+            child: 1,
+            pop:"quicknwaysort",
+            orderings:[
+                {
+                    order: "DESC",
+                    expr: "data.key"
+                }
+            ],
+            output:[
+                { "name":"key", mode: "VECTOR", type:"LATE"},
+                { "name":"value", mode: "VECTOR", type:"LATE"}
+            ]
+
+        },
+        {
+            @id:3,
+            child: 2,
+            pop:"exchange",
+            partition:{
+                mode:"RANGE",
+                exprs:["key"]
+            },
+            stitch:{
+                mode:"RANDOM"
+            },
+            fields:[
+                { "name":"key" mode: "VECTOR", type:"LATE"},
+                { "name":"value" mode: "VECTOR", type:"LATE"}
+            ]
+        },
+        {
+            @id:4,
+            child:3,
+            pop: "store",
+            mode: "SYSTEM_CHOICE",
+            storageengine: "fs1",
+            entries:[
+                {
+                    path:"/sort/sorted/${partition_number}.seq",
+                    key:"Text",
+                    type:"JAVA_SEQUENCE"
+                }
+            ] 
+        }           
+    ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/common/src/test/resources/simple_plan.json
----------------------------------------------------------------------
diff --git a/sandbox/prototype/common/src/test/resources/simple_plan.json b/sandbox/prototype/common/src/test/resources/simple_plan.json
new file mode 100644
index 0000000..2457b1f
--- /dev/null
+++ b/sandbox/prototype/common/src/test/resources/simple_plan.json
@@ -0,0 +1,133 @@
+{
+  head:{
+    type:"apache_drill_logical_plan",
+    version:"1",
+    generator:{
+      type:"manual",
+      info:"na"
+    }
+  },
+  storage:{
+    "local-logs": {
+      type:"text",
+      file: "local://logs/*.log",
+      compress:"gzip",
+      line-delimiter:"\n",
+      record-maker:{
+        type:"first-row",
+        delimiter:","
+      }
+    },
+    users: {
+      type:"mongo",
+      connection:"mongodb://blue:red@localhost/users"
+    },
+    mysql: {
+      type:"mysql",
+      connection:"jdbc:mysql://localhost/main"
+    }
+  },
+  query:[
+    {
+      @id:"1",
+      op:"scan",
+      memo:"initial_scan",
+      storageengine:"local-logs",
+      selection: {}
+    },
+    {
+      @id:"2",
+      input:"1",
+      memo:"transform1",
+      op:"transform",
+      transforms:[
+        {
+          ref:"userId",
+          expr:"regex_like('activity.cookie', \"persistent=([^;]*)\")"
+        },
+        {
+          ref:"session",
+          expr:"regex_like('activity.cookie', \"session=([^;]*)\")"
+        }
+      ]
+    },
+    {
+      @id:"3",
+      input:"2",
+      memo:"transform2",
+      op:"transform",
+      transforms:[
+        {
+          ref:"userId",
+          expr:"regex_like('activity.cookie', \"persistent=([^;]*)\")"
+        },
+        {
+          ref:"session",
+          expr:"regex_like('activity.cookie', \"session=([^;]*)\")"
+        }
+      ]
+    },
+    {
+      @id:"7",
+      input:"3",
+      op:"sequence",
+      do:[
+        {
+          op:"transform",
+          memo:"seq_transform",
+          transforms:[
+            {
+              ref:"happy",
+              expr:"regex_like('ep2', \"dink\")"
+            }
+          ]
+        }
+        ,
+        {
+          op:"transform",
+          memo:"last_transform",
+          transforms:[
+            {
+              ref:"abc",
+              expr:"123"
+            }
+          ]
+        }
+      ]
+    },
+    {
+      @id:"10",
+      input:"3",
+      op:"transform",
+      memo:"t3",
+      transforms:[
+        {
+          ref:"happy",
+          expr:"regex_like('ep2', \"dink\")"
+        }
+      ]
+    },
+    {
+      @id:12,
+      op:"join",
+      type: "inner",
+      left:"7",
+      right:"10",
+      conditions: [{relationship:"==", left: "1", right: "1" }]
+    }
+    ,
+    {
+      input: 12,
+      op: "store",
+      memo: "output sink",
+      target: {
+        file: "console:///stdout"
+      }
+      
+    }
+
+    
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/pom.xml
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/pom.xml b/sandbox/prototype/exec/java-exec/pom.xml
index a458160..663bab4 100644
--- a/sandbox/prototype/exec/java-exec/pom.xml
+++ b/sandbox/prototype/exec/java-exec/pom.xml
@@ -1,15 +1,154 @@
 <?xml version="1.0"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <artifactId>exec-parent</artifactId>
-    <groupId>org.apache.drill.exec</groupId>
-    <version>1.0-SNAPSHOT</version>
-  </parent>
-  <artifactId>java-exec</artifactId>
-  <name>java-exec</name>
-
-  <dependencies>
-  </dependencies>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<artifactId>exec-parent</artifactId>
+		<groupId>org.apache.drill.exec</groupId>
+		<version>1.0-SNAPSHOT</version>
+	</parent>
+	<artifactId>java-exec</artifactId>
+	<name>java-exec</name>
+
+	<properties>
+		<target.gen.source.path>${project.basedir}/target/generated-sources</target.gen.source.path>
+		<proto.cas.path>${project.basedir}/src/main/protobuf/</proto.cas.path>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>com.twitter</groupId>
+			<artifactId>parquet-column</artifactId>
+			<version>1.0.0-SNAPSHOT</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.drill</groupId>
+			<artifactId>common</artifactId>
+			<version>1.0-SNAPSHOT</version>
+		</dependency>
+		<dependency>
+			<groupId>com.beust</groupId>
+			<artifactId>jcommander</artifactId>
+			<version>1.30</version>
+		</dependency>
+		<dependency>
+			<groupId>com.netflix.curator</groupId>
+			<artifactId>curator-x-discovery</artifactId>
+			<version>1.1.9</version>
+			<exclusions>
+				<!-- <exclusion> -->
+				<!-- <artifactId>netty</artifactId> -->
+				<!-- <groupId>org.jboss.netty</groupId> -->
+				<!-- </exclusion> -->
+				<exclusion>
+					<artifactId>slf4j-log4j12</artifactId>
+					<groupId>org.slf4j</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>log4j</artifactId>
+					<groupId>log4j</groupId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.xerial.snappy</groupId>
+			<artifactId>snappy-java</artifactId>
+			<version>1.0.5-M3</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-core</artifactId>
+			<version>1.1.0</version>
+			<exclusions>
+				<exclusion>
+					<artifactId>jets3t</artifactId>
+					<groupId>net.java.dev.jets3t</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>commons-logging</artifactId>
+					<groupId>commons-logging</groupId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>com.carrotsearch</groupId>
+			<artifactId>hppc</artifactId>
+			<version>0.4.2</version>
+		</dependency>
+		<dependency>
+			<groupId>io.netty</groupId>
+			<artifactId>netty-all</artifactId>
+			<version>4.0.0.CR1</version>
+		</dependency>
+		<dependency>
+			<groupId>com.google.protobuf</groupId>
+			<artifactId>protobuf-java</artifactId>
+			<version>2.5.0</version>
+		</dependency>
+		<dependency>
+			<groupId>com.hazelcast</groupId>
+			<artifactId>hazelcast</artifactId>
+			<version>2.5</version>
+		</dependency>		
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>native-maven-plugin</artifactId>
+				<version>1.0-alpha-7</version>
+				<configuration>
+					<javahClassNames>
+						<javahClassName>org.apache.drill.exec.mem.ByteBufferAllocator</javahClassName>
+					</javahClassNames>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-antrun-plugin</artifactId>
+				<executions>
+					<execution>
+						<id>generate-sources</id>
+						<phase>generate-sources</phase>
+						<configuration>
+							<tasks>
+								<mkdir dir="${target.gen.source.path}" />
+								<path id="proto.path.files">
+									<fileset dir="${proto.cas.path}">
+										<include name="*.proto" />
+									</fileset>
+								</path>
+								<pathconvert pathsep=" " property="proto.files"
+									refid="proto.path.files" />
+
+								<exec executable="protoc">
+									<arg value="--java_out=${target.gen.source.path}" />
+									<arg value="--proto_path=${proto.cas.path}" />
+									<arg line="${proto.files}" />
+								</exec>
+							</tasks>
+							<sourceRoot>${target.gen.source.path}</sourceRoot>
+						</configuration>
+						<goals>
+							<goal>run</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<!-- <plugin> -->
+			<!-- <groupId>com.github.igor-petruk.protobuf</groupId> -->
+			<!-- <artifactId>protobuf-maven-plugin</artifactId> -->
+			<!-- <version>0.6.2</version> -->
+			<!-- <executions> -->
+			<!-- <execution> -->
+			<!-- <goals> -->
+			<!-- <goal>run</goal> -->
+			<!-- </goals> -->
+			<!-- </execution> -->
+			<!-- </executions> -->
+			<!-- </plugin> -->
+		</plugins>
+	</build>
+
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/ClasspathRSE.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/ClasspathRSE.java b/sandbox/prototype/exec/java-exec/rse/ClasspathRSE.java
new file mode 100644
index 0000000..aa8186d
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/ClasspathRSE.java
@@ -0,0 +1,88 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.Collection;
+import java.util.Collections;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.logical.StorageEngineConfigBase;
+import org.apache.drill.common.logical.data.Scan;
+import org.apache.drill.exec.ref.exceptions.SetupException;
+import org.apache.drill.exec.ref.rops.DataWriter.ConverterType;
+import org.apache.drill.exec.ref.rops.ROP;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+public class ClasspathRSE extends RSEBase {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ClasspathRSE.class);
+
+  private DrillConfig dConfig;
+  private SchemaPath rootPath;
+  
+  public ClasspathRSE(ClasspathRSEConfig engineConfig, DrillConfig dConfig) throws SetupException{
+    this.dConfig = dConfig;
+  }
+
+  
+  @JsonTypeName("classpath")
+  public static class ClasspathRSEConfig extends StorageEngineConfigBase {
+  }
+  
+  public static class ClasspathInputConfig implements ReadEntry{
+    public String path;
+    public ConverterType type;
+    @JsonIgnore public SchemaPath rootPath; 
+  }
+
+  public boolean supportsRead() {
+    return true;
+  }
+
+  @Override
+  public Collection<ReadEntry> getReadEntries(Scan scan) throws IOException {
+    ClasspathInputConfig c = scan.getSelection().getWith(dConfig, ClasspathInputConfig.class);
+    c.rootPath = scan.getOutputReference();
+    return Collections.singleton((ReadEntry) c);
+  }
+
+  @Override
+  public RecordReader getReader(ReadEntry readEntry, ROP parentROP) throws IOException {
+    ClasspathInputConfig e = getReadEntry(ClasspathInputConfig.class, readEntry);
+    URL u = RecordReader.class.getResource(e.path);
+    if(u == null){
+      throw new IOException(String.format("Failure finding classpath resource %s.", e.path));
+    }
+    switch(e.type){
+    case JSON:
+      return new JSONRecordReader(e.rootPath, dConfig, u.openStream(), parentROP);
+    default:
+      throw new UnsupportedOperationException();
+    }
+  }
+  
+  
+
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/ConsoleRSE.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/ConsoleRSE.java b/sandbox/prototype/exec/java-exec/rse/ConsoleRSE.java
new file mode 100644
index 0000000..1570ea9
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/ConsoleRSE.java
@@ -0,0 +1,60 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.OutputStream;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.logical.StorageEngineConfigBase;
+import org.apache.drill.common.logical.data.Store;
+import org.apache.drill.exec.ref.rops.DataWriter.ConverterType;
+
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+public class ConsoleRSE extends RSEBase {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ConsoleRSE.class);
+  
+  private final DrillConfig dConfig;
+  
+  public static enum Pipe {
+    STD_OUT, STD_ERR
+  };
+
+  public ConsoleRSE(ConsoleRSEConfig engineConfig, DrillConfig dConfig){
+    this.dConfig = dConfig;
+  }
+  
+  public static class ConsoleOutputConfig {
+    public Pipe pipe = Pipe.STD_OUT;
+    public ConverterType type = ConverterType.JSON;
+  }
+  
+  @JsonTypeName("console") public static class ConsoleRSEConfig extends StorageEngineConfigBase {}
+  
+  public boolean supportsWrite() {
+    return true;
+  }
+
+  @Override
+  public RecordRecorder getWriter(Store store) {
+    ConsoleOutputConfig config = store.getTarget().getWith(dConfig, ConsoleOutputConfig.class);
+    OutputStream out = config.pipe == Pipe.STD_OUT ? System.out : System.err;
+    return new OutputStreamWriter(out, config.type, false);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/FileSystemRSE.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/FileSystemRSE.java b/sandbox/prototype/exec/java-exec/rse/FileSystemRSE.java
new file mode 100644
index 0000000..522191b
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/FileSystemRSE.java
@@ -0,0 +1,144 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.logical.StorageEngineConfigBase;
+import org.apache.drill.common.logical.data.Scan;
+import org.apache.drill.common.logical.data.Store;
+import org.apache.drill.exec.ref.exceptions.SetupException;
+import org.apache.drill.exec.ref.rops.DataWriter.ConverterType;
+import org.apache.drill.exec.ref.rops.ROP;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+public class FileSystemRSE extends RSEBase {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FileSystemRSE.class);
+
+  private FileSystem fs;
+  private Path basePath;
+  private DrillConfig dConfig;
+
+  public FileSystemRSE(FileSystemRSEConfig engineConfig, DrillConfig dConfig) throws SetupException{
+    this.dConfig = dConfig;
+    
+    try {
+      URI u = new URI(engineConfig.root);
+      String path = u.getPath();
+      
+      if(path.charAt(path.length()-1) != '/') throw new SetupException(String.format("The file root provided (%s) appears to reference a file '%s'.  The root must be a base path ending in '/'.", engineConfig.root, u.getPath()));
+      fs = FileSystem.get(u, new Configuration());
+      basePath = new Path(u.getPath());
+    } catch (URISyntaxException | IOException e) {
+      throw new SetupException("Failure while reading setting up file system root path.", e);
+    }
+  }
+
+  
+  @JsonTypeName("fs")
+  public static class FileSystemRSEConfig extends StorageEngineConfigBase {
+    private String root;
+
+    @JsonCreator
+    public FileSystemRSEConfig(@JsonProperty("root") String root) {
+      this.root = root;
+    }
+  }
+  
+  public static class FileSystemInputConfig {
+    public FileSpec[] files;
+  }
+  
+  public static class FileSpec{
+    public String path;
+    public ConverterType type;
+  }
+  
+  
+  public class FSEntry implements ReadEntry{
+    Path path;
+    ConverterType type;
+    SchemaPath rootPath;
+
+    public FSEntry(FileSpec spec, SchemaPath rootPath){
+      this.path = new Path(basePath, spec.path);
+      this.type = spec.type;
+      this.rootPath = rootPath;
+    }
+        
+  }
+
+  public class FileSystemOutputConfig {
+    public String file;
+    public ConverterType type;
+  }
+
+  public boolean supportsRead() {
+    return true;
+  }
+  
+  public boolean supportsWrite() {
+    return true;
+  }
+
+  @Override
+  public RecordRecorder getWriter(Store store) throws IOException {
+    FileSystemOutputConfig config = store.getTarget().getWith(dConfig, FileSystemOutputConfig.class);
+    OutputStream out = fs.create(new Path(basePath, config.file));
+    return new OutputStreamWriter(out, config.type, true);
+  }
+
+  @Override
+  public Collection<ReadEntry> getReadEntries(Scan scan) throws IOException {
+    Set<ReadEntry> s = new HashSet<ReadEntry>();
+    for(FileSpec f : scan.getSelection().getWith(dConfig, FileSystemInputConfig.class).files){
+      s.add(new FSEntry(f, scan.getOutputReference()));
+    }
+    return s;
+  }
+
+  @Override
+  public RecordReader getReader(ReadEntry readEntry, ROP parentROP) throws IOException {
+    FSEntry e = getReadEntry(FSEntry.class, readEntry);
+    
+    switch(e.type){
+    case JSON:
+      return new JSONRecordReader(e.rootPath, dConfig, fs.open(e.path), parentROP);
+    default:
+      throw new UnsupportedOperationException();
+    }
+  }
+  
+  
+
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/JSONDataWriter.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/JSONDataWriter.java b/sandbox/prototype/exec/java-exec/rse/JSONDataWriter.java
new file mode 100644
index 0000000..24434d5
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/JSONDataWriter.java
@@ -0,0 +1,142 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.drill.exec.ref.rops.DataWriter;
+
+import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+
+public class JSONDataWriter implements DataWriter{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JSONDataWriter.class);
+  
+  private final JsonGenerator g;
+//  private CharSequence transientName;
+  
+  public JSONDataWriter(OutputStream out) throws IOException{
+    JsonFactory f = new JsonFactory();
+    
+    this.g = f.createJsonGenerator(out, JsonEncoding.UTF8);
+    this.g.useDefaultPrettyPrinter();
+  }
+  
+  private String s(CharSequence seq) {
+    String s = (seq instanceof String) ? (String) seq : seq.toString();
+    return s;
+  }
+  
+  @Override
+  public void startRecord() throws IOException {
+    
+  }
+
+  @Override
+  public void writeArrayStart(int length) throws IOException {
+    g.writeStartArray();
+  }
+
+  @Override
+  public void writeArrayElementStart() throws IOException {
+  }
+
+  @Override
+  public void writeArrayElementEnd() throws IOException {
+  }
+
+  @Override
+  public void writeArrayEnd() throws IOException {
+    g.writeEndArray();
+  }
+
+  @Override
+  public void writeMapStart() throws IOException {
+    g.writeStartObject();
+  }
+
+  @Override
+  public void writeMapKey(CharSequence seq) throws IOException {
+    g.writeFieldName(s(seq));
+  }
+
+  @Override
+  public void writeMapValueStart() throws IOException {
+  }
+
+  @Override
+  public void writeMapValueEnd() throws IOException {
+  }
+
+  @Override
+  public void writeMapEnd() throws IOException {
+    g.writeEndObject();
+  }
+
+  @Override
+  public void writeBoolean(boolean b) throws IOException {
+    g.writeBoolean(b);
+  }
+
+  @Override
+  public void writeSInt32(int value) throws IOException {
+    g.writeNumber(value);
+  }
+
+  @Override
+  public void writeSInt64(long value) throws IOException {
+    g.writeNumber(value);
+  }
+
+  @Override
+  public void writeBytes(byte[] bytes) throws IOException {
+    g.writeBinary(bytes);
+  }
+
+  @Override
+  public void writeSFloat64(double value) throws IOException {
+    g.writeNumber(value);
+  }
+
+  @Override
+  public void writeSFloat32(float value) throws IOException {
+    g.writeNumber(value);
+  }
+
+  @Override
+  public void writeNullValue() throws IOException {
+    g.writeNull();
+  }
+
+  @Override
+  public void writeCharSequence(CharSequence value) throws IOException {
+    g.writeString(s(value));
+  }
+
+  @Override
+  public void endRecord() throws IOException {
+    g.writeRawValue("\n");
+  }
+  
+  public void finish() throws IOException{
+    g.close();
+  }
+  
+}
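The class above is a thin adapter from the reference interpreter's DataWriter callbacks onto Jackson's streaming JsonGenerator; as a standalone sketch, the equivalent raw generator calls for one record look roughly like this (the mapping to the adapter's methods is noted in the comments):

    // Hedged sketch using only Jackson's streaming API; shows the generator calls the
    // adapter above would issue for a record such as {"key": 1, "value": "abc"}.
    import java.io.IOException;

    import com.fasterxml.jackson.core.JsonEncoding;
    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonGenerator;

    public class JsonGeneratorSketch {
      public static void main(String[] args) throws IOException {
        JsonFactory factory = new JsonFactory();
        JsonGenerator g = factory.createJsonGenerator(System.out, JsonEncoding.UTF8);
        g.useDefaultPrettyPrinter();

        g.writeStartObject();        // writeMapStart()
        g.writeFieldName("key");     // writeMapKey("key")
        g.writeNumber(1);            // writeSInt32(1)
        g.writeFieldName("value");   // writeMapKey("value")
        g.writeString("abc");        // writeCharSequence("abc")
        g.writeEndObject();          // writeMapEnd()
        g.writeRawValue("\n");       // endRecord()

        g.close();                   // finish()
      }
    }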

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/JSONRecordReader.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/JSONRecordReader.java b/sandbox/prototype/exec/java-exec/rse/JSONRecordReader.java
new file mode 100644
index 0000000..7510e72
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/JSONRecordReader.java
@@ -0,0 +1,183 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.Iterator;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.ref.RecordIterator;
+import org.apache.drill.exec.ref.RecordPointer;
+import org.apache.drill.exec.ref.RunOutcome;
+import org.apache.drill.exec.ref.UnbackedRecord;
+import org.apache.drill.exec.ref.exceptions.RecordException;
+import org.apache.drill.exec.ref.rops.ROP;
+import org.apache.drill.exec.ref.values.DataValue;
+import org.apache.drill.exec.ref.values.ScalarValues.BooleanScalar;
+import org.apache.drill.exec.ref.values.ScalarValues.BytesScalar;
+import org.apache.drill.exec.ref.values.ScalarValues.DoubleScalar;
+import org.apache.drill.exec.ref.values.ScalarValues.IntegerScalar;
+import org.apache.drill.exec.ref.values.ScalarValues.LongScalar;
+import org.apache.drill.exec.ref.values.ScalarValues.StringScalar;
+import org.apache.drill.exec.ref.values.SimpleArrayValue;
+import org.apache.drill.exec.ref.values.SimpleMapValue;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Charsets;
+
+public class JSONRecordReader implements RecordReader {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JSONRecordReader.class);
+
+  private InputStreamReader input;
+  private String file;
+  private SchemaPath rootPath;
+  private JsonParser parser;
+  private UnbackedRecord record = new UnbackedRecord();
+  private ObjectMapper mapper;
+  private ROP parent;
+
+  public JSONRecordReader(SchemaPath rootPath, DrillConfig dConfig, InputStream stream, ROP parent) throws IOException {
+    this.input = new InputStreamReader(stream, Charsets.UTF_8);
+    this.mapper = dConfig.getMapper();
+    this.parser = mapper.getFactory().createJsonParser(input);
+    this.parent = parent;
+    this.rootPath = rootPath;
+  }
+
+  private class NodeIter implements RecordIterator {
+
+    @Override
+    public NextOutcome next() {
+//      logger.debug("Next Record Called");
+      try {
+        if (parser.nextToken() == null) {
+//          logger.debug("No current token, returning.");
+          return NextOutcome.NONE_LEFT;
+        }
+        JsonNode n = mapper.readTree(parser);
+        if (n == null) {
+//          logger.debug("Nothing was returned for read tree, returning.");
+          return NextOutcome.NONE_LEFT;
+        }
+//        logger.debug("Record found, returning new json record.");
+        record.setClearAndSetRoot(rootPath, convert(n));
+        // todo, add schema checking here.
+        return NextOutcome.INCREMENTED_SCHEMA_CHANGED;
+      } catch (IOException e) {
+        throw new RecordException("Failure while reading record", null, e);
+      }
+    }
+
+
+    @Override
+    public RecordPointer getRecordPointer() {
+      return record;
+    }
+
+
+    @Override
+    public ROP getParent() {
+      return parent;
+    }
+
+  }
+
+  private DataValue convert(JsonNode node) {
+    if (node == null || node.isNull() || node.isMissingNode()) {
+      return DataValue.NULL_VALUE;
+    } else if (node.isArray()) {
+      SimpleArrayValue arr = new SimpleArrayValue(node.size());
+      for (int i = 0; i < node.size(); i++) {
+        arr.addToArray(i, convert(node.get(i)));
+      }
+      return arr;
+    } else if (node.isObject()) {
+      SimpleMapValue map = new SimpleMapValue();
+      String name;
+      for (Iterator<String> iter = node.fieldNames(); iter.hasNext();) {
+        name = iter.next();
+        map.setByName(name, convert(node.get(name)));
+      }
+      return map;
+    } else if (node.isBinary()) {
+      try {
+        return new BytesScalar(node.binaryValue());
+      } catch (IOException e) {
+        throw new RuntimeException("Failure converting binary value.", e);
+      }
+    } else if (node.isBigDecimal()) {
+      throw new UnsupportedOperationException();
+//      return new BigDecimalScalar(node.decimalValue());
+    } else if (node.isBigInteger()) {
+      throw new UnsupportedOperationException();
+//      return new BigIntegerScalar(node.bigIntegerValue());
+    } else if (node.isBoolean()) {
+      return new BooleanScalar(node.asBoolean());
+    } else if (node.isFloatingPointNumber()) {
+      if (node.isBigDecimal()) {
+        throw new UnsupportedOperationException();
+//        return new BigDecimalScalar(node.decimalValue());
+      } else {
+        return new DoubleScalar(node.asDouble());
+      }
+    } else if (node.isInt()) {
+      return new IntegerScalar(node.asInt());
+    } else if (node.isLong()) {
+      return new LongScalar(node.asLong());
+    } else if (node.isTextual()) {
+      return new StringScalar(node.asText());
+    } else {
+      throw new UnsupportedOperationException(String.format("Don't know how to convert value of type %s.", node
+          .getClass().getCanonicalName()));
+    }
+
+  }
+
+  
+  /* (non-Javadoc)
+   * @see org.apache.drill.exec.ref.rse.DataReader#getIterator()
+   */
+  @Override
+  public RecordIterator getIterator() {
+    return new NodeIter();
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.drill.exec.ref.rse.DataReader#cleanup()
+   */
+  @Override
+  public void cleanup() {
+    try {
+      parser.close();
+      this.input.close();
+    } catch (IOException e) {
+      logger.warn("Error while closing InputStream for file {}", file, e);
+    }
+
+  }
+
+
+  @Override
+  public void setup() {
+  }
+}
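
For context, NodeIter above advances the parser one token and then pulls a whole tree per record. The same read loop can be exercised with plain Jackson; the sketch below is illustrative only (it prints each tree instead of converting to DataValues, and uses createParser, the non-deprecated name for createJsonParser):

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonTreeStreamExample {
  // Reads concatenated JSON objects one tree at a time, mirroring NodeIter.next().
  public static void readAll(InputStream in) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    JsonParser parser = mapper.getFactory()
        .createParser(new InputStreamReader(in, StandardCharsets.UTF_8));
    while (parser.nextToken() != null) {          // null token signals end of input
      JsonNode node = mapper.readTree(parser);    // consume one complete object or array
      if (node == null) break;
      System.out.println(node);                   // a real reader would convert to DataValues here
    }
    parser.close();
  }
}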

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/OutputStreamWriter.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/OutputStreamWriter.java b/sandbox/prototype/exec/java-exec/rse/OutputStreamWriter.java
new file mode 100644
index 0000000..20d5b8f
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/OutputStreamWriter.java
@@ -0,0 +1,78 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.drill.exec.ref.RecordPointer;
+import org.apache.drill.exec.ref.RunOutcome.OutcomeType;
+import org.apache.drill.exec.ref.rops.DataWriter;
+import org.apache.drill.exec.ref.rops.DataWriter.ConverterType;
+import org.apache.hadoop.fs.FSDataOutputStream;
+
+public class OutputStreamWriter implements RecordRecorder{
+  
+  private OutputStream stream;
+  private FSDataOutputStream posStream;
+  private DataWriter writer;
+  private ConverterType type;
+  private boolean closeStream;
+  
+  public OutputStreamWriter(OutputStream stream, ConverterType type, boolean closeStream){
+    this.stream = stream;
+    this.closeStream = closeStream;
+    this.type = type;
+    if(stream instanceof FSDataOutputStream) posStream = (FSDataOutputStream) stream;
+  }
+
+  @Override
+  public void setup() throws IOException {
+    DataWriter w = null;
+    switch(type){
+    case JSON:
+      w = new JSONDataWriter(stream);
+      break;
+    default:
+      throw new UnsupportedOperationException();
+    }
+    this.writer = w;
+  }
+  
+  private long getPos() throws IOException{
+    if(posStream == null) return 0;
+    return posStream.getPos();
+  }
+
+  @Override
+  public long recordRecord(RecordPointer pointer) throws IOException {
+    pointer.write(writer);
+    return getPos();
+  }
+
+  @Override
+  public void finish(OutcomeType outcome) throws IOException {
+    writer.finish();
+    if(closeStream){
+      stream.close();
+    }else{
+      stream.flush();
+    }
+  }
+  
+}
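
getPos() above only reports byte offsets when the sink happens to be an FSDataOutputStream; any byte-counting wrapper yields equivalent positions. A rough stand-in using Guava's CountingOutputStream, chosen here purely for illustration and not used by the code above:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import com.google.common.io.CountingOutputStream;

public class CountingWriteExample {
  public static void main(String[] args) throws IOException {
    OutputStream target = new ByteArrayOutputStream();
    CountingOutputStream counting = new CountingOutputStream(target);
    counting.write("{\"a\":1}\n".getBytes(StandardCharsets.UTF_8));
    // Analogous to OutputStreamWriter.getPos(): the offset after the last record.
    long pos = counting.getCount();
    System.out.println("bytes written so far: " + pos);
    counting.close();
  }
}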

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/QueueRSE.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/QueueRSE.java b/sandbox/prototype/exec/java-exec/rse/QueueRSE.java
new file mode 100644
index 0000000..9a0a132
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/QueueRSE.java
@@ -0,0 +1,100 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.ArrayBlockingQueue;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.logical.StorageEngineConfigBase;
+import org.apache.drill.common.logical.data.Store;
+import org.apache.drill.exec.ref.RecordPointer;
+import org.apache.drill.exec.ref.RunOutcome.OutcomeType;
+import org.apache.drill.exec.ref.exceptions.SetupException;
+
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+public class QueueRSE extends RSEBase {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(QueueRSE.class);
+
+  private DrillConfig dConfig;
+  private final List<Queue<Object>> sinkQueues;
+  
+  public QueueRSE(QueueRSEConfig engineConfig, DrillConfig dConfig) throws SetupException{
+    this.dConfig = dConfig;
+    sinkQueues = Collections.singletonList( (Queue<Object>) (new ArrayBlockingQueue<Object>(100)));
+  }
+
+  public Queue<Object> getQueue(int number){
+    return sinkQueues.get(number);
+  }
+  
+  @JsonTypeName("queue") public static class QueueRSEConfig extends StorageEngineConfigBase {}
+  
+  public static class QueueOutputInfo{
+    public int number;
+  }
+
+  public boolean supportsWrite() {
+    return true;
+  }
+
+  
+  @Override
+  public RecordRecorder getWriter(Store store) throws IOException {
+    QueueOutputInfo config = store.getTarget().getWith(dConfig, QueueOutputInfo.class);
+    Queue<Object> q = dConfig.getQueue(config.number);
+    return new QueueRecordRecorder(q);
+  }
+
+  
+  private class QueueRecordRecorder implements RecordRecorder{
+
+    private final Queue<Object> queue;
+    
+    public QueueRecordRecorder(Queue<Object> queue) {
+      this.queue = queue;
+    }
+
+    @Override
+    public void setup() throws IOException {
+    }
+
+    @Override
+    public long recordRecord(RecordPointer r) throws IOException {
+      final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      final JSONDataWriter writer = new JSONDataWriter(baos);
+      r.write(writer);
+      writer.finish();
+      queue.add(baos.toByteArray());
+      return 0;
+    }
+
+    @Override
+    public void finish(OutcomeType type) throws IOException {
+      queue.add(type);
+    }
+    
+  }
+  
+  
+}
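
QueueRSE's recorder serializes each record to a byte[] and enqueues it, then enqueues the run outcome as a final sentinel. A stripped-down sketch of that producer/consumer hand-off with JDK types only; the "DONE" string stands in for RunOutcome.OutcomeType:

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueSinkExample {
  public static void main(String[] args) {
    Queue<Object> sink = new ArrayBlockingQueue<Object>(100);

    // Producer: each "record" is enqueued as bytes, mirroring QueueRecordRecorder.recordRecord().
    sink.add("{\"user\":\"a\"}".getBytes());
    sink.add("{\"user\":\"b\"}".getBytes());
    sink.add("DONE");  // outcome sentinel, analogous to finish(OutcomeType)

    // Consumer: drain until the sentinel is seen.
    Object next;
    while ((next = sink.poll()) != null) {
      if (next instanceof byte[]) {
        System.out.println(new String((byte[]) next));
      } else {
        System.out.println("outcome: " + next);
        break;
      }
    }
  }
}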

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/RSEBase.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/RSEBase.java b/sandbox/prototype/exec/java-exec/rse/RSEBase.java
new file mode 100644
index 0000000..3f86c98
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/RSEBase.java
@@ -0,0 +1,71 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.IOException;
+import java.util.Collection;
+
+import org.apache.drill.common.logical.data.Scan;
+import org.apache.drill.common.logical.data.Store;
+import org.apache.drill.common.util.PathScanner;
+import org.apache.drill.exec.ref.ExecRefConstants;
+import org.apache.drill.exec.ref.RecordIterator;
+import org.apache.drill.exec.ref.exceptions.MajorException;
+import org.apache.drill.exec.ref.rops.ROP;
+
+import com.typesafe.config.Config;
+
+public abstract class RSEBase implements ReferenceStorageEngine{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RSEBase.class);
+  
+  @Override
+  public boolean supportsRead() {
+    return false;
+  }
+
+  @Override
+  public boolean supportsWrite() {
+    return false;
+  }
+
+  @Override
+  public Collection<ReadEntry> getReadEntries(Scan scan) throws IOException {
+    throw new UnsupportedOperationException(String.format("%s does not support reads.", this.getClass().getCanonicalName()));
+  }
+
+  @Override
+  public RecordReader getReader(ReadEntry readEntry, ROP parentROP) throws IOException {
+    throw new UnsupportedOperationException(String.format("%s does not support reads.", this.getClass().getCanonicalName()));
+  }
+
+  @Override
+  public RecordRecorder getWriter(Store store) throws IOException {
+    throw new UnsupportedOperationException(String.format("%s does not support writes.", this.getClass().getCanonicalName()));
+  }
+  
+  public static Class<?>[] getSubTypes(Config config){
+    Collection<Class<? extends ReferenceStorageEngine>> engines = PathScanner.scanForImplementations(ReferenceStorageEngine.class, config.getStringList(ExecRefConstants.STORAGE_ENGINE_SCAN_PACKAGES));
+    return engines.toArray(new Class<?>[engines.size()]);
+  }
+
+  @SuppressWarnings("unchecked")
+  protected <T extends ReadEntry> T getReadEntry(Class<T> c, ReadEntry entry){
+    if(!c.isAssignableFrom(entry.getClass())) throw new MajorException(String.format("Expected entry type was invalid.  Expected entry of type %s but received type of %s.", c.getCanonicalName(), entry.getClass().getCanonicalName()));
+    return (T) entry;
+  }
+}
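
RSEBase.getReadEntry() is a checked downcast that fails fast with a descriptive message instead of surfacing a bare ClassCastException later. A generic version of the same guard, with IllegalArgumentException standing in for MajorException:

public class CheckedCastExample {
  // Generic analogue of RSEBase.getReadEntry(): verify the runtime type before casting.
  @SuppressWarnings("unchecked")
  static <T> T checkedCast(Class<T> expected, Object value) {
    if (!expected.isAssignableFrom(value.getClass())) {
      throw new IllegalArgumentException(String.format(
          "Expected an instance of %s but received %s.",
          expected.getCanonicalName(), value.getClass().getCanonicalName()));
    }
    return (T) value;
  }

  public static void main(String[] args) {
    CharSequence s = checkedCast(CharSequence.class, "hello");  // succeeds
    System.out.println(s);
    checkedCast(Integer.class, "hello");                        // throws with a descriptive message
  }
}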

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/RSERegistry.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/RSERegistry.java b/sandbox/prototype/exec/java-exec/rse/RSERegistry.java
new file mode 100644
index 0000000..4266aac
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/RSERegistry.java
@@ -0,0 +1,85 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.logical.StorageEngineConfig;
+import org.apache.drill.common.util.PathScanner;
+import org.apache.drill.exec.ref.ExecRefConstants;
+import org.apache.drill.exec.ref.exceptions.SetupException;
+
+import com.typesafe.config.Config;
+
+public class RSERegistry {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RSERegistry.class);
+  
+  private Map<Object, Constructor<? extends ReferenceStorageEngine>> availableEngines = new HashMap<Object, Constructor<? extends ReferenceStorageEngine>>();
+  private Map<StorageEngineConfig, ReferenceStorageEngine> activeEngines = new HashMap<StorageEngineConfig, ReferenceStorageEngine>();
+  private DrillConfig config;
+  
+  public RSERegistry(DrillConfig config){
+    this.config = config;
+    setup(config);
+  }
+  
+  @SuppressWarnings("unchecked")
+  public void setup(DrillConfig config){
+    Collection<Class<? extends ReferenceStorageEngine>> engines = PathScanner.scanForImplementations(ReferenceStorageEngine.class, config.getStringList(ExecRefConstants.STORAGE_ENGINE_SCAN_PACKAGES));
+    logger.debug("Loading storage engines {}", engines);
+    for(Class<? extends ReferenceStorageEngine> engine: engines){
+      int i =0;
+      for(Constructor<?> c : engine.getConstructors()){
+        Class<?>[] params = c.getParameterTypes();
+        if(params.length != 2 || params[1] == Config.class || !StorageEngineConfig.class.isAssignableFrom(params[0])){
+          logger.debug("Skipping ReferenceStorageEngine constructor {} for engine class {} since it doesn't implement a [constructor(StorageEngineConfig, Config)]", c, engine);
+          continue;
+        }
+        availableEngines.put(params[0], (Constructor<? extends ReferenceStorageEngine>) c);
+        i++;
+      }
+      if(i == 0){
+        logger.debug("Skipping registration of ReferenceStorageEngine {} as it doesn't have a constructor with the parameters of (StorangeEngineConfig, Config)", engine.getCanonicalName());
+      }
+    }
+  }
+  
+  public ReferenceStorageEngine getEngine(StorageEngineConfig engineConfig) throws SetupException{
+    ReferenceStorageEngine engine = activeEngines.get(engineConfig);
+    if(engine != null) return engine;
+    Constructor<? extends ReferenceStorageEngine> c = availableEngines.get(engineConfig.getClass());
+    if(c == null) throw new SetupException(String.format("Failure finding StorageEngine constructor for config %s", engineConfig));
+    try {
+      return c.newInstance(engineConfig, config);
+    } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
+      Throwable t = e instanceof InvocationTargetException ? ((InvocationTargetException)e).getTargetException() : e;
+      if(t instanceof SetupException) throw ((SetupException) t);
+      throw new SetupException(String.format("Failure setting up new storage engine configuration for config %s", engineConfig), t);
+    }
+  }
+  
+
+  
+}
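
RSERegistry keys the available engines by their config class and instantiates them reflectively, unwrapping InvocationTargetException so the engine's own failure is reported rather than the reflection wrapper. The sketch below shows that pattern in isolation; Engine, FileConfig, and the String settings parameter are illustrative stand-ins for ReferenceStorageEngine, StorageEngineConfig, and DrillConfig:

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;

public class ConstructorRegistryExample {
  interface Engine {}
  static class FileConfig {}
  static class FileEngine implements Engine {
    FileEngine(FileConfig config, String globalSettings) {}
  }

  private final Map<Class<?>, Constructor<? extends Engine>> byConfig =
      new HashMap<Class<?>, Constructor<? extends Engine>>();

  @SuppressWarnings("unchecked")
  void register(Class<? extends Engine> engine) {
    for (Constructor<?> c : engine.getDeclaredConstructors()) {
      Class<?>[] p = c.getParameterTypes();
      // Keep only (SomeConfig, String) constructors, keyed by the config type.
      if (p.length == 2 && p[1] == String.class) {
        byConfig.put(p[0], (Constructor<? extends Engine>) c);
      }
    }
  }

  Engine create(Object config, String settings) throws Exception {
    Constructor<? extends Engine> c = byConfig.get(config.getClass());
    if (c == null) throw new IllegalStateException("No engine registered for " + config.getClass());
    try {
      return c.newInstance(config, settings);
    } catch (InvocationTargetException e) {
      // Re-throw the engine's own failure rather than the reflection wrapper.
      throw new IllegalStateException("Engine setup failed", e.getTargetException());
    }
  }

  public static void main(String[] args) throws Exception {
    ConstructorRegistryExample registry = new ConstructorRegistryExample();
    registry.register(FileEngine.class);
    Engine e = registry.create(new FileConfig(), "settings");
    System.out.println(e.getClass().getSimpleName());
  }
}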

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/RecordReader.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/RecordReader.java b/sandbox/prototype/exec/java-exec/rse/RecordReader.java
new file mode 100644
index 0000000..b7840bc
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/RecordReader.java
@@ -0,0 +1,28 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import org.apache.drill.exec.ref.RecordIterator;
+
+public interface RecordReader {
+
+  public abstract RecordIterator getIterator();
+  public abstract void setup();
+  public abstract void cleanup();
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/b53933f2/sandbox/prototype/exec/java-exec/rse/RecordRecorder.java
----------------------------------------------------------------------
diff --git a/sandbox/prototype/exec/java-exec/rse/RecordRecorder.java b/sandbox/prototype/exec/java-exec/rse/RecordRecorder.java
new file mode 100644
index 0000000..9527b0b
--- /dev/null
+++ b/sandbox/prototype/exec/java-exec/rse/RecordRecorder.java
@@ -0,0 +1,32 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.ref.rse;
+
+import java.io.IOException;
+
+import org.apache.drill.exec.ref.RecordPointer;
+import org.apache.drill.exec.ref.RunOutcome;
+
+public interface RecordRecorder {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RecordRecorder.class);
+  
+  public void setup() throws IOException;
+  public long recordRecord(RecordPointer pointer) throws IOException;
+  public void finish(RunOutcome.OutcomeType outcome) throws IOException;
+  
+}