Posted to dev@drill.apache.org by ja...@apache.org on 2014/04/23 05:14:39 UTC

[01/10] git commit: DRILL-466: fix issue where the program does not terminate when submit_plan fails

Repository: incubator-drill
Updated Branches:
  refs/heads/master 4a8ae53cc -> 69c571ccd


DRILL-466: fix issue where the program does not terminate when submit_plan fails


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/50cc90e5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/50cc90e5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/50cc90e5

Branch: refs/heads/master
Commit: 50cc90e5e4fa3839019ec55204af4e6652b187a0
Parents: 4a8ae53
Author: Kamesh <ka...@gmail.com>
Authored: Tue Apr 22 13:44:46 2014 +0530
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Tue Apr 22 18:59:33 2014 -0700

----------------------------------------------------------------------
 .../main/java/org/apache/drill/exec/client/QuerySubmitter.java    | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/50cc90e5/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java
index 0234b7e..7967957 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java
@@ -130,6 +130,9 @@ public class QuerySubmitter {
       }
       return submitQuery(client, plan, type, format, width);
 
+    } catch(Throwable th) {
+      System.err.println("Query Failed due to : " + th.getMessage());
+      return -1;
     }finally{
       if(client != null) client.close();
       if(local){
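
For context, the fix follows a common pattern: catch any Throwable raised on the submission path, report it, and return a non-zero code that the caller can turn into a process exit. The sketch below is a minimal, self-contained illustration of that pattern; the class name, method, and argument handling are hypothetical and not the actual QuerySubmitter API.

public class SubmitExample {
  // Illustrative stand-in for the patched method: any failure is caught,
  // reported on stderr, and converted into a -1 return code instead of being swallowed.
  static int submitPlan(String plan) {
    try {
      if (plan == null || plan.isEmpty()) {
        throw new IllegalArgumentException("empty plan");
      }
      // ... submit the plan and wait for results ...
      return 0;
    } catch (Throwable th) {
      System.err.println("Query Failed due to : " + th.getMessage());
      return -1;
    } finally {
      // release client resources here, mirroring the finally block in the patch
    }
  }

  public static void main(String[] args) {
    // Returning -1 only terminates the run if the caller maps it to an exit code.
    System.exit(submitPlan(args.length > 0 ? args[0] : null));
  }
}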


[05/10] git commit: DRILL-332: Support for decimal data type

Posted by ja...@apache.org.
DRILL-332: Support for decimal data type


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/a5ee8f84
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/a5ee8f84
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/a5ee8f84

Branch: refs/heads/master
Commit: a5ee8f84653d3dff64d83821fc14007514493382
Parents: 50cc90e
Author: Mehant Baid <me...@gmail.com>
Authored: Thu Apr 17 00:21:39 2014 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Tue Apr 22 19:06:47 2014 -0700

----------------------------------------------------------------------
 .../drill/common/expression/parser/ExprLexer.g  |    9 +-
 .../drill/common/expression/parser/ExprParser.g |   18 +-
 .../expression/ExpressionStringBuilder.java     |   49 +
 .../common/expression/ValueExpressions.java     |  166 ++-
 .../common/expression/fn/CastFunctions.java     |    6 +
 .../visitors/AbstractExprVisitor.java           |   25 +
 .../expression/visitors/AggregateChecker.java   |   22 +
 .../expression/visitors/ConstantChecker.java    |   24 +
 .../common/expression/visitors/ExprVisitor.java |    8 +
 .../visitors/ExpressionValidator.java           |   24 +
 .../expression/visitors/SimpleExprVisitor.java  |   28 +
 .../org/apache/drill/common/types/Types.java    |   21 +-
 .../drill/common/util/DecimalUtility.java       |  291 +++++
 exec/java-exec/pom.xml                          |    5 -
 exec/java-exec/src/main/codegen/config.fmpp     |    3 +-
 exec/java-exec/src/main/codegen/data/Casts.tdd  |   72 ++
 .../src/main/codegen/data/DecimalTypes.tdd      |   26 +
 .../src/main/codegen/data/ValueVectorTypes.tdd  |   33 +-
 .../src/main/codegen/includes/vv_imports.ftl    |    2 +
 .../Decimal/CastDecimalDenseDecimalSparse.java  |  160 +++
 .../templates/Decimal/CastDecimalFloat.java     |   95 ++
 .../templates/Decimal/CastDecimalInt.java       |  106 ++
 .../templates/Decimal/CastDecimalSimilar.java   |   80 ++
 .../Decimal/CastDecimalSparseDecimalDense.java  |  175 +++
 .../templates/Decimal/CastDecimalVarchar.java   |  212 ++++
 .../templates/Decimal/CastFloatDecimal.java     |   92 ++
 .../templates/Decimal/CastIntDecimal.java       |  105 ++
 .../templates/Decimal/CastSrcDecimalSimple.java |  256 +++++
 .../templates/Decimal/CastVarCharDecimal.java   |  331 ++++++
 .../templates/Decimal/DecimalFunctions.java     | 1078 ++++++++++++++++++
 .../codegen/templates/FixedValueVectors.java    |  143 ++-
 .../codegen/templates/NullableValueVectors.java |   32 +-
 .../main/codegen/templates/SqlAccessors.java    |    5 +
 .../src/main/codegen/templates/TypeHelper.java  |    5 +-
 .../main/codegen/templates/ValueHolders.java    |   71 +-
 .../sig/ConstantExpressionIdentifier.java       |   24 +
 .../drill/exec/expr/EvaluationVisitor.java      |  131 ++-
 .../exec/expr/ExpressionTreeMaterializer.java   |   95 +-
 .../exec/expr/annotations/FunctionTemplate.java |    2 +-
 .../expr/fn/DrillDecimalCastFuncHolder.java     |   68 ++
 .../expr/fn/DrillDecimalMaxScaleFuncHolder.java |   58 +
 .../expr/fn/DrillDecimalSumScaleFuncHolder.java |   58 +
 .../drill/exec/expr/fn/FunctionConverter.java   |   10 +-
 .../drill/exec/planner/logical/DrillOptiq.java  |   52 +-
 .../drill/exec/record/MaterializedField.java    |    9 +
 .../exec/resolver/ResolverTypePrecedence.java   |   11 +-
 .../drill/exec/resolver/TypeCastRules.java      |  250 ++--
 .../drill/exec/store/hive/HiveRecordReader.java |    4 +-
 .../drill/exec/vector/ValueHolderHelper.java    |   68 +-
 .../drill/exec/physical/impl/TestDecimal.java   |  357 ++++++
 .../resources/decimal/cast_float_decimal.json   |   47 +
 .../resources/decimal/cast_simple_decimal.json  |   47 +
 .../decimal/simple_decimal_arithmetic.json      |   55 +
 .../resources/decimal/test_decimal_complex.json |   61 +
 .../decimal/test_decimal_dense_sparse.json      |   78 ++
 .../decimal/test_decimal_sort_complex.json      |   56 +
 .../test_decimal_sparse_dense_dense.json        |   56 +
 .../test/resources/input_complex_decimal.json   |   28 +
 .../test/resources/input_simple_decimal.json    |   24 +
 .../resources/input_sort_complex_decimal.json   |   30 +
 pom.xml                                         |    6 +
 .../apache/drill/common/types/TypeProtos.java   |  127 ++-
 protocol/src/main/protobuf/Types.proto          |   12 +-
 63 files changed, 5365 insertions(+), 237 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprLexer.g
----------------------------------------------------------------------
diff --git a/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprLexer.g b/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprLexer.g
index 700bd72..b0d082d 100644
--- a/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprLexer.g
+++ b/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprLexer.g
@@ -53,8 +53,13 @@ TIMESTAMPTZ: 'timestamptz' | 'TIMESTAMPTZ';
 INTERVAL : 'interval' | 'INTERVAL';
 INTERVALYEAR : 'intervalyear' | 'INTERVALYEAR';
 INTERVALDAY : 'intervalday' | 'INTERVALDAY';
-
 Period : '.';
+DECIMAL9 : 'decimal9';
+DECIMAL18 : 'decimal18';
+DECIMAL28DENSE : 'decimal28dense';
+DECIMAL28SPARSE : 'decimal28sparse';
+DECIMAL38DENSE : 'decimal38dense';
+DECIMAL38SPARSE : 'decimal38sparse';
 Or       : '||' | 'or' | 'OR' | 'Or';
 And      : '&&' | 'and' | 'AND' ;
 Equals   : '==' | '=';
@@ -144,4 +149,4 @@ FallThrough
 	  );
 	}
   :
-  ;
\ No newline at end of file
+  ;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprParser.g
----------------------------------------------------------------------
diff --git a/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprParser.g b/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprParser.g
index e4eb149..6788e56 100644
--- a/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprParser.g
+++ b/common/src/main/antlr3/org/apache/drill/common/expression/parser/ExprParser.g
@@ -101,12 +101,26 @@ numType returns [MajorType type]
 	| BIGINT { $type = Types.required(TypeProtos.MinorType.BIGINT); }
 	| FLOAT4 { $type = Types.required(TypeProtos.MinorType.FLOAT4); }
 	| FLOAT8 { $type = Types.required(TypeProtos.MinorType.FLOAT8); }
+	| DECIMAL9 OParen precision Comma scale CParen { $type = TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.DECIMAL9).setMode(DataMode.REQUIRED).setPrecision($precision.value.intValue()).setScale($scale.value.intValue()).build(); }
+	| DECIMAL18 OParen precision Comma scale CParen { $type = TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.DECIMAL18).setMode(DataMode.REQUIRED).setPrecision($precision.value.intValue()).setScale($scale.value.intValue()).build(); }
+	| DECIMAL28DENSE OParen precision Comma scale CParen { $type = TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.DECIMAL28DENSE).setMode(DataMode.REQUIRED).setPrecision($precision.value.intValue()).setScale($scale.value.intValue()).build(); }
+	| DECIMAL28SPARSE OParen precision Comma scale CParen { $type = TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.DECIMAL28SPARSE).setMode(DataMode.REQUIRED).setPrecision($precision.value.intValue()).setScale($scale.value.intValue()).build(); }
+	| DECIMAL38DENSE OParen precision Comma scale CParen { $type = TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.DECIMAL38DENSE).setMode(DataMode.REQUIRED).setPrecision($precision.value.intValue()).setScale($scale.value.intValue()).build(); }
+	| DECIMAL38SPARSE OParen precision Comma scale CParen { $type = TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.DECIMAL38SPARSE).setMode(DataMode.REQUIRED).setPrecision($precision.value.intValue()).setScale($scale.value.intValue()).build(); }
 	;
 
 charType returns [MajorType type]
 	:  VARCHAR typeLen {$type = TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.VARCHAR).setMode(DataMode.REQUIRED).setWidth($typeLen.length.intValue()).build(); }
 	|  VARBINARY typeLen {$type = TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.VARBINARY).setMode(DataMode.REQUIRED).setWidth($typeLen.length.intValue()).build();}	
-	; 
+	;
+
+precision returns [Integer value]
+    : Number {$value = Integer.parseInt($Number.text); }
+    ;
+
+scale returns [Integer value]
+    : Number {$value = Integer.parseInt($Number.text); }
+    ;
 
 dateType returns [MajorType type]
     : DATE { $type = Types.required(TypeProtos.MinorType.DATE); }
@@ -121,7 +135,7 @@ dateType returns [MajorType type]
 typeLen returns [Integer length]
     : OParen Number CParen {$length = Integer.parseInt($Number.text);}
     ;
-     	
+
 ifStatement returns [LogicalExpression e]
 	@init {
 	  IfExpression.Builder s = IfExpression.newBuilder();
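
The new grammar rules let an expression type be written with an explicit precision and scale, e.g. decimal18(10, 2), and each rule's action builds the corresponding MajorType. The standalone sketch below shows the object such an action constructs, using the same builder calls as the grammar above; the wrapper class and main method are illustrative only.

import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;

public class DecimalTypeExample {
  public static void main(String[] args) {
    // What the DECIMAL18 rule builds for a type written as "decimal18(10, 2)":
    // minor type DECIMAL18, REQUIRED mode, precision 10, scale 2.
    MajorType type = MajorType.newBuilder()
        .setMinorType(TypeProtos.MinorType.DECIMAL18)
        .setMode(DataMode.REQUIRED)
        .setPrecision(10)
        .setScale(2)
        .build();
    System.out.println(type);
  }
}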

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/ExpressionStringBuilder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/ExpressionStringBuilder.java b/common/src/main/java/org/apache/drill/common/expression/ExpressionStringBuilder.java
index 89fa437..8b43846 100644
--- a/common/src/main/java/org/apache/drill/common/expression/ExpressionStringBuilder.java
+++ b/common/src/main/java/org/apache/drill/common/expression/ExpressionStringBuilder.java
@@ -28,6 +28,10 @@ import org.apache.drill.common.expression.ValueExpressions.IntExpression;
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpression;
 import org.apache.drill.common.expression.ValueExpressions.LongExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
@@ -40,6 +44,9 @@ import org.joda.time.format.DateTimeFormatterBuilder;
 import org.joda.time.format.DateTimeParser;
 
 import com.google.common.collect.ImmutableList;
+import org.apache.drill.common.util.DecimalUtility;
+
+import java.math.BigDecimal;
 
 public class ExpressionStringBuilder extends AbstractExprVisitor<Void, StringBuilder, RuntimeException>{
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExpressionStringBuilder.class);
@@ -184,6 +191,33 @@ public class ExpressionStringBuilder extends AbstractExprVisitor<Void, StringBui
   }
 
   @Override
+  public Void visitDecimal9Constant(Decimal9Expression decExpr, StringBuilder sb) throws RuntimeException {
+    BigDecimal value = new BigDecimal(decExpr.getIntFromDecimal());
+    sb.append((value.setScale(decExpr.getScale())).toString());
+    return null;
+  }
+
+  @Override
+  public Void visitDecimal18Constant(Decimal18Expression decExpr, StringBuilder sb) throws RuntimeException {
+    BigDecimal value = new BigDecimal(decExpr.getLongFromDecimal());
+    sb.append((value.setScale(decExpr.getScale())).toString());
+    return null;
+  }
+
+
+  @Override
+  public Void visitDecimal28Constant(Decimal28Expression decExpr, StringBuilder sb) throws RuntimeException {
+    sb.append(decExpr.toString());
+    return null;
+  }
+
+  @Override
+  public Void visitDecimal38Constant(Decimal38Expression decExpr, StringBuilder sb) throws RuntimeException {
+    sb.append(decExpr.getBigDecimal().toString());
+    return null;
+  }
+
+  @Override
   public Void visitDoubleConstant(DoubleExpression dExpr, StringBuilder sb) throws RuntimeException {
     sb.append(dExpr.getDouble());
     return null;
@@ -237,11 +271,26 @@ public class ExpressionStringBuilder extends AbstractExprVisitor<Void, StringBui
     case FIXED16CHAR:
     case FIXEDBINARY:
     case FIXEDCHAR:
+
       // add size in parens
       sb.append("(");
       sb.append(mt.getWidth());
       sb.append(")");
       break;
+    case DECIMAL9:
+    case DECIMAL18:
+    case DECIMAL28DENSE:
+    case DECIMAL28SPARSE:
+    case DECIMAL38DENSE:
+    case DECIMAL38SPARSE:
+
+      // add scale and precision
+      sb.append("(");
+      sb.append(mt.getPrecision());
+      sb.append(", ");
+      sb.append(mt.getScale());
+      sb.append(")");
+      break;
     default:
       throw new UnsupportedOperationException(String.format("Unable to convert cast expression %s into string.", e));
     }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/ValueExpressions.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/ValueExpressions.java b/common/src/main/java/org/apache/drill/common/expression/ValueExpressions.java
index 1d217f2..78f698c 100644
--- a/common/src/main/java/org/apache/drill/common/expression/ValueExpressions.java
+++ b/common/src/main/java/org/apache/drill/common/expression/ValueExpressions.java
@@ -18,6 +18,7 @@
 package org.apache.drill.common.expression;
 
 import java.util.GregorianCalendar;
+import java.math.BigDecimal;
 import java.util.Iterator;
 
 import org.apache.drill.common.expression.visitors.ExprVisitor;
@@ -25,6 +26,7 @@ import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
+import org.apache.drill.common.util.DecimalUtility;
 
 import com.google.common.collect.Iterators;
 
@@ -72,9 +74,23 @@ public class ValueExpressions {
       return new IntervalDayExpression(intervalInMillis);
   }
 
+  public static LogicalExpression getDecimal9(BigDecimal i) {
+    return new Decimal9Expression(i, ExpressionPosition.UNKNOWN);
+  }
+
+  public static LogicalExpression getDecimal18(BigDecimal i) {
+    return new Decimal18Expression(i, ExpressionPosition.UNKNOWN);
+  }
 
+  public static LogicalExpression getDecimal28(BigDecimal i) {
+    return new Decimal28Expression(i, ExpressionPosition.UNKNOWN);
+  }
 
-  public static LogicalExpression getNumericExpression(String s, ExpressionPosition ep) {
+  public static LogicalExpression getDecimal38(BigDecimal i) {
+      return new Decimal38Expression(i, ExpressionPosition.UNKNOWN);
+  }
+
+    public static LogicalExpression getNumericExpression(String s, ExpressionPosition ep) {
     try {
         int a = Integer.parseInt(s);
         return new IntExpression(a, ep);
@@ -210,6 +226,154 @@ public class ValueExpressions {
 
   }
 
+  public static class Decimal9Expression extends LogicalExpressionBase {
+
+    private int decimal;
+    private int scale;
+    private int precision;
+
+    public Decimal9Expression(BigDecimal input, ExpressionPosition pos) {
+      super(pos);
+      this.scale = input.scale();
+      this.precision = input.precision();
+      this.decimal = DecimalUtility.getDecimal9FromBigDecimal(input, scale, precision);
+    }
+
+
+    public int getIntFromDecimal() {
+      return decimal;
+    }
+
+    public int getScale() {
+      return scale;
+    }
+
+    public int getPrecision() {
+      return precision;
+    }
+
+    @Override
+    public MajorType getMajorType() {
+      return MajorType.newBuilder().setMinorType(MinorType.DECIMAL9).setScale(scale).setPrecision(precision).setMode(DataMode.REQUIRED).build();
+    }
+
+    @Override
+    public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E {
+      return visitor.visitDecimal9Constant(this, value);
+    }
+
+    @Override
+    public Iterator<LogicalExpression> iterator() {
+      return Iterators.emptyIterator();
+    }
+
+  }
+
+  public static class Decimal18Expression extends LogicalExpressionBase {
+
+    private long decimal;
+    private int scale;
+    private int precision;
+
+    public Decimal18Expression(BigDecimal input, ExpressionPosition pos) {
+      super(pos);
+      this.scale = input.scale();
+      this.precision = input.precision();
+      this.decimal = DecimalUtility.getDecimal18FromBigDecimal(input, scale, precision);
+    }
+
+
+    public long getLongFromDecimal() {
+      return decimal;
+    }
+
+    public int getScale() {
+      return scale;
+    }
+
+    public int getPrecision() {
+      return precision;
+    }
+
+    @Override
+    public MajorType getMajorType() {
+      return MajorType.newBuilder().setMinorType(MinorType.DECIMAL18).setScale(scale).setPrecision(precision).setMode(DataMode.REQUIRED).build();
+    }
+
+    @Override
+    public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E {
+      return visitor.visitDecimal18Constant(this, value);
+    }
+
+    @Override
+    public Iterator<LogicalExpression> iterator() {
+      return Iterators.emptyIterator();
+    }
+
+  }
+
+  public static class Decimal28Expression extends LogicalExpressionBase {
+
+    private BigDecimal bigDecimal;
+
+    public Decimal28Expression(BigDecimal input, ExpressionPosition pos) {
+      super(pos);
+      this.bigDecimal = input;
+    }
+
+
+    public BigDecimal getBigDecimal() {
+      return bigDecimal;
+    }
+
+    @Override
+    public MajorType getMajorType() {
+      return MajorType.newBuilder().setMinorType(MinorType.DECIMAL28SPARSE).setScale(bigDecimal.scale()).setPrecision(bigDecimal.precision()).setMode(DataMode.REQUIRED).build();
+    }
+
+    @Override
+    public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E {
+      return visitor.visitDecimal28Constant(this, value);
+    }
+
+    @Override
+    public Iterator<LogicalExpression> iterator() {
+      return Iterators.emptyIterator();
+    }
+
+  }
+
+  public static class Decimal38Expression extends LogicalExpressionBase {
+
+    private BigDecimal bigDecimal;
+
+    public Decimal38Expression(BigDecimal input, ExpressionPosition pos) {
+      super(pos);
+      this.bigDecimal = input;
+    }
+
+    public BigDecimal getBigDecimal() {
+      return bigDecimal;
+    }
+
+    @Override
+    public MajorType getMajorType() {
+      return MajorType.newBuilder().setMinorType(MinorType.DECIMAL38SPARSE).setScale(bigDecimal.scale()).setPrecision(bigDecimal.precision()).setMode(DataMode.REQUIRED).build();
+    }
+
+    @Override
+    public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E {
+      return visitor.visitDecimal38Constant(this, value);
+    }
+
+    @Override
+    public Iterator<LogicalExpression> iterator() {
+      return Iterators.emptyIterator();
+    }
+
+  }
+
+
   public static class DoubleExpression extends LogicalExpressionBase {
     private double d;
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/fn/CastFunctions.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/fn/CastFunctions.java b/common/src/main/java/org/apache/drill/common/expression/fn/CastFunctions.java
index dfffdfc..b76708d 100644
--- a/common/src/main/java/org/apache/drill/common/expression/fn/CastFunctions.java
+++ b/common/src/main/java/org/apache/drill/common/expression/fn/CastFunctions.java
@@ -41,6 +41,12 @@ public class CastFunctions {
     TYPE2FUNC.put(MinorType.INTERVALDAY, "castINTERVALDAY");
     TYPE2FUNC.put(MinorType.INTERVALYEAR, "castINTERVALYEAR");
     TYPE2FUNC.put(MinorType.INTERVAL, "castINTERVAL");
+    TYPE2FUNC.put(MinorType.DECIMAL9, "castDECIMAL9");
+    TYPE2FUNC.put(MinorType.DECIMAL18, "castDECIMAL18");
+    TYPE2FUNC.put(MinorType.DECIMAL28SPARSE, "castDECIMAL28SPARSE");
+    TYPE2FUNC.put(MinorType.DECIMAL28DENSE, "castDECIMAL28DENSE");
+    TYPE2FUNC.put(MinorType.DECIMAL38SPARSE, "castDECIMAL38SPARSE");
+    TYPE2FUNC.put(MinorType.DECIMAL38DENSE, "castDECIMAL38DENSE");
   }
 
   public static String getCastFunc(MinorType targetMinorType) {
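
With these additions the cast-function lookup also covers the decimal minor types. A tiny usage sketch (the wrapper class is hypothetical; getCastFunc is the method shown in the context line above):

import org.apache.drill.common.expression.fn.CastFunctions;
import org.apache.drill.common.types.TypeProtos.MinorType;

public class CastFuncLookup {
  public static void main(String[] args) {
    // Resolves a cast target minor type to the name of its cast function.
    System.out.println(CastFunctions.getCastFunc(MinorType.DECIMAL9));        // castDECIMAL9
    System.out.println(CastFunctions.getCastFunc(MinorType.DECIMAL38SPARSE)); // castDECIMAL38SPARSE
  }
}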

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/visitors/AbstractExprVisitor.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/visitors/AbstractExprVisitor.java b/common/src/main/java/org/apache/drill/common/expression/visitors/AbstractExprVisitor.java
index eb4410e..7ed36f6 100644
--- a/common/src/main/java/org/apache/drill/common/expression/visitors/AbstractExprVisitor.java
+++ b/common/src/main/java/org/apache/drill/common/expression/visitors/AbstractExprVisitor.java
@@ -33,6 +33,10 @@ import org.apache.drill.common.expression.ValueExpressions.DateExpression;
 import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpression;
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 
 public abstract class AbstractExprVisitor<T, VAL, EXCEP extends Exception> implements ExprVisitor<T, VAL, EXCEP> {
@@ -73,6 +77,27 @@ public abstract class AbstractExprVisitor<T, VAL, EXCEP extends Exception> imple
     return visitUnknown(intExpr, value);
   }
 
+
+  @Override
+  public T visitDecimal9Constant(Decimal9Expression decExpr, VAL value) throws EXCEP {
+    return visitUnknown(decExpr, value);
+  }
+
+  @Override
+  public T visitDecimal18Constant(Decimal18Expression decExpr, VAL value) throws EXCEP {
+    return visitUnknown(decExpr, value);
+  }
+
+  @Override
+  public T visitDecimal28Constant(Decimal28Expression decExpr, VAL value) throws EXCEP {
+    return visitUnknown(decExpr, value);
+  }
+
+  @Override
+  public T visitDecimal38Constant(Decimal38Expression decExpr, VAL value) throws EXCEP {
+    return visitUnknown(decExpr, value);
+  }
+
   @Override
   public T visitDateConstant(DateExpression intExpr, VAL value) throws EXCEP {
     return visitUnknown(intExpr, value);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/visitors/AggregateChecker.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/visitors/AggregateChecker.java b/common/src/main/java/org/apache/drill/common/expression/visitors/AggregateChecker.java
index 9df1f47..4291dee 100644
--- a/common/src/main/java/org/apache/drill/common/expression/visitors/AggregateChecker.java
+++ b/common/src/main/java/org/apache/drill/common/expression/visitors/AggregateChecker.java
@@ -33,6 +33,10 @@ import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpressio
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.FloatExpression;
 import org.apache.drill.common.expression.ValueExpressions.IntExpression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
@@ -108,6 +112,24 @@ public final class AggregateChecker implements ExprVisitor<Boolean, ErrorCollect
   public Boolean visitBooleanConstant(BooleanExpression e, ErrorCollector errors) {
     return false;
   }
+  public Boolean visitDecimal9Constant(Decimal9Expression decExpr, ErrorCollector errors) {
+    return false;
+  }
+
+  @Override
+  public Boolean visitDecimal18Constant(Decimal18Expression decExpr, ErrorCollector errors) {
+    return false;
+  }
+
+  @Override
+  public Boolean visitDecimal28Constant(Decimal28Expression decExpr, ErrorCollector errors) {
+    return false;
+  }
+
+  @Override
+  public Boolean visitDecimal38Constant(Decimal38Expression decExpr, ErrorCollector errors) {
+    return false;
+  }
 
   @Override
   public Boolean visitQuotedStringConstant(QuotedString e, ErrorCollector errors) {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/visitors/ConstantChecker.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/visitors/ConstantChecker.java b/common/src/main/java/org/apache/drill/common/expression/visitors/ConstantChecker.java
index 6da66d2..09fbedf 100644
--- a/common/src/main/java/org/apache/drill/common/expression/visitors/ConstantChecker.java
+++ b/common/src/main/java/org/apache/drill/common/expression/visitors/ConstantChecker.java
@@ -35,6 +35,10 @@ import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpressio
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 
 final class ConstantChecker implements ExprVisitor<Boolean, ErrorCollector, RuntimeException> {
@@ -127,6 +131,26 @@ final class ConstantChecker implements ExprVisitor<Boolean, ErrorCollector, Runt
   }
 
   @Override
+  public Boolean visitDecimal9Constant(Decimal9Expression decExpr, ErrorCollector errors) {
+    return false;
+  }
+
+  @Override
+  public Boolean visitDecimal18Constant(Decimal18Expression decExpr, ErrorCollector errors) {
+    return false;
+  }
+
+  @Override
+  public Boolean visitDecimal28Constant(Decimal28Expression decExpr, ErrorCollector errors) {
+    return false;
+  }
+
+  @Override
+  public Boolean visitDecimal38Constant(Decimal38Expression decExpr, ErrorCollector errors) {
+    return false;
+  }
+
+  @Override
   public Boolean visitDoubleConstant(DoubleExpression dExpr, ErrorCollector errors) {
     return true;
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/visitors/ExprVisitor.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/visitors/ExprVisitor.java b/common/src/main/java/org/apache/drill/common/expression/visitors/ExprVisitor.java
index fad4919..92fd777 100644
--- a/common/src/main/java/org/apache/drill/common/expression/visitors/ExprVisitor.java
+++ b/common/src/main/java/org/apache/drill/common/expression/visitors/ExprVisitor.java
@@ -33,6 +33,10 @@ import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
 import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpression;
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 
 
@@ -49,6 +53,10 @@ public interface ExprVisitor<T, VAL, EXCEP extends Exception> {
   public T visitTimeStampConstant(TimeStampExpression intExpr, VAL value) throws EXCEP;
   public T visitIntervalYearConstant(IntervalYearExpression intExpr, VAL value) throws EXCEP;
   public T visitIntervalDayConstant(IntervalDayExpression intExpr, VAL value) throws EXCEP;
+  public T visitDecimal9Constant(Decimal9Expression decExpr, VAL value) throws EXCEP;
+  public T visitDecimal18Constant(Decimal18Expression decExpr, VAL value) throws EXCEP;
+  public T visitDecimal28Constant(Decimal28Expression decExpr, VAL value) throws EXCEP;
+  public T visitDecimal38Constant(Decimal38Expression decExpr, VAL value) throws EXCEP;
   public T visitDoubleConstant(DoubleExpression dExpr, VAL value) throws EXCEP;
   public T visitBooleanConstant(BooleanExpression e, VAL value) throws EXCEP;
   public T visitQuotedStringConstant(QuotedString e, VAL value) throws EXCEP;
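
Because ExprVisitor gains four new methods, every concrete visitor has to handle them; AbstractExprVisitor (earlier in this commit) supplies defaults that fall through to visitUnknown. A minimal sketch of a visitor that only reacts to the new simple decimal constants, assuming visitUnknown is overridable as the other visitors in this commit suggest; the class is illustrative, not part of the patch:

import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
import org.apache.drill.common.expression.visitors.AbstractExprVisitor;

// Hypothetical visitor: returns true for the simple decimal constants, false for everything else.
public class SimpleDecimalDetector extends AbstractExprVisitor<Boolean, Void, RuntimeException> {
  @Override
  public Boolean visitDecimal9Constant(Decimal9Expression decExpr, Void value) {
    return true;
  }

  @Override
  public Boolean visitDecimal18Constant(Decimal18Expression decExpr, Void value) {
    return true;
  }

  @Override
  public Boolean visitUnknown(LogicalExpression e, Void value) {
    return false;
  }
}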

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/visitors/ExpressionValidator.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/visitors/ExpressionValidator.java b/common/src/main/java/org/apache/drill/common/expression/visitors/ExpressionValidator.java
index 803841a..1caf145 100644
--- a/common/src/main/java/org/apache/drill/common/expression/visitors/ExpressionValidator.java
+++ b/common/src/main/java/org/apache/drill/common/expression/visitors/ExpressionValidator.java
@@ -36,6 +36,10 @@ import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpressio
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -124,6 +128,26 @@ public class ExpressionValidator implements ExprVisitor<Void, ErrorCollector, Ru
   }
 
   @Override
+  public Void visitDecimal9Constant(Decimal9Expression decExpr, ErrorCollector errors) throws RuntimeException {
+    return null;
+  }
+
+  @Override
+  public Void visitDecimal18Constant(Decimal18Expression decExpr, ErrorCollector errors) throws RuntimeException {
+    return null;
+  }
+
+  @Override
+  public Void visitDecimal28Constant(Decimal28Expression decExpr, ErrorCollector errors) throws RuntimeException {
+    return null;
+  }
+
+  @Override
+  public Void visitDecimal38Constant(Decimal38Expression decExpr, ErrorCollector errors) throws RuntimeException {
+    return null;
+  }
+    
+  @Override
   public Void visitDateConstant(DateExpression intExpr, ErrorCollector errors) throws RuntimeException {
     return null;
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/expression/visitors/SimpleExprVisitor.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/visitors/SimpleExprVisitor.java b/common/src/main/java/org/apache/drill/common/expression/visitors/SimpleExprVisitor.java
index be80f11..28f13fb 100644
--- a/common/src/main/java/org/apache/drill/common/expression/visitors/SimpleExprVisitor.java
+++ b/common/src/main/java/org/apache/drill/common/expression/visitors/SimpleExprVisitor.java
@@ -31,6 +31,10 @@ import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpressio
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 
 public abstract class SimpleExprVisitor<T> implements ExprVisitor<T, Void, RuntimeException>{
@@ -97,6 +101,26 @@ public abstract class SimpleExprVisitor<T> implements ExprVisitor<T, Void, Runti
   }
 
   @Override
+  public T visitDecimal9Constant(Decimal9Expression decExpr, Void value) throws RuntimeException {
+    return visitDecimal9Constant(decExpr);
+  }
+
+  @Override
+  public T visitDecimal18Constant(Decimal18Expression decExpr, Void value) throws RuntimeException {
+    return visitDecimal18Constant(decExpr);
+  }
+
+  @Override
+  public T visitDecimal28Constant(Decimal28Expression decExpr, Void value) throws RuntimeException {
+    return visitDecimal28Constant(decExpr);
+  }
+
+  @Override
+  public T visitDecimal38Constant(Decimal38Expression decExpr, Void value) throws RuntimeException {
+    return visitDecimal38Constant(decExpr);
+  }
+
+  @Override
   public T visitDoubleConstant(DoubleExpression dExpr, Void value) throws RuntimeException {
     return visitDoubleConstant(dExpr);
   }
@@ -124,6 +148,10 @@ public abstract class SimpleExprVisitor<T> implements ExprVisitor<T, Void, Runti
   public abstract T visitIntervalYearConstant(IntervalYearExpression intExpr);
   public abstract T visitIntervalDayConstant(IntervalDayExpression intExpr);
   public abstract T visitTimeStampConstant(TimeStampExpression intExpr);
+  public abstract T visitDecimal9Constant(Decimal9Expression intExpr);
+  public abstract T visitDecimal18Constant(Decimal18Expression intExpr);
+  public abstract T visitDecimal28Constant(Decimal28Expression intExpr);
+  public abstract T visitDecimal38Constant(Decimal38Expression intExpr);
   public abstract T visitDoubleConstant(DoubleExpression dExpr);
   public abstract T visitBooleanConstant(BooleanExpression e);
   public abstract T visitQuotedStringConstant(QuotedString e); 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/types/Types.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/types/Types.java b/common/src/main/java/org/apache/drill/common/types/Types.java
index 7d82711..5418e50 100644
--- a/common/src/main/java/org/apache/drill/common/types/Types.java
+++ b/common/src/main/java/org/apache/drill/common/types/Types.java
@@ -39,9 +39,12 @@ public class Types {
     
     switch(type.getMinorType()){
     case BIGINT:
-    case DECIMAL16:
-    case DECIMAL4:
-    case DECIMAL8:
+    case DECIMAL38SPARSE:
+    case DECIMAL38DENSE:
+    case DECIMAL28SPARSE:
+    case DECIMAL28DENSE:
+    case DECIMAL18:
+    case DECIMAL9:
     case FLOAT4:
     case FLOAT8:
     case INT:
@@ -70,10 +73,12 @@ public class Types {
       return java.sql.Types.DATE;
     case TIMESTAMP:
       return java.sql.Types.DATE;
-    case DECIMAL12:
-    case DECIMAL16:
-    case DECIMAL4:
-    case DECIMAL8:
+    case DECIMAL9:
+    case DECIMAL18:
+    case DECIMAL28DENSE:
+    case DECIMAL28SPARSE:
+    case DECIMAL38DENSE:
+    case DECIMAL38SPARSE:
       return java.sql.Types.DECIMAL;
     case FIXED16CHAR:
       return java.sql.Types.NCHAR;
@@ -143,8 +148,6 @@ public class Types {
     if(type.getMode() == REPEATED) return true;
     switch(type.getMinorType()){
     case BIGINT:
-    case DECIMAL4:
-    case DECIMAL8:
     case FLOAT4:
     case FLOAT8:
     case INT:

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/common/src/main/java/org/apache/drill/common/util/DecimalUtility.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/util/DecimalUtility.java b/common/src/main/java/org/apache/drill/common/util/DecimalUtility.java
new file mode 100644
index 0000000..8255784
--- /dev/null
+++ b/common/src/main/java/org/apache/drill/common/util/DecimalUtility.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.common.util;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+
+
+import io.netty.buffer.Unpooled;
+import io.netty.buffer.ByteBuf;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+
+public class DecimalUtility {
+
+    public final static int MAX_DIGITS = 9;
+    public final static int DIGITS_BASE = 1000000000;
+    public final static int integerSize = (Integer.SIZE/8);
+
+    public final static String[] decimalToString = {"",
+            "0",
+            "00",
+            "000",
+            "0000",
+            "00000",
+            "000000",
+            "0000000",
+            "00000000",
+            "000000000"};
+
+
+    /* Given the number of actual digits this function returns the
+     * number of indexes it will occupy in the array of integers
+     * which are stored in base 1 billion
+     */
+    public static int roundUp(int ndigits) {
+        return (ndigits + MAX_DIGITS - 1)/MAX_DIGITS;
+    }
+
+    /* Returns a string representation of the given integer
+     * If the length of the given integer is less than the
+     * passed length, this function will prepend zeroes to the string
+     */
+    public static StringBuilder toStringWithZeroes(int number, int desiredLength) {
+        String value = ((Integer) number).toString();
+        int length = value.length();
+
+        StringBuilder str = new StringBuilder();
+        str.append(decimalToString[desiredLength - length]);
+        str.append(value);
+
+        return str;
+    }
+
+    public static StringBuilder toStringWithZeroes(long number, int desiredLength) {
+        String value = ((Long) number).toString();
+        int length = value.length();
+
+        StringBuilder str = new StringBuilder();
+
+        // Desired length can be > MAX_DIGITS
+        int zeroesLength = desiredLength - length;
+        while (zeroesLength > MAX_DIGITS) {
+            str.append(decimalToString[MAX_DIGITS]);
+            zeroesLength -= MAX_DIGITS;
+        }
+        str.append(decimalToString[zeroesLength]);
+        str.append(value);
+
+        return str;
+    }
+
+    public static BigDecimal getBigDecimalFromIntermediate(ByteBuf data, int startIndex, int nDecimalDigits, int scale) {
+
+        // In the intermediate representation we don't pad the scale with zeroes, so set truncate = false
+        return getBigDecimalFromByteBuf(data, startIndex, nDecimalDigits, scale, false);
+    }
+
+    public static BigDecimal getBigDecimalFromSparse(ByteBuf data, int startIndex, int nDecimalDigits, int scale) {
+
+        // In the sparse representation we pad the scale with zeroes for ease of arithmetic, need to truncate
+        return getBigDecimalFromByteBuf(data, startIndex, nDecimalDigits, scale, true);
+    }
+
+
+    /* Create a BigDecimal object using the data in the ByteBuf.
+     * This function assumes that data is provided in a non-dense format
+     * It works on both sparse and intermediate representations.
+     */
+    public static BigDecimal getBigDecimalFromByteBuf(ByteBuf data, int startIndex, int nDecimalDigits, int scale, boolean truncateScale) {
+
+        // For sparse decimal type we have padded zeroes at the end, strip them while converting to BigDecimal.
+        int actualDigits;
+
+        // Initialize the BigDecimal, first digit in the ByteBuf has the sign so mask it out
+        BigInteger decimalDigits = BigInteger.valueOf((data.getInt(startIndex)) & 0x7FFFFFFF);
+
+        BigInteger base = BigInteger.valueOf(DIGITS_BASE);
+
+        for (int i = 1; i < nDecimalDigits; i++) {
+
+            BigInteger temp = BigInteger.valueOf(data.getInt(startIndex + (i * integerSize)));
+            decimalDigits = decimalDigits.multiply(base);
+            decimalDigits = decimalDigits.add(temp);
+        }
+
+        // Truncate any additional padding we might have added
+        if (truncateScale == true && scale > 0 && (actualDigits = scale % MAX_DIGITS) != 0) {
+            BigInteger truncate = BigInteger.valueOf((int)Math.pow(10, (MAX_DIGITS - actualDigits)));
+            decimalDigits = decimalDigits.divide(truncate);
+        }
+
+        // set the sign
+        if ((data.getInt(startIndex) & 0x80000000) != 0) {
+            decimalDigits = decimalDigits.negate();
+        }
+
+        BigDecimal decimal = new BigDecimal(decimalDigits, scale);
+
+        return decimal;
+    }
+
+    /* This function returns a BigDecimal object from the dense decimal representation.
+     * First step is to convert the dense representation into an intermediate representation
+     * and then invoke getBigDecimalFromByteBuf() to get the BigDecimal object
+     */
+    public static BigDecimal getBigDecimalFromDense(ByteBuf data, int startIndex, int nDecimalDigits, int scale, int maxPrecision, int width) {
+
+        /* This method converts the dense representation to
+         * an intermediate representation. The intermediate
+         * representation has one more integer than the dense
+         * representation.
+         */
+        byte[] intermediateBytes = new byte[((nDecimalDigits + 1) * integerSize)];
+
+        // Start storing from the least significant byte of the first integer
+        int intermediateIndex = 3;
+
+        int[] mask = {0x03, 0x0F, 0x3F, 0xFF};
+        int[] reverseMask = {0xFC, 0xF0, 0xC0, 0x00};
+
+        int maskIndex;
+        int shiftOrder;
+        byte shiftBits;
+
+        // TODO: Some of the logic here is common with casting from Dense to Sparse types, factor out common code
+        if (maxPrecision == 38) {
+            maskIndex = 0;
+            shiftOrder = 6;
+            shiftBits = 0x00;
+            intermediateBytes[intermediateIndex++] = (byte) (data.getByte(startIndex) & 0x7F);
+        } else if (maxPrecision == 28) {
+            maskIndex = 1;
+            shiftOrder = 4;
+            shiftBits = (byte) ((data.getByte(startIndex) & 0x03) << shiftOrder);
+            intermediateBytes[intermediateIndex++] = (byte) (((data.getByte(startIndex) & 0x3C) & 0xFF) >>> 2);
+        } else {
+            throw new UnsupportedOperationException("Only dense types with max precision 38 and 28 are supported");
+        }
+
+        int inputIndex = 1;
+        boolean sign = false;
+
+        if ((data.getByte(startIndex) & 0x80) != 0) {
+            sign = true;
+        }
+
+        while (inputIndex < width) {
+
+            intermediateBytes[intermediateIndex] = (byte) ((shiftBits) | (((data.getByte(startIndex + inputIndex) & reverseMask[maskIndex]) & 0xFF) >>> (8 - shiftOrder)));
+
+            shiftBits = (byte) ((data.getByte(startIndex + inputIndex) & mask[maskIndex]) << shiftOrder);
+
+            inputIndex++;
+            intermediateIndex++;
+
+            if (((inputIndex - 1) % integerSize) == 0) {
+                shiftBits = (byte) ((shiftBits & 0xFF) >>> 2);
+                maskIndex++;
+                shiftOrder -= 2;
+            }
+
+        }
+        /* copy the last byte */
+        intermediateBytes[intermediateIndex] = shiftBits;
+
+        if (sign == true) {
+            intermediateBytes[0] = (byte) (intermediateBytes[0] | 0x80);
+        }
+
+        ByteBuf intermediateData = Unpooled.wrappedBuffer(intermediateBytes);
+
+        return getBigDecimalFromIntermediate(intermediateData, 0, nDecimalDigits + 1, scale);
+    }
+
+    /*
+     * Function converts the BigDecimal and stores it in our internal sparse representation
+     */
+    public static void getSparseFromBigDecimal(BigDecimal input, ByteBuf data, int startIndex, int scale, int precision, int nDecimalDigits) {
+
+        boolean sign = false;
+
+        if (input.signum() == -1) {
+            // negative input
+            sign = true;
+            input = input.abs();
+        }
+
+        // Truncate the input as per the scale provided
+        input = input.setScale(scale, BigDecimal.ROUND_DOWN);
+
+        // Separate out the integer part
+        BigDecimal integerPart = input.setScale(0, BigDecimal.ROUND_DOWN);
+
+        int destIndex = nDecimalDigits - roundUp(scale) - 1;
+
+        // we use base 1 billion integer digits for our internal representation
+        BigDecimal base = new BigDecimal(DIGITS_BASE);
+
+        while (integerPart.compareTo(BigDecimal.ZERO) == 1) {
+            // store the modulo as the integer value
+            data.setInt(startIndex + (destIndex * integerSize), (integerPart.remainder(base)).intValue());
+            destIndex--;
+            // Divide by base 1 billion
+            integerPart = (integerPart.divide(base)).setScale(0, BigDecimal.ROUND_DOWN);
+        }
+
+        /* Sparse representation contains padding of additional zeroes
+         * so each digit contains MAX_DIGITS for ease of arithmetic
+         */
+        int actualDigits;
+        if ((actualDigits = (scale % MAX_DIGITS)) != 0) {
+            // Pad additional zeroes
+            scale = scale + (MAX_DIGITS - actualDigits);
+            input = input.setScale(scale, BigDecimal.ROUND_DOWN);
+        }
+
+        //separate out the fractional part
+        BigDecimal fractionalPart = input.remainder(BigDecimal.ONE).movePointRight(scale);
+
+        destIndex = nDecimalDigits - 1;
+
+        while (scale > 0) {
+            // Get next set of MAX_DIGITS (9) store it in the ByteBuf
+            fractionalPart = fractionalPart.movePointLeft(MAX_DIGITS);
+            BigDecimal temp = fractionalPart.remainder(BigDecimal.ONE);
+
+            data.setInt(startIndex + (destIndex * integerSize), (temp.unscaledValue().intValue()));
+            destIndex--;
+
+            fractionalPart = fractionalPart.setScale(0, BigDecimal.ROUND_DOWN);
+            scale -= MAX_DIGITS;
+        }
+
+        // Set the negative sign
+        if (sign == true) {
+            data.setInt(startIndex, data.getInt(startIndex) | 0x80000000);
+        }
+
+    }
+    public static int getDecimal9FromBigDecimal(BigDecimal input, int scale, int precision) {
+        // Truncate or pad to set the input to the correct scale
+        input = input.setScale(scale, BigDecimal.ROUND_DOWN);
+
+        return (input.unscaledValue().intValue());
+    }
+
+    public static long getDecimal18FromBigDecimal(BigDecimal input, int scale, int precision) {
+        // Truncate or pad to set the input to the correct scale
+        input = input.setScale(scale, BigDecimal.ROUND_DOWN);
+
+        return (input.unscaledValue().longValue());
+    }
+}
+
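
The new DecimalUtility encodes a BigDecimal as an array of base-1-billion integer "digits", with the sign carried in the high bit of the first digit and the fractional part padded with zeroes up to a multiple of nine decimal digits. A small round-trip sketch against the API above, assuming a zero-filled heap ByteBuf and a Decimal28Sparse layout of five integer digits (per DecimalTypes.tdd later in this commit); the class and values are illustrative only:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.math.BigDecimal;
import org.apache.drill.common.util.DecimalUtility;

public class SparseDecimalRoundTrip {
  public static void main(String[] args) {
    int nDecimalDigits = 5;                            // Decimal28Sparse uses 5 base-1e9 digits
    ByteBuf buf = Unpooled.buffer(nDecimalDigits * 4); // heap buffer, zero-filled

    BigDecimal in = new BigDecimal("-123456789012.345");
    DecimalUtility.getSparseFromBigDecimal(in, buf, 0, in.scale(), in.precision(), nDecimalDigits);

    // Reading back strips the zero padding that was added to round the scale up to 9 digits.
    BigDecimal out = DecimalUtility.getBigDecimalFromSparse(buf, 0, nDecimalDigits, in.scale());
    System.out.println(out);   // -123456789012.345
  }
}

The truncateScale flag in getBigDecimalFromByteBuf is what removes that padding on the way out, matching the padding added in getSparseFromBigDecimal.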

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/pom.xml
----------------------------------------------------------------------
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 60eeb1f..196b095 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -200,11 +200,6 @@
       <version>0.4.2</version>
     </dependency>
     <dependency>
-      <groupId>io.netty</groupId>
-      <artifactId>netty-handler</artifactId>
-      <version>4.0.7.Final</version>
-    </dependency>
-    <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
       <version>2.5.0</version>

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/config.fmpp
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/config.fmpp b/exec/java-exec/src/main/codegen/config.fmpp
index ee7a7b5..49e0614 100644
--- a/exec/java-exec/src/main/codegen/config.fmpp
+++ b/exec/java-exec/src/main/codegen/config.fmpp
@@ -25,7 +25,8 @@ data: {
     aggrtypes2: tdd(../data/AggrTypes2.tdd),
     date: tdd(../data/DateTypes.tdd),
     extract: tdd(../data/ExtractTypes.tdd),
-    parser: tdd(../data/Parser.tdd)
+    parser: tdd(../data/Parser.tdd),
+    decimal: tdd(../data/DecimalTypes.tdd)
 }
 freemarkerLinks: {
     includes: includes/

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/data/Casts.tdd
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/data/Casts.tdd b/exec/java-exec/src/main/codegen/data/Casts.tdd
index a7036c6..397dcdb 100644
--- a/exec/java-exec/src/main/codegen/data/Casts.tdd
+++ b/exec/java-exec/src/main/codegen/data/Casts.tdd
@@ -80,5 +80,77 @@
     {from: "Interval", to: "IntervalYear", major: "IntervalComplexToSimple"},
     {from: "Interval", to: "IntervalDay", major: "IntervalComplexToSimple"}
 
+    {from: "Decimal9", to: "Decimal18", major: "DecimalSimpleDecimalSimple", javatype: "long"},
+
+    {from: "Decimal9", to: "Decimal28Dense", major: "DecimalSimpleDecimalDense", arraySize: "3"},
+    {from: "Decimal9", to: "Decimal38Dense", major: "DecimalSimpleDecimalDense", arraySize: "4"},
+    {from: "Decimal18", to: "Decimal28Dense", major: "DecimalSimpleDecimalDense", arraySize: "3"},
+    {from: "Decimal18", to: "Decimal38Dense", major: "DecimalSimpleDecimalDense", arraySize: "4"},
+
+    {from: "Decimal9", to: "Decimal28Sparse", major: "DecimalSimpleDecimalSparse", arraySize: "5"},
+    {from: "Decimal9", to: "Decimal38Sparse", major: "DecimalSimpleDecimalSparse", arraySize: "6"},
+    {from: "Decimal18", to: "Decimal28Sparse", major: "DecimalSimpleDecimalSparse", arraySize: "5"},
+    {from: "Decimal18", to: "Decimal38Sparse", major: "DecimalSimpleDecimalSparse", arraySize: "6"},
+
+    {from: "Decimal28Dense", to: "Decimal28Sparse", major: "DecimalDenseDecimalSparse", arraySize: "5"},
+    {from: "Decimal28Dense", to: "Decimal38Sparse", major: "DecimalDenseDecimalSparse",arraySize: "6"},
+    {from: "Decimal38Dense", to: "Decimal38Sparse", major: "DecimalDenseDecimalSparse", arraySize: "6"},
+
+    {from: "Decimal28Sparse", to: "Decimal28Dense", major: "DecimalSparseDecimalDense",  arraySize: "3"},
+    {from: "Decimal28Sparse", to: "Decimal38Dense", major: "DecimalSparseDecimalDense", arraySize: "4"},
+    {from: "Decimal38Sparse", to: "Decimal38Dense", major: "DecimalSparseDecimalDense", arraySize: "4"},
+
+    {from: "Decimal28Dense", to: "Decimal38Dense", major: "DecimalSimilar", arraySize: "4"},
+    {from: "Decimal28Sparse", to: "Decimal38Sparse", major: "DecimalSimilar", arraySize: "6"},
+
+    {from: "Int", to: "Decimal9", major: "IntDecimal", javatype: "int"},
+    {from: "Int", to: "Decimal18", major: "IntDecimal", javatype: "long"},
+    {from: "Int", to: "Decimal28Sparse", major: "IntDecimal", arraySize: "5"},
+    {from: "Int", to: "Decimal38Sparse", major: "IntDecimal", arraySize: "6"},
+
+    {from: "BigInt", to: "Decimal9", major: "BigIntDecimal", javatype: "int"},
+    {from: "BigInt", to: "Decimal18", major: "BigIntDecimal", javatype: "long"},
+    {from: "BigInt", to: "Decimal28Sparse", major: "BigIntDecimal", arraySize: "5"},
+    {from: "BigInt", to: "Decimal38Sparse", major: "BigIntDecimal", arraySize: "6"},
+
+    {from: "Decimal9", to: "Int", major: "DecimalSimpleInt", javatype: "int"},
+    {from: "Decimal18", to: "Int", major: "DecimalSimpleInt", javatype: "int"},
+    {from: "Decimal28Sparse", to: "Int", major: "DecimalComplexInt", javatype: "int"},
+    {from: "Decimal38Sparse", to: "Int", major: "DecimalComplexInt", javatype: "int"},
+
+    {from: "Decimal9", to: "BigInt", major: "DecimalSimpleBigInt", javatype: "long"},
+    {from: "Decimal18", to: "BigInt", major: "DecimalSimpleBigInt", javatype: "long"},
+    {from: "Decimal28Sparse", to: "BigInt", major: "DecimalComplexBigInt", javatype: "long"},
+    {from: "Decimal38Sparse", to: "BigInt", major: "DecimalComplexBigInt", javatype: "long"},
+
+    {from: "Decimal9", to: "Float4", major: "DecimalSimpleFloat", javatype: "float"},
+    {from: "Decimal18", to: "Float4", major: "DecimalSimpleFloat", javatype: "float"},
+    {from: "Decimal28Sparse", to: "Float4", major: "DecimalComplexFloat", javatype: "float"},
+    {from: "Decimal38Sparse", to: "Float4", major: "DecimalComplexFloat", javatype: "float"},
+
+    {from: "Float4", to: "Decimal9", major: "FloatDecimalSimple", javatype: "int"},
+    {from: "Float4", to: "Decimal18", major: "FloatDecimalSimple", javatype: "long"},
+    {from: "Float4", to: "Decimal28Sparse", major: "FloatDecimalComplex", arraySize: "5"},
+    {from: "Float4", to: "Decimal38Sparse", major: "FloatDecimalComplex", arraySize: "6"},
+
+    {from: "Float8", to: "Decimal9", major: "DoubleDecimalSimple", javatype: "int"},
+    {from: "Float8", to: "Decimal18", major: "DoubleDecimalSimple", javatype: "long"},
+    {from: "Float8", to: "Decimal28Sparse", major: "DoubleDecimalComplex", arraySize: "5"},
+    {from: "Float8", to: "Decimal38Sparse", major: "DoubleDecimalComplex", arraySize: "6"}
+
+    {from: "Decimal9", to: "Float8", major: "DecimalSimpleDouble", javatype: "double"},
+    {from: "Decimal18", to: "Float8", major: "DecimalSimpleDouble", javatype: "double"},
+    {from: "Decimal28Sparse", to: "Float8", major: "DecimalComplexDouble", javatype: "double"},
+    {from: "Decimal38Sparse", to: "Float8", major: "DecimalComplexDouble", javatype: "double"},
+
+    {from: "VarChar", to: "Decimal9", major: "VarCharDecimalSimple"},
+    {from: "VarChar", to: "Decimal18", major: "VarCharDecimalSimple"},
+    {from: "VarChar", to: "Decimal28Sparse", major: "VarCharDecimalComplex", arraySize: "5"},
+    {from: "VarChar", to: "Decimal38Sparse", major: "VarCharDecimalComplex", arraySize: "6"},
+
+    {from: "Decimal9", to: "VarChar", major: "DecimalSimpleVarChar", bufferSize: "11", javatype: "int"},
+    {from: "Decimal18", to: "VarChar", major: "DecimalSimpleVarChar", bufferSize: "20", javatype: "long"},
+    {from: "Decimal28Sparse", to: "VarChar", major: "DecimalComplexVarChar", bufferSize: "30", arraySize: "5"},
+    {from: "Decimal38Sparse", to: "VarChar", major: "DecimalComplexVarChar", bufferSize: "40", arraySize: "6"}
   ]
 } 
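
Each entry above drives the FreeMarker cast templates shown later in this commit: "major" selects the template body, while "arraySize", "javatype" and "bufferSize" parameterize it. As a rough illustration of the simplest categories (IntDecimal and DecimalSimpleInt), casting between an integer and a small decimal amounts to scaling by a power of ten. The sketch below uses made-up names and is not the generated holder code:

    // Illustrative sketch only: an int-to-Decimal9-style cast multiplies by 10^scale,
    // and the reverse cast divides, dropping the fraction.
    public class DecimalScalingSketch {
      static long toUnscaled(int value, int scale) {
        return (long) (value * Math.pow(10, scale));   // int -> decimal
      }
      static int toInt(long unscaled, int scale) {
        return (int) (unscaled / Math.pow(10, scale)); // decimal -> int, fraction truncated
      }
      public static void main(String[] args) {
        long d = toUnscaled(12, 2);                    // 12 stored as 1200 with scale 2 (12.00)
        System.out.println(d + " -> " + toInt(d, 2));  // prints 1200 -> 12
      }
    }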

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/data/DecimalTypes.tdd
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/data/DecimalTypes.tdd b/exec/java-exec/src/main/codegen/data/DecimalTypes.tdd
new file mode 100644
index 0000000..423fe89
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/data/DecimalTypes.tdd
@@ -0,0 +1,26 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+{
+  { decimalTypes: [
+      {name: "Decimal28Sparse", storage: "5"},
+      {name: "Decimal38Sparse", storage: "6"},
+      {name: "Decimal28Dense", storage: "4"},
+      {name: "Decimal38Dense", storage: "3"},
+      {name: "Decimal9", storage: "int"},
+      {name: "Decimal18", storage: "long"}
+    ]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd b/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd
index 2d8fd63..c5ae64e 100644
--- a/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd
+++ b/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd
@@ -59,10 +59,7 @@
         { class: "Float4", javaType: "float" },
         { class: "Time", javaType: "int"},
         { class: "IntervalYear", javaType: "int"}
-        <#-- 
-        { class: "Decimal4", maxPrecisionDigits: 8, scale: 4, javaType: "float"},
-        { class: "Date" }
-        -->
+        { class: "Decimal9", maxPrecisionDigits: 9},
       ]
     },
     {
@@ -76,8 +73,8 @@
         { class: "Float8", javaType: "double" , boxedType: "Double"},
         { class: "Date", javaType: "long"},
         { class: "TimeStamp", javaType: "long"}
+        { class: "Decimal18", maxPrecisionDigits: 18},
         <#--
-        { class: "Decimal8", maxPrecisionDigits: 18, scale: 4, javaType: "double", boxedType: "Double"},
         { class: "Money", maxPrecisionDigits: 2, scale: 1, },
         -->
       ]
@@ -98,16 +95,17 @@
       minor: [
         { class: "Interval", daysOffset: 4, milliSecondsOffset: 8}
       ]
-    }
-    <#--
+    },
     {
       major: "Fixed",
       width: 12,
       javaType: "ByteBuf",
       minor: [
-        { class: "Decimal12", maxPrecisionDigits: 28, scale: 5},
+        <#--
         { class: "TimeTZ" },
         { class: "Interval" }
+        -->
+        { class: "Decimal28Dense", maxPrecisionDigits: 28, nDecimalDigits: 3}
       ]
     },
     {
@@ -115,10 +113,25 @@
       width: 16,
       javaType: "ByteBuf",
       minor: [
-        { class: "Decimal16", maxPrecisionDigits: 37, scale: 6}
+        { class: "Decimal38Dense", maxPrecisionDigits: 38, nDecimalDigits: 4}
+      ]
+    },
+    {
+      major: "Fixed",
+      width: 24,
+      javaType: "ByteBuf",
+      minor: [
+        { class: "Decimal38Sparse", maxPrecisionDigits: 38, nDecimalDigits: 6}
+      ]
+    },
+    {
+      major: "Fixed",
+      width: 20,
+      javaType: "ByteBuf",
+      minor: [
+        { class: "Decimal28Sparse", maxPrecisionDigits: 28, nDecimalDigits: 5}
       ]
     },
-    -->
     {
       major: "VarLen",
       width: 4,

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/includes/vv_imports.ftl
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/includes/vv_imports.ftl b/exec/java-exec/src/main/codegen/includes/vv_imports.ftl
index 8f1895d..bca6e3c 100644
--- a/exec/java-exec/src/main/codegen/includes/vv_imports.ftl
+++ b/exec/java-exec/src/main/codegen/includes/vv_imports.ftl
@@ -46,6 +46,8 @@ import java.io.InputStreamReader;
 import java.sql.Date;
 import java.sql.Time;
 import java.sql.Timestamp;
+import java.math.BigDecimal;
+import java.math.BigInteger;
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalDenseDecimalSparse.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalDenseDecimalSparse.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalDenseDecimalSparse.java
new file mode 100644
index 0000000..a486cf2
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalDenseDecimalSparse.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+<#list cast.types as type>
+<#if type.major == "DecimalDenseDecimalSparse">
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc{
+
+    @Param ${type.from}Holder in;
+    @Workspace ByteBuf buffer;
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        int size = (${type.arraySize} * (org.apache.drill.common.util.DecimalUtility.integerSize));
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+    }
+
+    public void eval() {
+        out.buffer = buffer;
+        out.start = 0;
+
+        // Reinitialize the buffer every time
+        for (int i = 0; i < ${type.arraySize}; i++) {
+            out.setInteger(i, 0);
+        }
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+        out.sign = in.sign;
+
+        /* We store base 1 billion digits in our representation, each of which requires
+         * only 30 bits, while a typical integer provides 32 bits. The dense representation
+         * shifts bits around to utilize the two spare bits; to get back to the sparse
+         * representation we rearrange the bits so that each digit again occupies a full 32-bit integer.
+         */
+
+        byte[] intermediateBytes = new byte[(in.nDecimalDigits * org.apache.drill.common.util.DecimalUtility.integerSize) + 1];
+
+        int[] mask = {0x03, 0x0F, 0x3F, 0xFF};
+        int[] reverseMask = {0xFC, 0xF0, 0xC0, 0x00};
+
+        <#if (type.from == "Decimal38Dense")>
+        int maskIndex = 0;
+        int shiftOrder = 6;
+        byte shiftBits = 0x00;
+        intermediateBytes[0] = (byte) (in.buffer.getByte(0) & 0x7F);
+        <#elseif (type.from == "Decimal28Dense")>
+        int maskIndex = 1;
+        int shiftOrder = 4;
+        byte shiftBits = (byte) ((in.buffer.getByte(0) & 0x03) << shiftOrder);
+        intermediateBytes[0] = (byte) (((in.buffer.getByte(0) & 0x3C) & 0xFF) >>> 2);
+        </#if>
+
+        int intermediateIndex = 1;
+        int inputIndex = in.start + 1;
+
+        while (intermediateIndex < in.WIDTH) {
+
+            intermediateBytes[intermediateIndex] = (byte) ((shiftBits) | (((in.buffer.getByte(inputIndex) & reverseMask[maskIndex]) & 0xFF) >>> (8 - shiftOrder)));
+
+            shiftBits = (byte) ((in.buffer.getByte(inputIndex) & mask[maskIndex]) << shiftOrder);
+
+            inputIndex++;
+            intermediateIndex++;
+
+            if (((intermediateIndex - 1) % org.apache.drill.common.util.DecimalUtility.integerSize) == 0) {
+                shiftBits = (byte) ((shiftBits & 0xFF) >>> 2);
+                maskIndex++;
+                shiftOrder -= 2;
+            }
+
+        }
+
+        /* copy the last byte */
+        intermediateBytes[intermediateIndex] = shiftBits;
+
+        /* We have shifted the bits around and each digit is now represented by 32 bits.
+         * Transfer the bytes into an integer array and separate out the scale and
+         * integer parts of the decimal, padding the scale part with zeroes if needed.
+         */
+        int[] intermediate = new int[(intermediateBytes.length/org.apache.drill.common.util.DecimalUtility.integerSize) + 1];
+
+        java.nio.ByteBuffer wrapper = java.nio.ByteBuffer.wrap(intermediateBytes);
+        intermediate[0] = wrapper.get(0);
+
+        int intermediateIdx = 1;
+
+        for (int i = 1; i < intermediate.length; i++) {
+            intermediate[i] = wrapper.getInt(intermediateIdx);
+            intermediateIdx += 4;
+        }
+
+        int actualDigits;
+        int srcIndex = intermediate.length - 1;
+        int dstIndex = out.nDecimalDigits - 1;
+
+        // break the scale and integer part and pad zeroes
+        if (in.scale > 0 && (actualDigits = (in.scale % org.apache.drill.common.util.DecimalUtility.MAX_DIGITS)) > 0) {
+
+            int paddedDigits = org.apache.drill.common.util.DecimalUtility.MAX_DIGITS - actualDigits;
+            int padding = (int) (Math.pow(10, paddedDigits));
+            int transferDigitMask = (int) (Math.pow(10, actualDigits));
+
+            /* copy the remaining scale over to the last decimal digit */
+            out.setInteger(dstIndex, ((intermediate[srcIndex] % transferDigitMask) * (padding)));
+            dstIndex--;
+
+            while (srcIndex > 0) {
+                out.setInteger(dstIndex, ((intermediate[srcIndex]/transferDigitMask) + ((intermediate[srcIndex - 1] % transferDigitMask) * padding)));
+
+                dstIndex--;
+                srcIndex--;
+            }
+
+            out.setInteger(dstIndex, (intermediate[0]/transferDigitMask));
+        } else {
+            for (; srcIndex >= 0; srcIndex--, dstIndex--)
+                out.setInteger(dstIndex, intermediate[srcIndex]);
+        }
+    }
+}
+</#if>
+</#list>
\ No newline at end of file
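
The dense/sparse machinery above rests on one fact: a base 1 billion digit is always smaller than 2^30, so it needs only 30 of the 32 bits of an int. The sketch below only illustrates that digit decomposition with a helper of our own; the actual bit shifting lives in the template above.

    import java.math.BigInteger;

    // Illustration: split an unscaled decimal value into base 1 billion digits,
    // most significant first. Each digit is < 10^9 and therefore fits in 30 bits.
    public class Base1BillionSketch {
      static final BigInteger BASE = BigInteger.valueOf(1_000_000_000L);

      static int[] toDigits(BigInteger unscaled, int nDigits) {
        int[] digits = new int[nDigits];
        for (int i = nDigits - 1; i >= 0; i--) {
          BigInteger[] qr = unscaled.divideAndRemainder(BASE);
          digits[i] = qr[1].intValue();
          unscaled = qr[0];
        }
        return digits;
      }

      public static void main(String[] args) {
        int[] d = toDigits(new BigInteger("1234567890123456789"), 3);
        System.out.println(java.util.Arrays.toString(d)); // [1, 234567890, 123456789]
      }
    }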

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalFloat.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalFloat.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalFloat.java
new file mode 100644
index 0000000..82f9a43
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalFloat.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+
+<#list cast.types as type>
+<#if type.major == "DecimalSimpleFloat" || type.major == "DecimalSimpleDouble"> <#-- Cast function template for conversion from Decimal9, Decimal18 to Float4 and Float8-->
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+@Param ${type.from}Holder in;
+@Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+    }
+
+    public void eval() {
+
+        // Divide the unscaled value by 10^scale to get the floating point value
+        out.value = (${type.javatype}) (in.value / Math.pow(10, in.scale));
+    }
+}
+<#elseif type.major == "DecimalComplexFloat" || type.major == "DecimalComplexDouble"> <#-- Cast function template for conversion from Decimal9, Decimal18 to Float4 -->
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+@Param ${type.from}Holder in;
+@Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+    }
+
+    public void eval() {
+
+        java.math.BigDecimal bigDecimal = org.apache.drill.common.util.DecimalUtility.getBigDecimalFromByteBuf(in.buffer, in.start, in.nDecimalDigits, in.scale, true);
+        out.value = bigDecimal.${type.javatype}Value();
+    }
+}
+</#if>
+</#list>
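
Both branches above read a decimal as unscaled / 10^scale. A hedged, standalone way to see the equivalence of the simple path (plain division) and the complex path (going through BigDecimal, as DecimalUtility does) is:

    import java.math.BigDecimal;

    // Sketch: two equivalent readings of an (unscaled, scale) decimal as a double.
    public class DecimalToFloatSketch {
      public static void main(String[] args) {
        long unscaled = 12345;   // represents 123.45 when scale = 2
        int scale = 2;

        double fast  = unscaled / Math.pow(10, scale);                      // simple-decimal path
        double exact = BigDecimal.valueOf(unscaled, scale).doubleValue();   // BigDecimal path

        System.out.println(fast + " == " + exact);                          // 123.45 == 123.45
      }
    }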

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalInt.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalInt.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalInt.java
new file mode 100644
index 0000000..646f52c
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalInt.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+
+<#list cast.types as type>
+
+<#if type.major == "DecimalSimpleInt" || type.major == "DecimalSimpleBigInt"> <#-- Cast function template for conversion from Decimal9, Decimal18 to Int and BigInt -->
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+@Param ${type.from}Holder in;
+@Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+    }
+
+    public void eval() {
+
+        // Assign the integer part of the decimal to the output holder
+        out.value = (${type.javatype}) ((in.value / Math.pow(10, in.scale)));
+    }
+}
+
+<#elseif type.major == "DecimalComplexInt" || type.major == "DecimalComplexBigInt"> <#-- Cast function template for conversion from Decimal28Sparse, Decimal38Sparse to Int and BigInt -->
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+@Param ${type.from}Holder in;
+@Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+    }
+
+    public void eval() {
+
+        // Get the index, where the integer part of the decimal ends
+        int integerEndIndex = in.nDecimalDigits - org.apache.drill.common.util.DecimalUtility.roundUp(in.scale);
+
+        for (int i = 0 ; i < integerEndIndex; i++) {
+            // We store values as base 1 billion integers, use this to compute the output (we don't care about overflows)
+            out.value = (${type.javatype}) ((out.value * org.apache.drill.common.util.DecimalUtility.DIGITS_BASE) + in.getInteger(i));
+        }
+
+        if (in.sign == true) {
+            out.value *= -1;
+        }
+    }
+}
+
+</#if> <#-- type.major -->
+</#list>
\ No newline at end of file
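
For the complex decimals the integer part is reassembled by treating each stored int as one base 1 billion digit, the same way a multi-digit number is read left to right. A simplified, self-contained version of that loop, with a plain array standing in for the holder's buffer:

    // Sketch: accumulate the integer part of a sparse decimal stored as base 1e9 digits.
    // digits[] and the sign flag stand in for the holder fields used by the template.
    public class DecimalToBigIntSketch {
      static final long DIGITS_BASE = 1_000_000_000L;

      static long integerPart(int[] digits, int integerEndIndex, boolean negative) {
        long value = 0;
        for (int i = 0; i < integerEndIndex; i++) {
          value = value * DIGITS_BASE + digits[i];  // shift left by one base-1e9 digit, add the next
        }
        return negative ? -value : value;
      }

      public static void main(String[] args) {
        // digits for 1234567890123.45 with scale 2: the integer part uses the first two digits
        int[] digits = {1234, 567890123, 450000000};
        System.out.println(integerPart(digits, 2, false));  // prints 1234567890123
      }
    }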

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalSimilar.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalSimilar.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalSimilar.java
new file mode 100644
index 0000000..0d99569
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalSimilar.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+<#-- Template for converting between similar types of decimal. Decimal28Dense -> Decimal38Dense & Decimal28Sparse -> Decimal38Sparse -->
+
+<#list cast.types as type>
+<#if type.major == "DecimalSimilar">
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc{
+
+    @Param ${type.from}Holder in;
+    @Workspace ByteBuf buffer;
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        int size = (${type.arraySize} * (org.apache.drill.common.util.DecimalUtility.integerSize));
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+    }
+
+    public void eval() {
+
+        out.buffer = buffer;
+        out.start = 0;
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+        out.sign = in.sign;
+
+        // Reinitialize the buffer every time
+        for (int i = 0; i < ${type.arraySize}; i++) {
+            out.setInteger(i, 0);
+        }
+
+        int inputIdx = in.nDecimalDigits - 1;
+        int outputIdx = out.nDecimalDigits - 1;
+
+        for (; inputIdx >= 0; inputIdx--, outputIdx--) {
+            out.setInteger(outputIdx, in.getInteger(inputIdx));
+        }
+    }
+}
+</#if> <#-- type.major -->
+</#list>
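
Widening between two sparse (or two dense) layouts is just a right-aligned copy; the extra leading digits of the wider type stay zero. A minimal sketch of that copy, with plain arrays in place of the holders:

    // Sketch: copy a narrower digit array into a wider one, right-aligned,
    // leaving the new leading digits as zero (mirrors the template's eval loop).
    public class WideningCopySketch {
      static int[] widen(int[] in, int outSize) {
        int[] out = new int[outSize];                 // already zero-initialized
        int src = in.length - 1, dst = outSize - 1;
        for (; src >= 0; src--, dst--) {
          out[dst] = in[src];
        }
        return out;
      }

      public static void main(String[] args) {
        int[] narrow = {12, 345678901, 234000000};    // e.g. a Decimal28Sparse-style digit array
        System.out.println(java.util.Arrays.toString(widen(narrow, 6)));
        // [0, 0, 0, 12, 345678901, 234000000]
      }
    }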

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalSparseDecimalDense.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalSparseDecimalDense.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalSparseDecimalDense.java
new file mode 100644
index 0000000..4798b34
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalSparseDecimalDense.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+<#list cast.types as type>
+<#if type.major == "DecimalSparseDecimalDense">
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc{
+
+    @Param ${type.from}Holder in;
+    @Workspace ByteBuf buffer;
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        int size = (${type.arraySize} * (org.apache.drill.common.util.DecimalUtility.integerSize));
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+    }
+
+    public void eval() {
+
+        out.buffer = buffer;
+        out.start = 0;
+
+        // Reinitialize the buffer every time
+        for (int i = 0; i < ${type.arraySize}; i++) {
+            out.setInteger(i, 0);
+        }
+
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+
+        out.sign = in.sign;
+
+        /* Before converting from a sparse representation to a dense representation
+         * we need to convert it to an intermediate representation. In the sparse
+         * representation we separate out the scale and the integer part of the decimal
+         * and pad the scale part with additional zeroes for ease of performing arithmetic
+         * operations. In the intermediate representation we strip out the extra zeroes and
+         * combine the scale and integer part.
+         */
+        int[] intermediate = new int[in.nDecimalDigits - 1];
+
+        int index = in.nDecimalDigits - 1;
+        int actualDigits;
+
+        if (in.scale > 0 && (actualDigits = (in.scale % org.apache.drill.common.util.DecimalUtility.MAX_DIGITS)) > 0) {
+
+            int paddedDigits = org.apache.drill.common.util.DecimalUtility.MAX_DIGITS - actualDigits;
+
+            int paddedMask = (int) Math.pow(10, paddedDigits);
+
+            /* The scale does not completely occupy a decimal digit, so it was
+             * padded with zeroes for ease of arithmetic. Truncate the extra
+             * zeroes and move the digits to the right.
+             */
+            int temp = (in.getInteger(index)/paddedMask);
+            index--;
+
+            while(index >= 0) {
+
+                int transferDigits = (in.getInteger(index) % (paddedMask));
+
+                intermediate[index] = (int) (temp + (Math.pow(10, actualDigits) * transferDigits));
+
+                temp = (in.getInteger(index)/(paddedMask));
+
+                index--;
+            }
+        } else {
+
+            /* If there is no scale, or the scale fits perfectly within a decimal digit,
+             * then no zeroes were padded. There can be at most 38 digits, which need
+             * only 5 decimal digits to be stored, so simply copy over the integers.
+             */
+            for (int i = 1; i < in.nDecimalDigits; i++)
+                intermediate[i - 1] = in.getInteger(i);
+
+        }
+
+        /* Now we have an intermediate representation in the array intermediate[].
+         * Every number in the intermediate representation is a base 1 billion digit,
+         * which requires only 30 bits, while every integer provides 32 bits.
+         * By shifting the bits around we can utilize the two spare bits of every
+         * number and create a dense representation.
+         */
+
+          /* Allocate a byte array */
+          int size = (((intermediate.length - 1) * org.apache.drill.common.util.DecimalUtility.integerSize) + 1);
+          byte[] intermediateBytes = new byte[size];
+          java.nio.ByteBuffer wrapper = java.nio.ByteBuffer.wrap(intermediateBytes);
+
+          wrapper.put((byte) intermediate[0]);
+
+          for (int i = 1; i < intermediate.length; i++) {
+            wrapper.put(java.nio.ByteBuffer.allocate(org.apache.drill.common.util.DecimalUtility.integerSize).putInt(intermediate[i]).array());
+          }
+
+          final int[] mask = {0x03, 0x0F, 0x3F, 0xFF};
+          int maskIndex = 0;
+          int shiftOrder = 2;
+
+          // Start just after the last integer and shift bits to the right
+          index = size - (org.apache.drill.common.util.DecimalUtility.integerSize + 1);
+
+          while (index >= 0) {
+
+              /* get the last bits that need to shifted to the next byte */
+              byte shiftBits = (byte) ((intermediateBytes[index] & mask[maskIndex]) << (8 - shiftOrder));
+
+              int shiftOrder1 = ((index % org.apache.drill.common.util.DecimalUtility.integerSize) == 0) ? shiftOrder - 2 : shiftOrder;
+
+              /* transfer the bits from the left to the right */
+              intermediateBytes[index + 1] = (byte) (((intermediateBytes[index + 1] & 0xFF) >>> (shiftOrder1)) | shiftBits);
+
+              index--;
+
+              if ((index % org.apache.drill.common.util.DecimalUtility.integerSize) == 0) {
+                  /* We are on a border */
+                  shiftOrder += 2;
+                  maskIndex++;
+              }
+          }
+
+          <#if (type.from == "Decimal28Sparse") && (type.to == "Decimal38Dense")>
+          /* Decimal38Dense representation has four bytes more than that needed to
+           * represent Decimal28Dense. So our first four bytes are empty in that scenario
+           */
+          int dstIndex = 4;
+          <#else>
+          int dstIndex = 0;
+          </#if>
+
+          // Set the bytes in the buffer
+          out.buffer.setBytes(dstIndex, intermediateBytes, 1, (size - 1));
+    }
+}
+</#if>
+</#list>
\ No newline at end of file
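
The subtle part of the sparse layout is that a scale which does not fill a whole base 1 billion digit is padded with trailing zeroes, and the template above strips or re-adds that padding when converting. Assuming MAX_DIGITS is 9, a small illustration of the padding arithmetic:

    // Sketch: pad a partial fractional group with trailing zeroes, as the sparse layout does,
    // so that arithmetic can treat every base 1e9 group uniformly.
    public class ScalePaddingSketch {
      static final int MAX_DIGITS = 9;

      static int padPartialGroup(int partialValue, int actualDigits) {
        int paddedDigits = MAX_DIGITS - actualDigits;
        return partialValue * (int) Math.pow(10, paddedDigits);
      }

      public static void main(String[] args) {
        // fraction ".12345678901" (scale 11): groups are 123456789 and "01";
        // the partial group 01 becomes 010000000 after padding
        System.out.println(padPartialGroup(1, 2));   // prints 10000000
      }
    }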


[10/10] git commit: DRILL-468 Support for FileSystem partitions

Posted by ja...@apache.org.
DRILL-468 Support for FileSystem partitions


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/69c571cc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/69c571cc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/69c571cc

Branch: refs/heads/master
Commit: 69c571ccd841b7bcda1c38979716862690cba696
Parents: 54287d0
Author: Steven Phillips <sp...@maprtech.com>
Authored: Wed Mar 26 11:50:04 2014 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Tue Apr 22 20:06:03 2014 -0700

----------------------------------------------------------------------
 distribution/src/resources/drill-override.conf  |  5 +-
 .../org/apache/drill/exec/ExecConstants.java    |  5 +-
 .../drill/exec/physical/impl/ScanBatch.java     | 69 +++++++++++++++++++-
 .../drill/exec/store/dfs/DrillPathFilter.java   | 31 +++++++++
 .../drill/exec/store/dfs/FileSelection.java     | 31 ++++++---
 .../exec/store/dfs/WorkspaceSchemaFactory.java  |  2 +-
 .../exec/store/dfs/easy/EasyFormatPlugin.java   | 52 +++++++++++++--
 .../exec/store/dfs/easy/EasyGroupScan.java      | 18 +++--
 .../drill/exec/store/dfs/easy/EasySubScan.java  | 14 +++-
 .../dfs/shim/fallback/FallbackFileSystem.java   |  9 ++-
 .../exec/store/parquet/ParquetFormatPlugin.java | 20 +-----
 .../exec/store/parquet/ParquetGroupScan.java    | 19 ++++--
 .../exec/store/parquet/ParquetRowGroupScan.java | 17 +++--
 .../store/parquet/ParquetScanBatchCreator.java  | 60 +++++++++++++----
 .../src/main/resources/drill-module.conf        |  3 +-
 .../org/apache/drill/TestExampleQueries.java    | 15 +++++
 .../exec/store/text/TextRecordReaderTest.java   |  2 +-
 .../src/test/resources/storage-engines.json     | 13 ----
 .../resources/store/text/data/d1/regions.csv    |  5 ++
 .../test/resources/store/text/data/regions.csv  |  5 ++
 .../src/test/resources/store/text/regions.csv   |  5 --
 pom.xml                                         |  2 +-
 .../apache/drill/jdbc/test/TestJdbcQuery.java   |  1 +
 23 files changed, 313 insertions(+), 90 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/distribution/src/resources/drill-override.conf
----------------------------------------------------------------------
diff --git a/distribution/src/resources/drill-override.conf b/distribution/src/resources/drill-override.conf
index a5e5522..a9316a9 100644
--- a/distribution/src/resources/drill-override.conf
+++ b/distribution/src/resources/drill-override.conf
@@ -56,7 +56,8 @@ drill.exec: {
       text: {
         buffer.size: 262144,
         batch.size: 4000
-      }
+      },
+      partition.column.label: "dir"
     }
   },
   metrics : {
@@ -89,7 +90,7 @@ drill.exec: {
     executor.threads: 4
   },
   trace: {
-    directory: "/var/log/drill",
+    directory: "/tmp/drill-trace",
     filesystem: "file:///"
   },
   tmp: {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index f88b1b4..34bde9b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -58,7 +58,8 @@ public interface ExecConstants {
   public static final String EXTERNAL_SORT_SPILL_THRESHOLD = "drill.exec.sort.external.spill.threshold";
   public static final String EXTERNAL_SORT_SPILL_DIRS = "drill.exec.sort.external.spill.directories";
   public static final String EXTERNAL_SORT_SPILL_FILESYSTEM = "drill.exec.sort.external.spill.fs";
-  public static final String TEXT_LINE_READER_BUFFER_SIZE = "drill.exec.storage.file.text.buffer.size";
   public static final String TEXT_LINE_READER_BATCH_SIZE = "drill.exec.storage.file.text.batch.size";
-
+  public static final String TEXT_LINE_READER_BUFFER_SIZE = "drill.exec.storage.file.text.buffer.size";
+  public static final String FILESYSTEM_PARTITION_COLUMN_LABEL = "drill.exec.storage.file.partition.column.label";
+  
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
index ace2677..e93fbcc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
@@ -17,11 +17,19 @@
  */
 package org.apache.drill.exec.physical.impl;
 
+import java.util.Collections;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 
+import com.google.common.collect.Lists;
+import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -36,9 +44,13 @@ import org.apache.drill.exec.record.WritableBatch;
 import org.apache.drill.exec.record.selection.SelectionVector2;
 import org.apache.drill.exec.record.selection.SelectionVector4;
 import org.apache.drill.exec.store.RecordReader;
+import org.apache.drill.exec.vector.AllocationHelper;
+import org.apache.drill.exec.vector.NullableVarCharVector;
 import org.apache.drill.exec.vector.ValueVector;
 
 import com.google.common.collect.Maps;
+import org.apache.drill.exec.vector.VarCharVector;
+import org.apache.drill.exec.vector.allocator.VectorAllocator;
 
 /**
  * Record batch used for a particular scan. Operators against one or more
@@ -56,14 +68,29 @@ public class ScanBatch implements RecordBatch {
   private RecordReader currentReader;
   private BatchSchema schema;
   private final Mutator mutator = new Mutator();
+  private Iterator<String[]> partitionColumns;
+  private String[] partitionValues;
+  List<ValueVector> partitionVectors;
+  List<Integer> selectedPartitionColumns;
+  private String partitionColumnDesignator;
 
-  public ScanBatch(FragmentContext context, Iterator<RecordReader> readers) throws ExecutionSetupException {
+  public ScanBatch(FragmentContext context, Iterator<RecordReader> readers, List<String[]> partitionColumns, List<Integer> selectedPartitionColumns) throws ExecutionSetupException {
     this.context = context;
     this.readers = readers;
     if (!readers.hasNext())
       throw new ExecutionSetupException("A scan batch must contain at least one reader.");
     this.currentReader = readers.next();
     this.currentReader.setup(mutator);
+    this.partitionColumns = partitionColumns.iterator();
+    this.partitionValues = this.partitionColumns.hasNext() ? this.partitionColumns.next() : null;
+    this.selectedPartitionColumns = selectedPartitionColumns;
+    DrillConfig config = context.getConfig(); // config can be null in unit tests that drive SimpleRootExec directly, so fall back to "dir" below
+    this.partitionColumnDesignator = config == null ? "dir" : config.getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
+    addPartitionVectors();
+  }
+
+  public ScanBatch(FragmentContext context, Iterator<RecordReader> readers) throws ExecutionSetupException {
+    this(context, readers, Collections.EMPTY_LIST, Collections.EMPTY_LIST);
   }
 
   @Override
@@ -101,7 +128,10 @@ public class ScanBatch implements RecordBatch {
         }
         currentReader.cleanup();
         currentReader = readers.next();
+        partitionValues = partitionColumns.hasNext() ? partitionColumns.next() : null;
+        mutator.removeAllFields();
         currentReader.setup(mutator);
+        addPartitionVectors();
       } catch (ExecutionSetupException e) {
         this.context.fail(e);
         releaseAssets();
@@ -109,6 +139,7 @@ public class ScanBatch implements RecordBatch {
       }
     }
 
+    populatePartitionVectors();
     if (schemaChanged) {
       schemaChanged = false;
       return IterOutcome.OK_NEW_SCHEMA;
@@ -117,6 +148,42 @@ public class ScanBatch implements RecordBatch {
     }
   }
 
+  private void addPartitionVectors() {
+    partitionVectors = Lists.newArrayList();
+    for (int i : selectedPartitionColumns) {
+      MaterializedField field;
+      ValueVector v;
+      if (partitionValues.length > i) {
+        field = MaterializedField.create(SchemaPath.getSimplePath(partitionColumnDesignator + i), Types.required(MinorType.VARCHAR));
+        v = new VarCharVector(field, context.getAllocator());
+      } else {
+        field = MaterializedField.create(SchemaPath.getSimplePath(partitionColumnDesignator + i), Types.optional(MinorType.VARCHAR));
+        v = new NullableVarCharVector(field, context.getAllocator());
+      }
+      mutator.addField(v);
+      partitionVectors.add(v);
+    }
+  }
+
+  private void populatePartitionVectors() {
+    for (int i : selectedPartitionColumns) {
+      if (partitionValues.length > i) {
+        VarCharVector v = (VarCharVector) partitionVectors.get(i);
+        String val = partitionValues[i];
+        byte[] bytes = val.getBytes();
+        AllocationHelper.allocate(v, recordCount, val.length());
+        for (int j = 0; j < recordCount; j++) {
+          v.getMutator().set(j, bytes);
+        }
+        v.getMutator().setValueCount(recordCount);
+      } else {
+        NullableVarCharVector v = (NullableVarCharVector) partitionVectors.get(i);
+        AllocationHelper.allocate(v, recordCount, 0);
+        v.getMutator().setValueCount(recordCount);
+      }
+    }
+  }
+
   @Override
   public SelectionVector2 getSelectionVector2() {
     throw new UnsupportedOperationException();
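
The net effect of addPartitionVectors/populatePartitionVectors above is that every record in a batch carries the same dirN values, taken from the directory components of the file being read, and a missing level becomes null. A rough standalone sketch of that fill, with arrays in place of VarChar vectors:

    // Sketch: what populatePartitionVectors boils down to. Every row in the batch
    // carries the same partition values; a missing directory level shows up as null.
    public class PartitionFillSketch {
      static String[][] fill(String[] partitionValues, int selectedColumns, int recordCount) {
        String[][] columns = new String[selectedColumns][recordCount];
        for (int i = 0; i < selectedColumns; i++) {
          String val = i < partitionValues.length ? partitionValues[i] : null;
          for (int j = 0; j < recordCount; j++) {
            columns[i][j] = val;          // same value for every record in the batch
          }
        }
        return columns;
      }

      public static void main(String[] args) {
        // a file two levels below the selection root, e.g. .../1994/Q1/orders.csv
        String[][] cols = fill(new String[] {"1994", "Q1"}, 3, 4);
        System.out.println(java.util.Arrays.deepToString(cols));
        // dir0 = "1994", dir1 = "Q1", dir2 = null for all four records
      }
    }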

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java
new file mode 100644
index 0000000..81c8779
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.dfs;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.Utils;
+
+public class DrillPathFilter extends Utils.OutputFileUtils.OutputFilesFilter {
+  @Override
+  public boolean accept(Path path) {
+    if (path.toString().contains("_metadata")) {
+      return false;
+    }
+    return super.accept(path);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
index 5ab2c1a..14c5ad8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
@@ -20,8 +20,10 @@ package org.apache.drill.exec.store.dfs;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
-import java.util.regex.Pattern;
 
+import com.google.common.base.Preconditions;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -41,21 +43,31 @@ public class FileSelection {
   private List<FileStatus> statuses;
 
   public List<String> files;
+  public String selectionRoot;
 
   public FileSelection() {
   }
-  
+
+  public FileSelection(List<String> files, String selectionRoot, boolean dummy) {
+    this.files = files;
+    this.selectionRoot = selectionRoot;
+  }
   
   public FileSelection(List<String> files, boolean dummy){
     this.files = files;
   }
 
   public FileSelection(List<FileStatus> statuses) {
+    this(statuses, null);
+  }
+
+  public FileSelection(List<FileStatus> statuses, String selectionRoot) {
     this.statuses = statuses;
     this.files = Lists.newArrayList();
     for (FileStatus f : statuses) {
       files.add(f.getPath().toString());
     }
+    this.selectionRoot = selectionRoot;
   }
 
   public boolean containsDirectories(DrillFileSystem fs) throws IOException {
@@ -66,7 +78,7 @@ public class FileSelection {
     return false;
   }
 
-  public FileSelection minusDirectorries(DrillFileSystem fs) throws IOException {
+  public FileSelection minusDirectories(DrillFileSystem fs) throws IOException {
     init(fs);
     List<FileStatus> newList = Lists.newArrayList();
     for (FileStatus p : statuses) {
@@ -75,12 +87,11 @@ public class FileSelection {
         for (FileStatus s : statuses) {
           newList.add(s);
         }
-
       } else {
         newList.add(p);
       }
     }
-    return new FileSelection(newList);
+    return new FileSelection(newList, selectionRoot);
   }
 
   public FileStatus getFirstPath(DrillFileSystem fs) throws IOException {
@@ -116,11 +127,15 @@ public class FileSelection {
     if ( !(path.contains("*") || path.contains("?")) ) {
       Path p = new Path(parent, path);
       FileStatus status = fs.getFileStatus(p);
-      return new FileSelection(Collections.singletonList(status));
+      return new FileSelection(Collections.singletonList(status), p.toUri().getPath());
     } else {
-      FileStatus[] status = fs.getUnderlying().globStatus(new Path(parent, path));
+      Path p = new Path(parent, path);
+      FileStatus[] status = fs.getUnderlying().globStatus(p);
       if(status == null || status.length == 0) return null;
-      return new FileSelection(Lists.newArrayList(status));
+      String[] s = p.toUri().getPath().split("/");
+      String newPath = StringUtils.join(ArrayUtils.subarray(s, 0, s.length - 1), "/");
+      Preconditions.checkState(!newPath.contains("*") && !newPath.contains("?"), String.format("Unsupported selection path: %s", p));
+      return new FileSelection(Lists.newArrayList(status), newPath);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index c77bd92..1551e5a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -81,7 +81,7 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
             logger.debug("File read failed.", e);
           }
         }
-        fileSelection = fileSelection.minusDirectorries(fs);
+        fileSelection = fileSelection.minusDirectories(fs);
       }
 
       for (FormatMatcher m : fileMatchers) {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
index 9c1dc74..6e87da5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
@@ -20,12 +20,16 @@ package org.apache.drill.exec.store.dfs.easy;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.base.AbstractGroupScan;
 import org.apache.drill.exec.physical.impl.ScanBatch;
@@ -41,8 +45,6 @@ import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
 
 import com.beust.jcommander.internal.Lists;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 
 public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements FormatPlugin {
@@ -108,17 +110,55 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
 
   
   RecordBatch getBatch(FragmentContext context, EasySubScan scan) throws ExecutionSetupException {
+    String partitionDesignator = context.getConfig().getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
+    List<SchemaPath> columns = scan.getColumns();
     List<RecordReader> readers = Lists.newArrayList();
+    List<String[]> partitionColumns = Lists.newArrayList();
+    List<Integer> selectedPartitionColumns = Lists.newArrayList();
+    boolean selectAllColumns = false;
+
+    if (columns == null || columns.size() == 0) {
+      selectAllColumns = true;
+    } else {
+      Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));
+      for (SchemaPath column : columns) {
+        Matcher m = pattern.matcher(column.getAsUnescapedPath());
+        if (m.matches()) {
+          scan.getColumns().remove(column);
+          selectedPartitionColumns.add(Integer.parseInt(column.getAsUnescapedPath().toString().substring(partitionDesignator.length())));
+        }
+      }
+    }
+    int numParts = 0;
     for(FileWork work : scan.getWorkUnits()){
-      readers.add(getRecordReader(context, work, scan.getColumns())); 
+      readers.add(getRecordReader(context, work, scan.getColumns()));
+      if (scan.getSelectionRoot() != null) {
+        String[] r = scan.getSelectionRoot().split("/");
+        String[] p = work.getPath().split("/");
+        if (p.length > r.length) {
+          String[] q = ArrayUtils.subarray(p, r.length, p.length - 1);
+          partitionColumns.add(q);
+          numParts = Math.max(numParts, q.length);
+        } else {
+          partitionColumns.add(new String[] {});
+        }
+      } else {
+        partitionColumns.add(new String[] {});
+      }
+    }
+
+    if (selectAllColumns) {
+      for (int i = 0; i < numParts; i++) {
+        selectedPartitionColumns.add(i);
+      }
     }
-    
-    return new ScanBatch(context, readers.iterator());
+
+    return new ScanBatch(context, readers.iterator(), partitionColumns, selectedPartitionColumns);
   }
   
   @Override
   public AbstractGroupScan getGroupScan(FileSelection selection) throws IOException {
-    return new EasyGroupScan(selection, this, null);
+    return new EasyGroupScan(selection, this, null, selection.selectionRoot);
   }
 
   @Override
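
Putting the pieces together: the column label comes from drill.exec.storage.file.partition.column.label (default "dir"), projected columns matching dir[0-9]+ are pulled out of the column list, and for each file the directory components between the selection root and the file name become dir0, dir1, and so on. The sketch below mirrors that path arithmetic with plain JDK calls instead of ArrayUtils; it is an illustration, not the plugin code. With data laid out this way, queries can reference dir0, dir1, ... like ordinary columns.

    import java.util.Arrays;

    // Sketch: derive partition values for one file from its path and the selection root,
    // the same split/subarray idea used in EasyFormatPlugin.getBatch().
    public class PartitionValuesSketch {
      static String[] partitionValues(String selectionRoot, String filePath) {
        String[] r = selectionRoot.split("/");
        String[] p = filePath.split("/");
        if (p.length <= r.length) {
          return new String[] {};                 // file sits directly at the root
        }
        // keep only the directory components between the root and the file name
        return Arrays.copyOfRange(p, r.length, p.length - 1);
      }

      public static void main(String[] args) {
        String[] values = partitionValues("/data/orders", "/data/orders/1994/Q1/part-0.csv");
        System.out.println(Arrays.toString(values));   // [1994, Q1] -> dir0, dir1
      }
    }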

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
index fc2ae2c..68fee34 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
@@ -32,11 +32,8 @@ import org.apache.drill.exec.physical.base.AbstractGroupScan;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.physical.base.Size;
 import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
-import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.dfs.FileSelection;
-import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
-import org.apache.drill.exec.store.easy.text.TextFormatPlugin;
 import org.apache.drill.exec.store.schedule.AffinityCreator;
 import org.apache.drill.exec.store.schedule.AssignmentCreator;
 import org.apache.drill.exec.store.schedule.BlockMapBuilder;
@@ -64,6 +61,7 @@ public class EasyGroupScan extends AbstractGroupScan{
   private ListMultimap<Integer, CompleteFileWork> mappings;
   private List<CompleteFileWork> chunks;
   private List<EndpointAffinity> endpointAffinities;
+  private String selectionRoot;
 
   @JsonCreator
   public EasyGroupScan(
@@ -71,7 +69,8 @@ public class EasyGroupScan extends AbstractGroupScan{
       @JsonProperty("storage") StoragePluginConfig storageConfig, //
       @JsonProperty("format") FormatPluginConfig formatConfig, //
       @JacksonInject StoragePluginRegistry engineRegistry, // 
-      @JsonProperty("columns") List<SchemaPath> columns
+      @JsonProperty("columns") List<SchemaPath> columns,
+      @JsonProperty("selectionRoot") String selectionRoot
       ) throws IOException, ExecutionSetupException {
 
     this.formatPlugin = (EasyFormatPlugin<?>) engineRegistry.getFormatPlugin(storageConfig, formatConfig);
@@ -87,12 +86,14 @@ public class EasyGroupScan extends AbstractGroupScan{
     }
     maxWidth = chunks.size();
     this.columns = columns;
+    this.selectionRoot = selectionRoot;
   }
   
   public EasyGroupScan(
       FileSelection selection, //
       EasyFormatPlugin<?> formatPlugin, // 
-      List<SchemaPath> columns
+      List<SchemaPath> columns,
+      String selectionRoot
       ) throws IOException{
     this.selection = selection;
     this.formatPlugin = formatPlugin;
@@ -106,6 +107,11 @@ public class EasyGroupScan extends AbstractGroupScan{
       this.endpointAffinities = Collections.emptyList();
     }
     maxWidth = chunks.size();
+    this.selectionRoot = selectionRoot;
+  }
+
+  public String getSelectionRoot() {
+    return selectionRoot;
   }
 
   @Override
@@ -170,7 +176,7 @@ public class EasyGroupScan extends AbstractGroupScan{
     Preconditions.checkArgument(!filesForMinor.isEmpty(),
         String.format("MinorFragmentId %d has no read entries assigned", minorFragmentId));
 
-    return new EasySubScan(convert(filesForMinor), formatPlugin, columns);
+    return new EasySubScan(convert(filesForMinor), formatPlugin, columns, selectionRoot);
   }
   
   private List<FileWorkImpl> convert(List<CompleteFileWork> list){

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java
index c01fb84..0b3fe0f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java
@@ -25,7 +25,6 @@ import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
-import org.apache.drill.exec.exception.DrillbitStartupException;
 import org.apache.drill.exec.physical.base.AbstractSubScan;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.dfs.NamedFormatPluginConfig;
@@ -45,6 +44,7 @@ public class EasySubScan extends AbstractSubScan{
   private final List<FileWorkImpl> files;
   private final EasyFormatPlugin<?> formatPlugin;
   private final List<SchemaPath> columns;
+  private String selectionRoot;
   
   @JsonCreator
   public EasySubScan(
@@ -52,19 +52,27 @@ public class EasySubScan extends AbstractSubScan{
       @JsonProperty("storage") StoragePluginConfig storageConfig, //
       @JsonProperty("format") FormatPluginConfig formatConfig, //
       @JacksonInject StoragePluginRegistry engineRegistry, // 
-      @JsonProperty("columns") List<SchemaPath> columns //
+      @JsonProperty("columns") List<SchemaPath> columns, //
+      @JsonProperty("selectionRoot") String selectionRoot
       ) throws IOException, ExecutionSetupException {
 
     this.formatPlugin = (EasyFormatPlugin<?>) engineRegistry.getFormatPlugin(storageConfig, formatConfig);
     Preconditions.checkNotNull(this.formatPlugin);
     this.files = files;
     this.columns = columns;
+    this.selectionRoot = selectionRoot;
   }
   
-  public EasySubScan(List<FileWorkImpl> files, EasyFormatPlugin<?> plugin, List<SchemaPath> columns){
+  public EasySubScan(List<FileWorkImpl> files, EasyFormatPlugin<?> plugin, List<SchemaPath> columns, String selectionRoot){
     this.formatPlugin = plugin;
     this.files = files;
     this.columns = columns;
+    this.selectionRoot = selectionRoot;
+  }
+
+  @JsonProperty
+  public String getSelectionRoot() {
+    return selectionRoot;
   }
   
   @JsonIgnore

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/shim/fallback/FallbackFileSystem.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/shim/fallback/FallbackFileSystem.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/shim/fallback/FallbackFileSystem.java
index 340919d..0c18e71 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/shim/fallback/FallbackFileSystem.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/shim/fallback/FallbackFileSystem.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.List;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.store.dfs.DrillPathFilter;
 import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
 import org.apache.drill.exec.store.dfs.shim.DrillInputStream;
 import org.apache.drill.exec.store.dfs.shim.DrillOutputStream;
@@ -65,9 +66,13 @@ public class FallbackFileSystem extends DrillFileSystem {
   private void addRecursiveStatus(FileStatus parent, List<FileStatus> listToFill) throws IOException {
     if (parent.isDir()) {
       Path pattern = new Path(parent.getPath(), "*");
-      FileStatus[] sub = fs.globStatus(pattern);
+      FileStatus[] sub = fs.globStatus(pattern, new DrillPathFilter());
       for(FileStatus s : sub){
-        listToFill.add(s);
+        if (s.isDir()) {
+          addRecursiveStatus(s, listToFill);
+        } else {
+          listToFill.add(s);
+        }
       }
     } else {
       listToFill.add(parent);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
index cde9b08..d9e6795 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
@@ -26,12 +26,7 @@ import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.QueryOptimizerRule;
-import org.apache.drill.exec.store.dfs.BasicFormatMatcher;
-import org.apache.drill.exec.store.dfs.FileSelection;
-import org.apache.drill.exec.store.dfs.FormatMatcher;
-import org.apache.drill.exec.store.dfs.FormatPlugin;
-import org.apache.drill.exec.store.dfs.FormatSelection;
-import org.apache.drill.exec.store.dfs.MagicString;
+import org.apache.drill.exec.store.dfs.*;
 import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
 import org.apache.drill.exec.store.mock.MockStorageEngine;
 import org.apache.hadoop.conf.Configuration;
@@ -39,7 +34,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.mapred.Utils;
 import parquet.format.converter.ParquetMetadataConverter;
 import parquet.hadoop.CodecFactoryExposer;
 import parquet.hadoop.ParquetFileWriter;
@@ -101,7 +95,7 @@ public class ParquetFormatPlugin implements FormatPlugin{
 
   @Override
   public ParquetGroupScan getGroupScan(FileSelection selection) throws IOException {
-    return new ParquetGroupScan( selection.getFileStatusList(fs), this);
+    return new ParquetGroupScan(selection.getFileStatusList(fs), this, selection.selectionRoot);
   }
 
   @Override
@@ -170,15 +164,7 @@ public class ParquetFormatPlugin implements FormatPlugin{
           return true;
         } else {
 
-          PathFilter filter = new Utils.OutputFileUtils.OutputFilesFilter() {
-            @Override
-            public boolean accept(Path path) {
-              if (path.toString().contains("_metadata")) {
-                return false;
-              }
-              return super.accept(path);
-            }
-          };
+          PathFilter filter = new DrillPathFilter();
 
           FileStatus[] files = fs.getUnderlying().listStatus(dir.getPath(), filter);
           if (files.length == 0) {
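
For context, a standalone sketch (not part of the commit) of roughly what the anonymous filter removed above did: it rejected Parquet "_metadata" side files while accepting other paths. DrillPathFilter, used here and in FallbackFileSystem above, now fills that role; the class name below is hypothetical.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

public class MetadataExcludingFilter implements PathFilter {
  @Override
  public boolean accept(Path path) {
    // skip Parquet "_metadata" summary files, accept everything else
    return !path.toString().contains("_metadata");
  }
}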

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
index bcee2be..cd7575d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
@@ -22,7 +22,6 @@ import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.exceptions.PhysicalOperatorSetupException;
 import org.apache.drill.common.expression.FieldReference;
@@ -38,7 +37,6 @@ import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.physical.base.Size;
 import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
 import org.apache.drill.exec.store.StoragePluginRegistry;
-import org.apache.drill.exec.store.dfs.FileSystemPlugin;
 import org.apache.drill.exec.store.dfs.ReadEntryFromHDFS;
 import org.apache.drill.exec.store.dfs.ReadEntryWithPath;
 import org.apache.drill.exec.store.dfs.easy.FileWork;
@@ -89,6 +87,7 @@ public class ParquetGroupScan extends AbstractGroupScan {
   private final ParquetFormatConfig formatConfig;
   private final FileSystem fs;
   private List<EndpointAffinity> endpointAffinities;
+  private String selectionRoot;
 
   private List<SchemaPath> columns;
 
@@ -112,7 +111,8 @@ public class ParquetGroupScan extends AbstractGroupScan {
       @JsonProperty("storage") StoragePluginConfig storageConfig, //
       @JsonProperty("format") FormatPluginConfig formatConfig, //
       @JacksonInject StoragePluginRegistry engineRegistry, // 
-      @JsonProperty("columns") List<SchemaPath> columns //
+      @JsonProperty("columns") List<SchemaPath> columns, //
+      @JsonProperty("selectionRoot") String selectionRoot //
       ) throws IOException, ExecutionSetupException {
     this.columns = columns;
     if(formatConfig == null) formatConfig = new ParquetFormatConfig();
@@ -123,12 +123,18 @@ public class ParquetGroupScan extends AbstractGroupScan {
     this.fs = formatPlugin.getFileSystem().getUnderlying();
     this.formatConfig = formatPlugin.getConfig();
     this.entries = entries;
+    this.selectionRoot = selectionRoot;
     this.readFooterFromEntries();
 
   }
 
+  public String getSelectionRoot() {
+    return selectionRoot;
+  }
+
   public ParquetGroupScan(List<FileStatus> files, //
-      ParquetFormatPlugin formatPlugin) //
+      ParquetFormatPlugin formatPlugin, //
+      String selectionRoot) //
       throws IOException {
     this.formatPlugin = formatPlugin;
     this.columns = null;
@@ -140,6 +146,8 @@ public class ParquetGroupScan extends AbstractGroupScan {
       entries.add(new ReadEntryWithPath(file.getPath().toString()));
     }
     
+    this.selectionRoot = selectionRoot;
+
     readFooter(files);
   }
 
@@ -202,6 +210,7 @@ public class ParquetGroupScan extends AbstractGroupScan {
 
     private EndpointByteMap byteMap;
     private int rowGroupIndex;
+    private String root;
 
     @JsonCreator
     public RowGroupInfo(@JsonProperty("path") String path, @JsonProperty("start") long start,
@@ -282,7 +291,7 @@ public class ParquetGroupScan extends AbstractGroupScan {
     Preconditions.checkArgument(!rowGroupsForMinor.isEmpty(),
         String.format("MinorFragmentId %d has no read entries assigned", minorFragmentId));
 
-    return new ParquetRowGroupScan(formatPlugin, convertToReadEntries(rowGroupsForMinor), columns);
+    return new ParquetRowGroupScan(formatPlugin, convertToReadEntries(rowGroupsForMinor), columns, selectionRoot);
   }
 
   

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRowGroupScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRowGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRowGroupScan.java
index 0b1a788..dd5c91c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRowGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRowGroupScan.java
@@ -23,7 +23,6 @@ import java.util.List;
 
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.FieldReference;
-import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
@@ -34,7 +33,6 @@ import org.apache.drill.exec.physical.base.PhysicalVisitor;
 import org.apache.drill.exec.physical.base.Size;
 import org.apache.drill.exec.physical.base.SubScan;
 import org.apache.drill.exec.store.StoragePluginRegistry;
-import org.apache.drill.exec.store.dfs.FileSystemPlugin;
 
 import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonCreator;
@@ -53,6 +51,7 @@ public class ParquetRowGroupScan extends AbstractBase implements SubScan {
   private final ParquetFormatPlugin formatPlugin;
   private final List<RowGroupReadEntry> rowGroupReadEntries;
   private final List<SchemaPath> columns;
+  private String selectionRoot;
 
   @JsonCreator
   public ParquetRowGroupScan( //
@@ -60,7 +59,8 @@ public class ParquetRowGroupScan extends AbstractBase implements SubScan {
       @JsonProperty("storage") StoragePluginConfig storageConfig, //
       @JsonProperty("format") FormatPluginConfig formatConfig, //
       @JsonProperty("entries") LinkedList<RowGroupReadEntry> rowGroupReadEntries, //
-      @JsonProperty("columns") List<SchemaPath> columns //
+      @JsonProperty("columns") List<SchemaPath> columns, //
+      @JsonProperty("selectionRoot") String selectionRoot //
   ) throws ExecutionSetupException {
 
     if(formatConfig == null) formatConfig = new ParquetFormatConfig();
@@ -71,16 +71,19 @@ public class ParquetRowGroupScan extends AbstractBase implements SubScan {
     this.rowGroupReadEntries = rowGroupReadEntries;
     this.formatConfig = formatPlugin.getConfig();
     this.columns = columns;
+    this.selectionRoot = selectionRoot;
   }
 
   public ParquetRowGroupScan( //
       ParquetFormatPlugin formatPlugin, //
       List<RowGroupReadEntry> rowGroupReadEntries, //
-      List<SchemaPath> columns) {
+      List<SchemaPath> columns,
+      String selectionRoot) {
     this.formatPlugin = formatPlugin;
     this.formatConfig = formatPlugin.getConfig();
     this.rowGroupReadEntries = rowGroupReadEntries;
     this.columns = columns;
+    this.selectionRoot = selectionRoot;
   }
 
   @JsonProperty("entries")
@@ -93,6 +96,10 @@ public class ParquetRowGroupScan extends AbstractBase implements SubScan {
     return formatPlugin.getStorageConfig();
   }
 
+  public String getSelectionRoot() {
+    return selectionRoot;
+  }
+
   @Override
   public OperatorCost getCost() {
     return null;
@@ -121,7 +128,7 @@ public class ParquetRowGroupScan extends AbstractBase implements SubScan {
   @Override
   public PhysicalOperator getNewWithChildren(List<PhysicalOperator> children) throws ExecutionSetupException {
     Preconditions.checkArgument(children.isEmpty());
-    return new ParquetRowGroupScan(formatPlugin, rowGroupReadEntries, columns);
+    return new ParquetRowGroupScan(formatPlugin, rowGroupReadEntries, columns, selectionRoot);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java
index d36dbc0..6278a79 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java
@@ -18,33 +18,29 @@
 package org.apache.drill.exec.store.parquet;
 
 import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import com.google.common.base.Stopwatch;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
-import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.impl.BatchCreator;
 import org.apache.drill.exec.physical.impl.ScanBatch;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.store.RecordReader;
-import org.apache.drill.exec.store.mock.MockScanBatchCreator;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import parquet.hadoop.CodecFactoryExposer;
 import parquet.hadoop.ParquetFileReader;
 import parquet.hadoop.metadata.ParquetMetadata;
 
@@ -54,12 +50,34 @@ public class ParquetScanBatchCreator implements BatchCreator<ParquetRowGroupScan
   @Override
   public RecordBatch getBatch(FragmentContext context, ParquetRowGroupScan rowGroupScan, List<RecordBatch> children) throws ExecutionSetupException {
     Preconditions.checkArgument(children.isEmpty());
+    String partitionDesignator = context.getConfig().getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
+    List<SchemaPath> columns = rowGroupScan.getColumns();
+
     List<RecordReader> readers = Lists.newArrayList();
-    
+
+    List<String[]> partitionColumns = Lists.newArrayList();
+    List<Integer> selectedPartitionColumns = Lists.newArrayList();
+    boolean selectAllColumns = false;
+
+    if (columns == null || columns.size() == 0) {
+      selectAllColumns = true;
+    } else {
+      Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));
+      for (SchemaPath column : columns) {
+        Matcher m = pattern.matcher(column.getAsUnescapedPath());
+        if (m.matches()) {
+          columns.remove(column);
+          selectedPartitionColumns.add(Integer.parseInt(column.getAsUnescapedPath().toString().substring(partitionDesignator.length())));
+        }
+      }
+    }
+
+
     FileSystem fs = rowGroupScan.getStorageEngine().getFileSystem().getUnderlying();
     
     // keep footers in a map to avoid re-reading them
     Map<String, ParquetMetadata> footers = new HashMap<String, ParquetMetadata>();
+    int numParts = 0;
     for(RowGroupReadEntry e : rowGroupScan.getRowGroupReadEntries()){
       /*
       Here we could store a map from file names to footers, to prevent re-reading the footer for each row group in a file
@@ -81,10 +99,30 @@ public class ParquetScanBatchCreator implements BatchCreator<ParquetRowGroupScan
                 rowGroupScan.getColumns()
             )
         );
+        if (rowGroupScan.getSelectionRoot() != null) {
+          String[] r = rowGroupScan.getSelectionRoot().split("/");
+          String[] p = e.getPath().split("/");
+          if (p.length > r.length) {
+            String[] q = ArrayUtils.subarray(p, r.length, p.length - 1);
+            partitionColumns.add(q);
+            numParts = Math.max(numParts, q.length);
+          } else {
+            partitionColumns.add(new String[] {});
+          }
+        } else {
+          partitionColumns.add(new String[] {});
+        }
       } catch (IOException e1) {
         throw new ExecutionSetupException(e1);
       }
     }
-    return new ScanBatch(context, readers.iterator());
+
+    if (selectAllColumns) {
+      for (int i = 0; i < numParts; i++) {
+        selectedPartitionColumns.add(i);
+      }
+    }
+
+    return new ScanBatch(context, readers.iterator(), partitionColumns, selectedPartitionColumns);
   }
 }
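
As in the easy-format reader above, the partition-column selection here hinges on a simple regex over projected column names. A minimal sketch (not part of the commit), using the default "dir" label and hypothetical column names:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PartitionColumnMatchSketch {
  public static void main(String[] args) {
    String partitionDesignator = "dir";  // default from partition.column.label
    Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));

    for (String column : new String[] {"dir0", "dir12", "n_name"}) {
      Matcher m = pattern.matcher(column);
      if (m.matches()) {
        // dir0 -> level 0, dir12 -> level 12; n_name is left to the regular reader
        int level = Integer.parseInt(column.substring(partitionDesignator.length()));
        System.out.println(column + " -> partition level " + level);
      }
    }
  }
}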

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/main/resources/drill-module.conf
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index f3b04b5..2f145a7 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -41,7 +41,8 @@ drill.exec: {
       text: {
         buffer.size: 262144,
         batch.size: 4000
-      }
+      },
+      partition.column.label: "dir"
     }
   },
   metrics : { 
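
The new default above presumably resolves to the key drill.exec.storage.file.partition.column.label, the value the scan-batch creators earlier in this change read through ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL. A hypothetical lookup with the Typesafe Config (HOCON) API used for these .conf files:

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class PartitionLabelSketch {
  public static void main(String[] args) {
    // stand-in for the drill-module.conf entry added above
    Config config = ConfigFactory.parseString(
        "drill.exec.storage.file.partition.column.label: \"dir\"");
    String label = config.getString("drill.exec.storage.file.partition.column.label");
    System.out.println(label);  // "dir", so directory levels appear as dir0, dir1, ...
  }
}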

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
index 191115b..e0e874b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.common.util.FileUtils;
 import org.junit.Test;
 
 public class TestExampleQueries extends BaseTestQuery{
@@ -43,6 +44,20 @@ public class TestExampleQueries extends BaseTestQuery{
   }
 
   @Test
+  public void testText() throws Exception {
+    String root = FileUtils.getResourceAsFile("/store/text/data/regions.csv").toURI().toString();
+    String query = String.format("select * from dfs.`%s`", root);
+    test(query);
+  }
+
+  @Test
+  public void testTextPartitions() throws Exception {
+    String root = FileUtils.getResourceAsFile("/store/text/data/").toURI().toString();
+    String query = String.format("select * from dfs.`%s`", root);
+    test(query);
+  }
+
+  @Test
   public void testJoin() throws Exception{
     test("SELECT\n" +
         "  nations.N_NAME,\n" +

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java
index 5fbcc8b..0155690 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java
@@ -71,7 +71,7 @@ public class TextRecordReaderTest extends PopUnitTestBase {
       List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
               Files.toString(
                       FileUtils.getResourceAsFile("/store/text/test.json"), Charsets.UTF_8)
-                      .replace("#{DATA_FILE}", FileUtils.getResourceAsFile("/store/text/regions.csv").toURI().toString()));
+                      .replace("#{DATA_FILE}", FileUtils.getResourceAsFile("/store/text/data/regions.csv").toURI().toString()));
       int count = 0;
       RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
       for(QueryResultBatch b : results) {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/test/resources/storage-engines.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/storage-engines.json b/exec/java-exec/src/test/resources/storage-engines.json
deleted file mode 100644
index 73899ee..0000000
--- a/exec/java-exec/src/test/resources/storage-engines.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "storage":{
-    dfs: {
-      type: "file",
-      connection: "file:///"
-    },  
-    cp: {
-      type: "file",
-      connection: "classpath:///"
-    }      
-
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/test/resources/store/text/data/d1/regions.csv
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/store/text/data/d1/regions.csv b/exec/java-exec/src/test/resources/store/text/data/d1/regions.csv
new file mode 100644
index 0000000..e97d2ed
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/text/data/d1/regions.csv
@@ -0,0 +1,5 @@
+0,AFRICA,lar deposits. blithely final packages cajole. regular waters are final requests. regular accounts are according to ,
+1,AMERICA,hs use ironic, even requests. s,
+2,ASIA,ges. thinly even pinto beans ca,
+3,EUROPE,ly final courts cajole furiously final excuse,
+4,MIDDLE EAST,uickly special accounts cajole carefully blithely close requests. carefully final asymptotes haggle furiousl,
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/test/resources/store/text/data/regions.csv
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/store/text/data/regions.csv b/exec/java-exec/src/test/resources/store/text/data/regions.csv
new file mode 100644
index 0000000..e97d2ed
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/text/data/regions.csv
@@ -0,0 +1,5 @@
+0,AFRICA,lar deposits. blithely final packages cajole. regular waters are final requests. regular accounts are according to ,
+1,AMERICA,hs use ironic, even requests. s,
+2,ASIA,ges. thinly even pinto beans ca,
+3,EUROPE,ly final courts cajole furiously final excuse,
+4,MIDDLE EAST,uickly special accounts cajole carefully blithely close requests. carefully final asymptotes haggle furiousl,
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/exec/java-exec/src/test/resources/store/text/regions.csv
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/store/text/regions.csv b/exec/java-exec/src/test/resources/store/text/regions.csv
deleted file mode 100644
index e97d2ed..0000000
--- a/exec/java-exec/src/test/resources/store/text/regions.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-0,AFRICA,lar deposits. blithely final packages cajole. regular waters are final requests. regular accounts are according to ,
-1,AMERICA,hs use ironic, even requests. s,
-2,ASIA,ges. thinly even pinto beans ca,
-3,EUROPE,ly final courts cajole furiously final excuse,
-4,MIDDLE EAST,uickly special accounts cajole carefully blithely close requests. carefully final asymptotes haggle furiousl,
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5d1123d..7166ee4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -262,7 +262,7 @@
             <forkCount>8</forkCount>
             <reuseForks>true</reuseForks>
             <additionalClasspathElements>
-              <additionalClasspathElement>./sqlparser/src/test/resources/storage-engines.json</additionalClasspathElement>
+              <additionalClasspathElement>./sqlparser/src/test/resources/storage-plugins.json</additionalClasspathElement>
             </additionalClasspathElements>
           </configuration>
         </plugin>

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/69c571cc/sqlparser/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
----------------------------------------------------------------------
diff --git a/sqlparser/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java b/sqlparser/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
index b454b52..ef1674f 100644
--- a/sqlparser/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
+++ b/sqlparser/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
@@ -26,6 +26,7 @@ import java.sql.Statement;
 import java.util.concurrent.TimeUnit;
 
 import com.google.common.base.Function;
+import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.store.hive.HiveTestDataGenerator;
 import org.apache.drill.jdbc.Driver;


[09/10] git commit: DRILL-442: Implement text format plugin

Posted by ja...@apache.org.
DRILL-442: Implement text format plugin

rename storage-engines.json storage-plugins.json
allow reading a particular value in a repeated vector
fix test caused by change that allows selecting element of repeated record
set def recordCount for explain query
fix bug loading repeated vectors
storage plugin/format plugin changes. store storage plugin configuration in distributed cache.
add repeated vector allocators
add support for for reading compressed files.


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/54287d07
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/54287d07
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/54287d07

Branch: refs/heads/master
Commit: 54287d0761f97f337035aa8988faf380178aba08
Parents: 7b6c7a1
Author: Steven Phillips <sp...@maprtech.com>
Authored: Sun Mar 16 18:56:50 2014 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Tue Apr 22 20:05:15 2014 -0700

----------------------------------------------------------------------
 .../drill/common/util/DataInputInputStream.java |  15 +-
 .../drill/exec/store/hbase/HBaseGroupScan.java  |   9 +-
 .../drill/exec/store/hbase/HBaseSubScan.java    |   2 +-
 ...base_scan_screen_physical_column_select.json |   2 +-
 distribution/src/assemble/bin.xml               |   2 +-
 distribution/src/resources/drill-override.conf  |  36 +++-
 distribution/src/resources/storage-engines.json |  26 ---
 distribution/src/resources/storage-plugins.json |  49 ++++++
 .../codegen/templates/RepeatedValueVectors.java |  60 ++++++-
 .../templates/VariableLengthVectors.java        |   4 +-
 .../org/apache/drill/exec/ExecConstants.java    |   2 +
 .../drill/exec/cache/DrillSerializable.java     |   8 +-
 .../exec/cache/JacksonDrillSerializable.java    |  86 ++++++++++
 .../org/apache/drill/exec/cache/LocalCache.java |  18 +-
 .../cache/VectorAccessibleSerializable.java     |   6 +-
 .../drill/exec/expr/EvaluationVisitor.java      |   8 +-
 .../exec/expr/ExpressionTreeMaterializer.java   |  31 +++-
 .../exec/expr/ValueVectorReadExpression.java    |  28 ++-
 .../drill/exec/expr/fn/impl/Alternator.java     |   4 +
 .../apache/drill/exec/opt/BasicOptimizer.java   |   6 +-
 .../impl/project/ProjectRecordBatch.java        |   8 +-
 .../impl/svremover/RemovingRecordBatch.java     |   8 +-
 .../drill/exec/planner/PhysicalPlanReader.java  |   4 +-
 .../drill/exec/planner/logical/DrillOptiq.java  |   2 +-
 .../exec/planner/logical/StorageEngines.java    |  74 --------
 .../exec/planner/logical/StoragePlugins.java    |  93 ++++++++++
 .../org/apache/drill/exec/server/Drillbit.java  |   1 +
 .../drill/exec/store/StoragePluginRegistry.java | 119 ++++++++-----
 .../exec/store/dfs/BasicFormatMatcher.java      |  44 ++++-
 .../drill/exec/store/dfs/FileSystemConfig.java  |  13 +-
 .../drill/exec/store/dfs/FileSystemPlugin.java  |   2 +
 .../drill/exec/store/dfs/FormatCreator.java     |   5 +-
 .../exec/store/dfs/NamedFormatPluginConfig.java |   3 +-
 .../exec/store/dfs/easy/EasyFormatPlugin.java   |  24 ++-
 .../exec/store/dfs/easy/EasyGroupScan.java      |  36 ++--
 .../drill/exec/store/dfs/easy/EasySubScan.java  |  12 +-
 .../exec/store/easy/json/JSONFormatPlugin.java  |   4 +-
 .../exec/store/easy/text/TextFormatPlugin.java  |  98 +++++++++++
 .../apache/drill/exec/store/hive/HiveScan.java  |  10 +-
 .../exec/store/parquet/ParquetFormatConfig.java |   1 +
 .../exec/store/parquet/ParquetGroupScan.java    |   2 +-
 .../exec/store/schedule/AssignmentCreator.java  |   2 +
 .../exec/store/schedule/BlockMapBuilder.java    |  10 +-
 .../exec/store/schedule/CompleteFileWork.java   |   5 +
 .../exec/store/text/DrillTextRecordReader.java  | 169 +++++++++++++++++++
 .../org/apache/drill/exec/util/VectorUtil.java  |   5 +-
 .../vector/RepeatedVariableWidthVector.java     |   8 +-
 .../RepeatedVariableEstimatedAllocator.java     |  36 ++++
 .../allocator/RepeatedVectorAllocator.java      |  36 ++++
 .../exec/vector/allocator/VectorAllocator.java  |   7 +-
 .../src/main/resources/drill-module.conf        |   8 +-
 .../java/org/apache/drill/PlanningBase.java     |  10 ++
 .../exec/physical/impl/join/TestMergeJoin.java  |   3 +
 .../record/ExpressionTreeMaterializerTest.java  |   9 +-
 .../drill/exec/store/TestOrphanSchema.java      |   9 +-
 .../drill/exec/store/ischema/OrphanSchema.java  |  11 +-
 .../exec/store/ischema/TestOrphanSchema.java    |   8 +-
 .../exec/store/text/TextRecordReaderTest.java   |  88 ++++++++++
 .../src/test/resources/storage-plugins.json     |  40 +++++
 .../src/test/resources/store/text/regions.csv   |   5 +
 .../src/test/resources/store/text/test.json     |  40 +++++
 pom.xml                                         |   1 +
 .../src/test/resources/storage-engines.json     |  27 ---
 .../src/test/resources/storage-plugins.json     |  47 ++++++
 64 files changed, 1280 insertions(+), 269 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/common/src/main/java/org/apache/drill/common/util/DataInputInputStream.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/util/DataInputInputStream.java b/common/src/main/java/org/apache/drill/common/util/DataInputInputStream.java
index c4c2282..f61b301 100644
--- a/common/src/main/java/org/apache/drill/common/util/DataInputInputStream.java
+++ b/common/src/main/java/org/apache/drill/common/util/DataInputInputStream.java
@@ -17,6 +17,9 @@
  */
 package org.apache.drill.common.util;
 
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+
 import java.io.*;
 
 
@@ -62,7 +65,17 @@ public class DataInputInputStream extends InputStream {
 
   @Override
   public int read(byte[] b, int off, int len) throws IOException {
-    in.readFully(b, off, len);
+    for (int i = off; i < off + len; i++) {
+      try {
+        b[i] = in.readByte();
+      } catch(Exception e) {
+        if (ExceptionUtils.getRootCause(e) instanceof EOFException) {
+          return i - off;
+        } else {
+          throw e;
+        }
+      }
+    }
     return len;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java
index bb0adcc..b8b6af4 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java
@@ -76,14 +76,13 @@ public class HBaseGroupScan extends AbstractGroupScan {
 
   @JsonCreator
   public HBaseGroupScan(@JsonProperty("entries") List<HTableReadEntry> entries,
-                          @JsonProperty("storage") HBaseStoragePluginConfig storageEngineConfig,
+                          @JsonProperty("storage") HBaseStoragePluginConfig storagePluginConfig,
                           @JsonProperty("columns") List<SchemaPath> columns,
-                          @JacksonInject StoragePluginRegistry engineRegistry
+                          @JacksonInject StoragePluginRegistry pluginRegistry
                            )throws IOException, ExecutionSetupException {
     Preconditions.checkArgument(entries.size() == 1);
-    engineRegistry.init(DrillConfig.create());
-    this.storagePlugin = (HBaseStoragePlugin) engineRegistry.getEngine(storageEngineConfig);
-    this.storagePluginConfig = storageEngineConfig;
+    this.storagePlugin = (HBaseStoragePlugin) pluginRegistry.getPlugin(storagePluginConfig);
+    this.storagePluginConfig = storagePluginConfig;
     this.tableName = entries.get(0).getTableName();
     this.columns = columns;
     getRegionInfos();

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseSubScan.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseSubScan.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseSubScan.java
index 0e8a934..81a8af5 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseSubScan.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseSubScan.java
@@ -57,7 +57,7 @@ public class HBaseSubScan extends AbstractBase implements SubScan {
   public HBaseSubScan(@JacksonInject StoragePluginRegistry registry, @JsonProperty("storage") StoragePluginConfig storage,
                       @JsonProperty("rowGroupReadEntries") LinkedList<HBaseSubScanReadEntry> rowGroupReadEntries,
                       @JsonProperty("columns") List<SchemaPath> columns) throws ExecutionSetupException {
-    hbaseStoragePlugin = (HBaseStoragePlugin) registry.getEngine(storage);
+    hbaseStoragePlugin = (HBaseStoragePlugin) registry.getPlugin(storage);
     this.rowGroupReadEntries = rowGroupReadEntries;
     this.storage = storage;
     this.columns = columns;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/contrib/storage-hbase/src/test/resources/hbase/hbase_scan_screen_physical_column_select.json
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/test/resources/hbase/hbase_scan_screen_physical_column_select.json b/contrib/storage-hbase/src/test/resources/hbase/hbase_scan_screen_physical_column_select.json
index c3d356d..7940c65 100644
--- a/contrib/storage-hbase/src/test/resources/hbase/hbase_scan_screen_physical_column_select.json
+++ b/contrib/storage-hbase/src/test/resources/hbase/hbase_scan_screen_physical_column_select.json
@@ -20,7 +20,7 @@
       "zookeeperPort" : 2181
     },
     columns: [
-      "f2.c1", "f2.c2", "row_key"
+      "`f2`.c1", "`f2`.c2", "row_key"
     ]
   },
   {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/distribution/src/assemble/bin.xml
----------------------------------------------------------------------
diff --git a/distribution/src/assemble/bin.xml b/distribution/src/assemble/bin.xml
index 1579832..b981355 100644
--- a/distribution/src/assemble/bin.xml
+++ b/distribution/src/assemble/bin.xml
@@ -137,7 +137,7 @@
       <outputDirectory>conf</outputDirectory>
     </file>
     <file>
-      <source>src/resources/storage-engines.json</source>
+      <source>src/resources/storage-plugins.json</source>
       <outputDirectory>conf</outputDirectory>
     </file>
   </files>   

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/distribution/src/resources/drill-override.conf
----------------------------------------------------------------------
diff --git a/distribution/src/resources/drill-override.conf b/distribution/src/resources/drill-override.conf
index e837e74..a5e5522 100644
--- a/distribution/src/resources/drill-override.conf
+++ b/distribution/src/resources/drill-override.conf
@@ -51,9 +51,15 @@ drill.exec: {
   },
   functions: ["org.apache.drill.expr.fn.impl"],
   storage: {
-    packages += "org.apache.drill.exec.store"  
+    packages += "org.apache.drill.exec.store",
+    file: {
+      text: {
+        buffer.size: 262144,
+        batch.size: 4000
+      }
+    }
   },
-  metrics : { 
+  metrics : {
     context: "drillbit",
     jmx: {
       enabled : true
@@ -71,7 +77,7 @@ drill.exec: {
   	retry: {
   	  count: 7200,
   	  delay: 500
-  	}    
+  	}
   },
   functions: ["org.apache.drill.expr.fn.impl"],
   network: {
@@ -90,10 +96,26 @@ drill.exec: {
     directories: ["/tmp/drill"],
     filesystem: "drill-local:///"
   },
-  spooling: {
+  buffer:{
     impl: "org.apache.drill.exec.work.batch.SpoolingRawBatchBuffer",
-    delete: false,
-    size: 100000000
+    size: "20000",
+    spooling: {
+      delete: false,
+      size: 100000000
+    }
   },
-  sort.purge.threshold : 100
+  cache.hazel.subnets: ["*.*.*.*"],
+  sort: {
+    purge.threshold : 100,
+    external: {
+      batch.size : 4000,
+      spill: {
+        batch.size : 4000,
+        group.size : 100,
+        threshold : 200,
+        directories : [ "/tmp/drill/spill" ],
+        fs : "file:///"
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/distribution/src/resources/storage-engines.json
----------------------------------------------------------------------
diff --git a/distribution/src/resources/storage-engines.json b/distribution/src/resources/storage-engines.json
deleted file mode 100644
index 8b22858..0000000
--- a/distribution/src/resources/storage-engines.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  "storage":{
-    dfs: {
-      type: "file",
-      connection: "file:///"
-    },
-    cp: {
-      type: "file",
-      connection: "classpath:///"
-    } 
-
-    /*,
-    hive : {
-        type:"hive",
-        config :
-          {
-            "hive.metastore.uris" : "",
-            "javax.jdo.option.ConnectionURL" : "jdbc:derby:;databaseName=../../sample-data/drill_hive_db;create=true",
-            "hive.metastore.warehouse.dir" : "/tmp/drill_hive_wh",
-            "fs.default.name" : "file:///",
-            "hive.metastore.sasl.enabled" : "false"
-          }
-      }
-      */
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/distribution/src/resources/storage-plugins.json
----------------------------------------------------------------------
diff --git a/distribution/src/resources/storage-plugins.json b/distribution/src/resources/storage-plugins.json
new file mode 100644
index 0000000..6f2c015
--- /dev/null
+++ b/distribution/src/resources/storage-plugins.json
@@ -0,0 +1,49 @@
+{
+  "storage":{
+    dfs: {
+      type: "file",
+      connection: "file:///",
+      formats: {
+        "psv" : {
+          type: "text",
+          extensions: [ "tbl" ],
+          delimiter: "|"
+        },
+        "csv" : {
+          type: "text",
+          extensions: [ "csv" ],
+          delimiter: ","
+        },
+        "tsv" : {
+          type: "text",
+          extensions: [ "tsv" ],
+          delimiter: "\t"
+        },
+        "parquet" : {
+          type: "parquet"
+        },
+        "json" : {
+          type: "json"
+        }
+      }
+    },
+    cp: {
+      type: "file",
+      connection: "classpath:///"
+    } 
+
+    /*,
+    hive : {
+        type:"hive",
+        config :
+          {
+            "hive.metastore.uris" : "",
+            "javax.jdo.option.ConnectionURL" : "jdbc:derby:;databaseName=../../sample-data/drill_hive_db;create=true",
+            "hive.metastore.warehouse.dir" : "/tmp/drill_hive_wh",
+            "fs.default.name" : "file:///",
+            "hive.metastore.sasl.enabled" : "false"
+          }
+      }
+      */
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java b/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java
index 8d5e90a..35bd480 100644
--- a/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java
@@ -108,15 +108,37 @@ package org.apache.drill.exec.vector;
       to.copyFrom(fromIndex, toIndex, Repeated${minor.class}Vector.this);
     }
   }
-  
-  public void copyFrom(int inIndex, int outIndex, Repeated${minor.class}Vector v){
-    throw new UnsupportedOperationException();
-  }
-  
-  public boolean copyFromSafe(int inIndex, int outIndex, Repeated${minor.class}Vector v){
-    throw new UnsupportedOperationException();
-  }
-  
+
+<#if type.major == "VarLen">
+    public void copyFrom(int inIndex, int outIndex, Repeated${minor.class}Vector v){
+      int count = v.getAccessor().getCount(inIndex);
+      getMutator().startNewGroup(outIndex);
+      for (int i = 0; i < count; i++) {
+        getMutator().add(outIndex, v.getAccessor().get(inIndex, i));
+      }
+    }
+
+    public boolean copyFromSafe(int inIndex, int outIndex, Repeated${minor.class}Vector v){
+      int count = v.getAccessor().getCount(inIndex);
+      getMutator().startNewGroup(outIndex);
+      for (int i = 0; i < count; i++) {
+        if (!getMutator().addSafe(outIndex, v.getAccessor().get(inIndex, i))) {
+          return false;
+        }
+      }
+      return true;
+    }
+<#else>
+
+    public void copyFrom(int inIndex, int outIndex, Repeated${minor.class}Vector v){
+        throw new UnsupportedOperationException();
+    }
+
+    public boolean copyFromSafe(int inIndex, int outIndex, Repeated${minor.class}Vector v){
+        throw new UnsupportedOperationException();
+    }
+</#if>
+
   <#if type.major == "VarLen">
   @Override
   public FieldMetadata getMetadata() {
@@ -131,6 +153,7 @@ package org.apache.drill.exec.vector;
   
   public void allocateNew(int totalBytes, int parentValueCount, int childValueCount) {
     offsets.allocateNew(parentValueCount+1);
+    offsets.getMutator().set(0,0);
     values.allocateNew(totalBytes, childValueCount);
     mutator.reset();
     accessor.reset();
@@ -261,6 +284,12 @@ package org.apache.drill.exec.vector;
       holder.vector = values;
     }
 
+    public void get(int index, int positionIndex, ${minor.class}Holder holder) {
+      int offset = offsets.getAccessor().get(index);
+      assert offset >= 0;
+      values.getAccessor().get(offset + positionIndex, holder);
+    }
+
     public MaterializedField getField() {
       return field;
     }
@@ -301,6 +330,19 @@ package org.apache.drill.exec.vector;
       offsets.getMutator().set(index+1, nextOffset+1);
     }
 
+    <#if type.major == "VarLen">
+    public boolean addSafe(int index, byte[] bytes) {
+      return addSafe(index, bytes, 0, bytes.length);
+    }
+
+    public boolean addSafe(int index, byte[] bytes, int start, int length) {
+      int nextOffset = offsets.getAccessor().get(index+1);
+      boolean b1 = values.getMutator().setSafe(nextOffset, bytes, start, length);
+      boolean b2 = offsets.getMutator().setSafe(index+1, nextOffset+1);
+      return (b1 && b2);
+    }
+    </#if>
+
     public void add(int index, ${minor.class}Holder holder){
       int nextOffset = offsets.getAccessor().get(index+1);
       values.getMutator().set(nextOffset, holder);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java b/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java
index dcc7135..9cec943 100644
--- a/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java
@@ -323,7 +323,9 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements V
 
       if (data.capacity() < currentOffset + length) return false;
 
-      offsetVector.getMutator().set(index + 1, currentOffset + length);
+      if (!offsetVector.getMutator().setSafe(index + 1, currentOffset + length)) {
+        return false;
+      }
       data.setBytes(currentOffset, bytes, start, length);
       return true;
     }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index be59ea6..f88b1b4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -58,5 +58,7 @@ public interface ExecConstants {
   public static final String EXTERNAL_SORT_SPILL_THRESHOLD = "drill.exec.sort.external.spill.threshold";
   public static final String EXTERNAL_SORT_SPILL_DIRS = "drill.exec.sort.external.spill.directories";
   public static final String EXTERNAL_SORT_SPILL_FILESYSTEM = "drill.exec.sort.external.spill.fs";
+  public static final String TEXT_LINE_READER_BUFFER_SIZE = "drill.exec.storage.file.text.buffer.size";
+  public static final String TEXT_LINE_READER_BATCH_SIZE = "drill.exec.storage.file.text.batch.size";
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DrillSerializable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DrillSerializable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DrillSerializable.java
index 875e8b6..4f266f7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DrillSerializable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DrillSerializable.java
@@ -17,6 +17,10 @@
  */
 package org.apache.drill.exec.cache;
 
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.DataSerializable;
+
 import java.io.*;
 
 /**
@@ -24,8 +28,8 @@ import java.io.*;
  */
 public interface DrillSerializable {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillSerializable.class);
-  public void read(DataInput input) throws IOException;
+  public void readData(ObjectDataInput input) throws IOException;
   public void readFromStream(InputStream input) throws IOException;
-  public void write(DataOutput output) throws IOException;
+  public void writeData(ObjectDataOutput output) throws IOException;
   public void writeToStream(OutputStream output) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/cache/JacksonDrillSerializable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/JacksonDrillSerializable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/JacksonDrillSerializable.java
new file mode 100644
index 0000000..a7b0be2
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/JacksonDrillSerializable.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.cache;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.DataSerializable;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.util.DataInputInputStream;
+import org.apache.drill.common.util.DataOutputOutputStream;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import java.io.*;
+
+public abstract class JacksonDrillSerializable<T> implements DrillSerializable, DataSerializable{
+  private ObjectMapper mapper;
+  private T obj;
+
+  public JacksonDrillSerializable(DrillbitContext context, T obj) {
+    this.mapper = context.getConfig().getMapper();
+    this.obj = obj;
+  }
+
+  public JacksonDrillSerializable() {
+  }
+
+  @Override
+  public void readData(ObjectDataInput input) throws IOException {
+    readFromStream(DataInputInputStream.constructInputStream(input));
+  }
+
+  public void readFromStream(InputStream input, Class clazz) throws IOException {
+    mapper = DrillConfig.create().getMapper();
+    obj = (T) mapper.readValue(input, clazz);
+  }
+
+  @Override
+  public void writeData(ObjectDataOutput output) throws IOException {
+    writeToStream(DataOutputOutputStream.constructOutputStream(output));
+  }
+
+  @Override
+  public void writeToStream(OutputStream output) throws IOException {
+    output.write(mapper.writeValueAsBytes(obj));
+  }
+
+  public T getObj() {
+    return obj;
+  }
+
+  public static class StoragePluginsSerializable extends JacksonDrillSerializable<StoragePlugins> {
+
+    public StoragePluginsSerializable(DrillbitContext context, StoragePlugins obj) {
+      super(context, obj);
+    }
+
+    public StoragePluginsSerializable(BufferAllocator allocator) {
+    }
+
+    public StoragePluginsSerializable() {
+    }
+
+    @Override
+    public void readFromStream(InputStream input) throws IOException {
+      readFromStream(input, StoragePlugins.class);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/cache/LocalCache.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/LocalCache.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/LocalCache.java
index 38de688..119764b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/LocalCache.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/LocalCache.java
@@ -17,7 +17,10 @@
  */
 package org.apache.drill.exec.cache;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.lang.reflect.InvocationTargetException;
 import java.util.Collection;
 import java.util.List;
@@ -27,6 +30,8 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.util.DataInputInputStream;
+import org.apache.drill.common.util.DataOutputOutputStream;
 import org.apache.drill.exec.exception.DrillbitStartupException;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.memory.TopLevelAllocator;
@@ -109,8 +114,14 @@ public class LocalCache implements DistributedCache {
 
   public static ByteArrayDataOutput serialize(DrillSerializable obj) {
     ByteArrayDataOutput out = ByteStreams.newDataOutput();
+    OutputStream outputStream = DataOutputOutputStream.constructOutputStream(out);
     try {
-      obj.write(out);
+      obj.writeToStream(outputStream);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    try {
+      outputStream.flush();
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
@@ -119,9 +130,10 @@ public class LocalCache implements DistributedCache {
 
   public static <V extends DrillSerializable> V deserialize(byte[] bytes, Class<V> clazz) {
     ByteArrayDataInput in = ByteStreams.newDataInput(bytes);
+    InputStream inputStream = DataInputInputStream.constructInputStream(in);
     try {
       V obj = clazz.getConstructor(BufferAllocator.class).newInstance(allocator);
-      obj.read(in);
+      obj.readFromStream(inputStream);
       return obj;
     } catch (InstantiationException | IllegalAccessException | IOException | NoSuchMethodException | InvocationTargetException e) {
       throw new RuntimeException(e);
@@ -164,6 +176,8 @@ public class LocalCache implements DistributedCache {
     @Override
     public V get(String key) {
       if (m.get(key) == null) return null;
+      ByteArrayDataOutput b = m.get(key);
+      byte[] bytes = b.toByteArray();
       return (V) deserialize(m.get(key).toByteArray(), this.clazz);
     }
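
The change above routes serialization through stream adapters so DrillSerializable implementations only deal with InputStream/OutputStream while the cache still hands out DataInput/DataOutput. A minimal, self-contained sketch of that adapter idea (not the actual DataOutputOutputStream implementation) might look like:

import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class DataOutputAdapterSketch {

  // Expose a DataOutput as an OutputStream so stream-based serializers can write into it.
  static OutputStream asOutputStream(final DataOutput out) {
    return new OutputStream() {
      @Override public void write(int b) throws IOException { out.write(b); }
      @Override public void write(byte[] b, int off, int len) throws IOException { out.write(b, off, len); }
    };
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    OutputStream stream = asOutputStream(new DataOutputStream(bytes));
    stream.write("hello".getBytes(StandardCharsets.UTF_8));
    stream.flush();
    System.out.println(bytes.size() + " bytes written");  // 5 bytes written
  }
}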
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
index ff7ab02..9511992 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
@@ -21,6 +21,8 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
 import io.netty.buffer.ByteBuf;
 import org.apache.drill.common.util.DataInputInputStream;
 import org.apache.drill.common.util.DataOutputOutputStream;
@@ -82,7 +84,7 @@ public class VectorAccessibleSerializable implements DrillSerializable {
   }
 
   @Override
-  public void read(DataInput input) throws IOException {
+  public void readData(ObjectDataInput input) throws IOException {
     readFromStream(DataInputInputStream.constructInputStream(input));
   }
 
@@ -125,7 +127,7 @@ public class VectorAccessibleSerializable implements DrillSerializable {
   }
 
   @Override
-  public void write(DataOutput output) throws IOException {
+  public void writeData(ObjectDataOutput output) throws IOException {
     writeToStream(DataOutputOutputStream.constructOutputStream(output));
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
index 2e632a3..fd547e1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
@@ -21,6 +21,7 @@ import java.util.List;
 import java.util.Set;
 
 import io.netty.buffer.ByteBuf;
+import com.google.common.collect.Lists;
 import org.apache.drill.common.expression.CastExpression;
 import org.apache.drill.common.expression.FunctionCall;
 import org.apache.drill.common.expression.FunctionHolderExpression;
@@ -45,6 +46,7 @@ import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
 import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 import org.apache.drill.common.expression.visitors.AbstractExprVisitor;
+import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -332,7 +334,11 @@ public class EvaluationVisitor {
         }
       } else {
         if (Types.usesHolderForGet(e.getMajorType())) {
-          generator.getEvalBlock().add(getValueAccessor.arg(indexVariable).arg(out.getHolder()));
+          if (e.isArrayElement()) {
+            generator.getEvalBlock().add(getValueAccessor.arg(indexVariable).arg(JExpr.lit(e.getIndex())).arg(out.getHolder()));
+          } else {
+            generator.getEvalBlock().add(getValueAccessor.arg(indexVariable).arg(out.getHolder()));
+          }
         } else {
           generator.getEvalBlock().assign(out.getValue(), getValueAccessor.arg(indexVariable));
         }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
index f9572db..e3b1002 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.expr;
 
 import java.util.List;
 
+import com.google.common.base.Joiner;
 import org.apache.drill.common.expression.CastExpression;
 import org.apache.drill.common.expression.ErrorCollector;
 import org.apache.drill.common.expression.ExpressionPosition;
@@ -26,6 +27,7 @@ import org.apache.drill.common.expression.FunctionCall;
 import org.apache.drill.common.expression.FunctionHolderExpression;
 import org.apache.drill.common.expression.IfExpression;
 import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.expression.TypedNullConstant;
 import org.apache.drill.common.expression.ValueExpressions;
@@ -228,12 +230,37 @@ public class ExpressionTreeMaterializer {
     @Override
     public LogicalExpression visitSchemaPath(SchemaPath path, FunctionImplementationRegistry value) {
 //      logger.debug("Visiting schema path {}", path);
-      TypedFieldId tfId = batch.getValueVectorId(path);
+      PathSegment seg = path.getRootSegment();
+      List<String> segments = Lists.newArrayList();
+      segments.add(seg.getNameSegment().getPath().toString());
+      boolean isArrayElement = false;
+      int index = -1;
+      while((seg = seg.getChild()) != null) {
+        if (seg.isNamed()) {
+          segments.add(seg.getNameSegment().getPath().toString());
+          if (seg.isLastPath()) {
+            break;
+          }
+        } else {
+          if (!seg.isLastPath()) {
+            throw new UnsupportedOperationException("Repeated map type not supported");
+          }
+          index = seg.getArraySegment().getIndex();
+          isArrayElement = true;
+          break;
+        }
+      }
+      SchemaPath newPath = SchemaPath.getCompoundPath((String[]) segments.toArray(new String[0]));
+      TypedFieldId tfId = batch.getValueVectorId(newPath);
       if (tfId == null) {
         logger.warn("Unable to find value vector of path {}, returning null instance.", path);
         return NullExpression.INSTANCE;
       } else {
-        return new ValueVectorReadExpression(tfId);
+        ValueVectorReadExpression e = new ValueVectorReadExpression(tfId, index, isArrayElement);
+        if (isArrayElement) {
+          e.required();
+        }
+        return e;
       }
     }
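
The new visitSchemaPath logic splits a path such as a.b[3] into its named part (used to look up the value vector) and a trailing array index. A simplified stand-in using plain strings instead of Drill's PathSegment API, with made-up names and without the repeated-map error handling, is roughly:

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ArrayElementPathSketch {
  private static final Pattern TRAILING_INDEX = Pattern.compile("^(.*)\\[(\\d+)\\]$");

  public static void main(String[] args) {
    String path = "a.b[3]";            // stands in for a schema path like a.b[3]
    List<String> names = new ArrayList<>();
    int index = -1;                    // -1 means "not an array element"

    for (String seg : path.split("\\.")) {
      Matcher m = TRAILING_INDEX.matcher(seg);
      if (m.matches()) {               // segment carries an array index, e.g. b[3]
        names.add(m.group(1));
        index = Integer.parseInt(m.group(2));
      } else {
        names.add(seg);
      }
    }
    // The named part is what gets looked up as a value vector; the index is kept
    // separately, as in ValueVectorReadExpression(tfId, index, true).
    System.out.println("named segments = " + names + ", index = " + index);
  }
}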
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java
index 5e251a1..4ba503d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java
@@ -22,23 +22,43 @@ import java.util.Iterator;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.expression.visitors.ExprVisitor;
+import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.record.TypedFieldId;
 
 import com.google.common.collect.Iterators;
 
+import javax.sound.sampled.FloatControl;
+
 public class ValueVectorReadExpression implements LogicalExpression{
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ValueVectorReadExpression.class);
 
-  private final MajorType type;
+  private MajorType type;
   private final TypedFieldId fieldId;
   private final boolean superReader;
+  private final int index;
+  private final boolean isArrayElement;
   
   
-  public ValueVectorReadExpression(TypedFieldId tfId){
+  public ValueVectorReadExpression(TypedFieldId tfId, int index, boolean isArrayElement){
     this.type = tfId.getType();
     this.fieldId = tfId;
     this.superReader = tfId.isHyperReader();
+    this.index = index;
+    this.isArrayElement = isArrayElement;
+  }
+
+  public void required() {
+    type = Types.required(type.getMinorType());
+  }
+
+  public boolean isArrayElement() {
+    return isArrayElement;
+  }
+
+  public ValueVectorReadExpression(TypedFieldId tfId) {
+    this(tfId, -1, false);
   }
   
   public TypedFieldId getTypedFieldId(){
@@ -62,6 +82,10 @@ public class ValueVectorReadExpression implements LogicalExpression{
     return fieldId;
   }
 
+  public int getIndex() {
+    return index;
+  }
+
   @Override
   public ExpressionPosition getPosition() {
     return ExpressionPosition.UNKNOWN;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Alternator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Alternator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Alternator.java
index bdd227c..641063b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Alternator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Alternator.java
@@ -17,12 +17,16 @@
  */
 package org.apache.drill.exec.expr.fn.impl;
 
+import org.apache.drill.exec.expr.DrillAggFunc;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
 import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.annotations.Workspace;
 import org.apache.drill.exec.expr.holders.BigIntHolder;
+import org.apache.drill.exec.expr.holders.IntHolder;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
 import org.apache.drill.exec.record.RecordBatch;
 
 public class Alternator {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java
index 97ec026..d7d5ccb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java
@@ -184,10 +184,10 @@ public class BasicOptimizer extends Optimizer{
     public PhysicalOperator visitScan(Scan scan, Object obj) throws OptimizerException {
       StoragePluginConfig config = logicalPlan.getStorageEngineConfig(scan.getStorageEngine());
       if(config == null) throw new OptimizerException(String.format("Logical plan referenced the storage engine config %s but the logical plan didn't have that available as a config.", scan.getStorageEngine()));
-      StoragePlugin engine;
+      StoragePlugin storagePlugin;
       try {
-        engine = context.getStorage().getEngine(config);
-        return engine.getPhysicalScan(scan.getSelection());
+        storagePlugin = context.getStorage().getPlugin(config);
+        return storagePlugin.getPhysicalScan(scan.getSelection());
       } catch (IOException | ExecutionSetupException e) {
         throw new OptimizerException("Failure while attempting to retrieve storage engine.", e);
       }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
index aaee8e7..e8ee3cc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
@@ -30,6 +30,7 @@ import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.PathSegment.NameSegment;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.data.NamedExpression;
+import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.exception.ClassTransformationException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
@@ -140,9 +141,10 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
 
 
         // add value vector to transfer if direct reference and this is allowed, otherwise, add to evaluation stack.
-        if(expr instanceof ValueVectorReadExpression && incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE &&
-                !isAnyWildcard &&
-                !transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldId())) {
+        if(expr instanceof ValueVectorReadExpression && incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE
+                && !isAnyWildcard
+                &&!transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldId())
+                && !((ValueVectorReadExpression) expr).isArrayElement()) {
           ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
           ValueVector vvIn = incoming.getValueAccessorById(vectorRead.getFieldId().getFieldId(), TypeHelper.getValueVectorClass(vectorRead.getMajorType().getMinorType(), vectorRead.getMajorType().getMode())).getValueVector();
           Preconditions.checkNotNull(incoming);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
index 1377881..499f4d1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
@@ -28,8 +28,14 @@ import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.SelectionVectorRemover;
 import org.apache.drill.exec.record.*;
+import org.apache.drill.exec.record.AbstractSingleRecordBatch;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
-import org.apache.drill.exec.record.selection.SelectionVector4;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.record.TransferPair;
+import org.apache.drill.exec.record.TypedFieldId;
+import org.apache.drill.exec.record.VectorWrapper;
+import org.apache.drill.exec.record.WritableBatch;
+import org.apache.drill.exec.vector.AllocationHelper;
 import org.apache.drill.exec.vector.FixedWidthVector;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.VariableWidthVector;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java
index a100163..167a992 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java
@@ -48,7 +48,7 @@ public class PhysicalPlanReader {
   private final ObjectReader logicalPlanReader;
 
   public PhysicalPlanReader(DrillConfig config, ObjectMapper mapper, final DrillbitEndpoint endpoint,
-                            final StoragePluginRegistry engineRegistry) {
+                            final StoragePluginRegistry pluginRegistry) {
 
     // Endpoint serializer/deserializer.
     SimpleModule deserModule = new SimpleModule("PhysicalOperatorModule") //
@@ -61,7 +61,7 @@ public class PhysicalPlanReader {
     mapper.registerModule(deserModule);
     mapper.registerSubtypes(PhysicalOperatorUtil.getSubTypes(config));
     InjectableValues injectables = new InjectableValues.Std() //
-            .addValue(StoragePluginRegistry.class, engineRegistry) //
+            .addValue(StoragePluginRegistry.class, pluginRegistry) //
         .addValue(DrillbitEndpoint.class, endpoint); //
 
     this.mapper = mapper;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
index bc2178b..46eed45 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
@@ -157,7 +157,7 @@ public class DrillOptiq {
         if (call.getOperator() == SqlStdOperatorTable.ITEM) {
           SchemaPath left = (SchemaPath) call.getOperands().get(0).accept(this);
           final RexLiteral literal = (RexLiteral) call.getOperands().get(1);
-          return left.getChild((String) literal.getValue2());
+          return left.getChild(literal.getValue2().toString());
         }
         
         // fall through

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StorageEngines.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StorageEngines.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StorageEngines.java
deleted file mode 100644
index d298040..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StorageEngines.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.planner.logical;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.logical.StoragePluginConfig;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Charsets;
-import com.google.common.io.Resources;
-
-public class StorageEngines implements Iterable<Map.Entry<String, StoragePluginConfig>>{
-  
-  private Map<String, StoragePluginConfig> storage;
-  
-  @JsonCreator
-  public StorageEngines(@JsonProperty("storage") Map<String, StoragePluginConfig> storage){
-    this.storage = storage;
-  }
-  
-  public static void main(String[] args) throws Exception{
-    DrillConfig config = DrillConfig.create();
-    String data = Resources.toString(Resources.getResource("storage-engines.json"), Charsets.UTF_8);
-    StorageEngines se = config.getMapper().readValue(data,  StorageEngines.class);
-    System.out.println(se);
-  }
-
-  @Override
-  public String toString() {
-    final int maxLen = 10;
-    return "StorageEngines [storage=" + (storage != null ? toString(storage.entrySet(), maxLen) : null) + "]";
-  }
-
-  @Override
-  public Iterator<Entry<String, StoragePluginConfig>> iterator() {
-    return storage.entrySet().iterator();
-  }
-
-  private String toString(Collection<?> collection, int maxLen) {
-    StringBuilder builder = new StringBuilder();
-    builder.append("[");
-    int i = 0;
-    for (Iterator<?> iterator = collection.iterator(); iterator.hasNext() && i < maxLen; i++) {
-      if (i > 0)
-        builder.append(", ");
-      builder.append(iterator.next());
-    }
-    builder.append("]");
-    return builder.toString();
-  }
-  
-  
-}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
new file mode 100644
index 0000000..939b77c
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.logical;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.logical.StoragePluginConfig;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
+import com.google.common.io.Resources;
+
+public class StoragePlugins implements Iterable<Map.Entry<String, StoragePluginConfig>>{
+  
+  private Map<String, StoragePluginConfig> storage;
+  
+  @JsonCreator
+  public StoragePlugins(@JsonProperty("storage") Map<String, StoragePluginConfig> storage){
+    this.storage = storage;
+  }
+  
+  public static void main(String[] args) throws Exception{
+    DrillConfig config = DrillConfig.create();
+    String data = Resources.toString(Resources.getResource("storage-engines.json"), Charsets.UTF_8);
+    StoragePlugins se = config.getMapper().readValue(data,  StoragePlugins.class);
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    config.getMapper().writeValue(System.out, se);
+    config.getMapper().writeValue(os, se);
+    se = config.getMapper().readValue(new ByteArrayInputStream(os.toByteArray()), StoragePlugins.class);
+    System.out.println(se);
+  }
+
+  @JsonProperty("storage")
+  public Map<String, StoragePluginConfig> getStorage() {
+    return storage;
+  }
+
+  @Override
+  public String toString() {
+    final int maxLen = 10;
+    return "StoragePlugins [storage=" + (storage != null ? toString(storage.entrySet(), maxLen) : null) + "]";
+  }
+
+  @Override
+  public Iterator<Entry<String, StoragePluginConfig>> iterator() {
+    return storage.entrySet().iterator();
+  }
+
+  private String toString(Collection<?> collection, int maxLen) {
+    StringBuilder builder = new StringBuilder();
+    builder.append("[");
+    int i = 0;
+    for (Iterator<?> iterator = collection.iterator(); iterator.hasNext() && i < maxLen; i++) {
+      if (i > 0)
+        builder.append(", ");
+      builder.append(iterator.next());
+    }
+    builder.append("]");
+    return builder.toString();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!(obj instanceof StoragePlugins)) {
+      return false;
+    }
+    return storage.equals(((StoragePlugins) obj).getStorage());
+  }
+  
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
index 6400c75..411a76c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
@@ -95,6 +95,7 @@ public class Drillbit implements Closeable{
     DrillbitEndpoint md = engine.start();
     manager.start(md, cache, engine.getController(), engine.getDataConnectionCreator(), coord);
     cache.run();
+    manager.getContext().getStorage().init();
     handle = coord.register(md);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
index 2386915..7a88098 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
@@ -20,12 +20,14 @@ package org.apache.drill.exec.store;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
+import java.net.URL;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import com.google.common.base.Preconditions;
 import net.hydromatic.linq4j.expressions.DefaultExpression;
 import net.hydromatic.linq4j.expressions.Expression;
 import net.hydromatic.optiq.SchemaPlus;
@@ -37,7 +39,10 @@ import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.common.util.PathScanner;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.planner.logical.StorageEngines;
+import org.apache.drill.exec.cache.DistributedMap;
+import org.apache.drill.exec.cache.JacksonDrillSerializable.StoragePluginsSerializable;
+import org.apache.drill.exec.exception.DrillbitStartupException;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
 import org.apache.drill.exec.rpc.user.DrillUser;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.dfs.FileSystemPlugin;
@@ -53,8 +58,8 @@ import com.google.common.io.Resources;
 public class StoragePluginRegistry implements Iterable<Map.Entry<String, StoragePlugin>>{
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StoragePluginRegistry.class);
 
-  private Map<Object, Constructor<? extends StoragePlugin>> availableEngines = new HashMap<Object, Constructor<? extends StoragePlugin>>();
-  private final ImmutableMap<String, StoragePlugin> engines;
+  private Map<Object, Constructor<? extends StoragePlugin>> availablePlugins = new HashMap<Object, Constructor<? extends StoragePlugin>>();
+  private ImmutableMap<String, StoragePlugin> plugins;
 
   private DrillbitContext context;
   private final DrillSchemaFactory schemaFactory = new DrillSchemaFactory();
@@ -64,67 +69,93 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
   public StoragePluginRegistry(DrillbitContext context) {
     try{
     this.context = context;
-    init(context.getConfig());
-    this.engines = ImmutableMap.copyOf(createEngines());
     }catch(RuntimeException e){
-      logger.error("Failure while loading storage engine registry.", e);
+      logger.error("Failure while loading storage plugin registry.", e);
       throw new RuntimeException("Faiure while reading and loading storage plugin configuration.", e);
     }
   }
 
   @SuppressWarnings("unchecked")
-  public void init(DrillConfig config){
-    Collection<Class<? extends StoragePlugin>> engines = PathScanner.scanForImplementations(StoragePlugin.class, config.getStringList(ExecConstants.STORAGE_ENGINE_SCAN_PACKAGES));
-    logger.debug("Loading storage engines {}", engines);
-    for(Class<? extends StoragePlugin> engine: engines){
+  public void init() throws DrillbitStartupException {
+    DrillConfig config = context.getConfig();
+    Collection<Class<? extends StoragePlugin>> plugins = PathScanner.scanForImplementations(StoragePlugin.class, config.getStringList(ExecConstants.STORAGE_ENGINE_SCAN_PACKAGES));
+    logger.debug("Loading storage plugins {}", plugins);
+    for(Class<? extends StoragePlugin> plugin: plugins){
       int i =0;
-      for(Constructor<?> c : engine.getConstructors()){
+      for(Constructor<?> c : plugin.getConstructors()){
         Class<?>[] params = c.getParameterTypes();
         if(params.length != 3 || params[1] != DrillbitContext.class || !StoragePluginConfig.class.isAssignableFrom(params[0]) || params[2] != String.class){
-          logger.info("Skipping StorageEngine constructor {} for engine class {} since it doesn't implement a [constructor(StorageEngineConfig, DrillbitContext, String)]", c, engine);
+          logger.info("Skipping StoragePlugin constructor {} for plugin class {} since it doesn't implement a [constructor(StoragePluginConfig, DrillbitContext, String)]", c, plugin);
           continue;
         }
-        availableEngines.put(params[0], (Constructor<? extends StoragePlugin>) c);
+        availablePlugins.put(params[0], (Constructor<? extends StoragePlugin>) c);
         i++;
       }
       if(i == 0){
-        logger.debug("Skipping registration of StorageEngine {} as it doesn't have a constructor with the parameters of (StorangeEngineConfig, Config)", engine.getCanonicalName());
+        logger.debug("Skipping registration of StoragePlugin {} as it doesn't have a constructor with the parameters of (StorangePluginConfig, Config)", plugin.getCanonicalName());
       }
     }
 
+    this.plugins = ImmutableMap.copyOf(createPlugins());
 
   }
-
-  private Map<String, StoragePlugin> createEngines(){
-    StorageEngines engines = null;
-    Map<String, StoragePlugin> activeEngines = new HashMap<String, StoragePlugin>();
+  
+  private Map<String, StoragePlugin> createPlugins() throws DrillbitStartupException {
+    /*
+     * Check if "storage-plugins.json" exists. Also check if "storage-plugins" object exists in Distributed Cache.
+     * If both exist, check that they are the same. If they differ, throw exception. If "storage-plugins.json" exists, but
+     * nothing found in cache, then add it to the cache. If neither are found, throw exception.
+     */
+    StoragePlugins plugins = null;
+    StoragePlugins cachedPlugins = null;
+    Map<String, StoragePlugin> activePlugins = new HashMap<String, StoragePlugin>();
     try{
-      String enginesData = Resources.toString(Resources.getResource("storage-engines.json"), Charsets.UTF_8);
-      engines = context.getConfig().getMapper().readValue(enginesData, StorageEngines.class);
+      URL url = Resources.class.getClassLoader().getResource("storage-plugins.json");
+      if (url != null) {
+        String pluginsData = Resources.toString(url, Charsets.UTF_8);
+        plugins = context.getConfig().getMapper().readValue(pluginsData, StoragePlugins.class);
+      }
+      DistributedMap<StoragePluginsSerializable> map = context.getCache().getMap(StoragePluginsSerializable.class);
+      StoragePluginsSerializable cachedPluginsSerializable = map.get("storage-plugins");
+      if (cachedPluginsSerializable != null) {
+        cachedPlugins = cachedPluginsSerializable.getObj();
+        logger.debug("Found cached storage plugin config: {}", cachedPlugins);
+      } else {
+        Preconditions.checkNotNull(plugins,"No storage plugin configuration found");
+        logger.debug("caching storage plugin config {}", plugins);
+        map.put("storage-plugins", new StoragePluginsSerializable(context, plugins));
+        cachedPluginsSerializable = map.get("storage-plugins");
+        cachedPlugins = cachedPluginsSerializable.getObj();
+      }
+      if(!(plugins == null || cachedPlugins.equals(plugins))) {
+        logger.error("Storage plugin config mismatch. {}. {}", plugins, cachedPlugins);
+        throw new DrillbitStartupException("Storage plugin config mismatch");
+      }
+      logger.debug("using plugin config: {}", cachedPlugins);
     }catch(IOException e){
-      throw new IllegalStateException("Failure while reading storage engines data.", e);
+      throw new IllegalStateException("Failure while reading storage plugins data.", e);
     }
-
-    for(Map.Entry<String, StoragePluginConfig> config : engines){
+    
+    for(Map.Entry<String, StoragePluginConfig> config : cachedPlugins){
       try{
         StoragePlugin plugin = create(config.getKey(), config.getValue());
-        activeEngines.put(config.getKey(), plugin);
+        activePlugins.put(config.getKey(), plugin);
       }catch(ExecutionSetupException e){
         logger.error("Failure while setting up StoragePlugin with name: '{}'.", config.getKey(), e);
       }
     }
-    activeEngines.put("INFORMATION_SCHEMA", new InfoSchemaStoragePlugin(new InfoSchemaConfig(), context, "INFORMATION_SCHEMA"));
-
-    return activeEngines;
+    activePlugins.put("INFORMATION_SCHEMA", new InfoSchemaStoragePlugin(new InfoSchemaConfig(), context, "INFORMATION_SCHEMA"));
+    
+    return activePlugins;
   }
 
-  public StoragePlugin getEngine(String registeredStorageEngineName) throws ExecutionSetupException {
-    return engines.get(registeredStorageEngineName);
+  public StoragePlugin getPlugin(String registeredStoragePluginName) throws ExecutionSetupException {
+    return plugins.get(registeredStoragePluginName);
   }
-
-  public StoragePlugin getEngine(StoragePluginConfig config) throws ExecutionSetupException {
+  
+  public StoragePlugin getPlugin(StoragePluginConfig config) throws ExecutionSetupException {
     if(config instanceof NamedStoragePluginConfig){
-      return engines.get(((NamedStoragePluginConfig) config).name);
+      return plugins.get(((NamedStoragePluginConfig) config).name);
     }else{
       // TODO: for now, we'll throw away transient configs.  we really ought to clean these up.
       return create(null, config);
@@ -132,33 +163,33 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
   }
 
   public FormatPlugin getFormatPlugin(StoragePluginConfig storageConfig, FormatPluginConfig formatConfig) throws ExecutionSetupException{
-    StoragePlugin p = getEngine(storageConfig);
-    if(!(p instanceof FileSystemPlugin)) throw new ExecutionSetupException(String.format("You tried to request a format plugin for a stroage engine that wasn't of type FileSystemPlugin.  The actual type of plugin was %s.", p.getClass().getName()));
+    StoragePlugin p = getPlugin(storageConfig);
+    if(!(p instanceof FileSystemPlugin)) throw new ExecutionSetupException(String.format("You tried to request a format plugin for a storage plugin that wasn't of type FileSystemPlugin.  The actual type of plugin was %s.", p.getClass().getName()));
     FileSystemPlugin storage = (FileSystemPlugin) p;
     return storage.getFormatPlugin(formatConfig);
   }
 
-  private StoragePlugin create(String name, StoragePluginConfig engineConfig) throws ExecutionSetupException {
-    StoragePlugin engine = null;
-    Constructor<? extends StoragePlugin> c = availableEngines.get(engineConfig.getClass());
+  private StoragePlugin create(String name, StoragePluginConfig pluginConfig) throws ExecutionSetupException {
+    StoragePlugin plugin = null;
+    Constructor<? extends StoragePlugin> c = availablePlugins.get(pluginConfig.getClass());
     if (c == null)
-      throw new ExecutionSetupException(String.format("Failure finding StorageEngine constructor for config %s",
-          engineConfig));
+      throw new ExecutionSetupException(String.format("Failure finding StoragePlugin constructor for config %s",
+          pluginConfig));
     try {
-      engine = c.newInstance(engineConfig, context, name);
-      return engine;
+      plugin = c.newInstance(pluginConfig, context, name);
+      return plugin;
     } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
       Throwable t = e instanceof InvocationTargetException ? ((InvocationTargetException) e).getTargetException() : e;
       if (t instanceof ExecutionSetupException)
         throw ((ExecutionSetupException) t);
       throw new ExecutionSetupException(String.format(
-          "Failure setting up new storage engine configuration for config %s", engineConfig), t);
+          "Failure setting up new storage plugin configuration for config %s", pluginConfig), t);
     }
   }
 
   @Override
   public Iterator<Entry<String, StoragePlugin>> iterator() {
-    return engines.entrySet().iterator();
+    return plugins.entrySet().iterator();
   }
 
   public DrillSchemaFactory getSchemaFactory(){
@@ -169,7 +200,7 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
 
     @Override
     public void registerSchemas(DrillUser user, SchemaPlus parent) {
-      for(Map.Entry<String, StoragePlugin> e : engines.entrySet()){
+      for(Map.Entry<String, StoragePlugin> e : plugins.entrySet()){
         e.getValue().registerSchemas(user, parent);
       }
     }
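
The comment in createPlugins() above describes the bootstrap rule: read storage-plugins.json if present, seed the distributed cache when it is empty, and fail on a mismatch between the local file and the cached copy. A hedged sketch of that rule, with a ConcurrentHashMap standing in for the distributed cache and made-up names and types:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class PluginConfigBootstrapSketch {
  // Stand-in for the distributed cache shared by all drillbits.
  static final Map<String, String> CACHE = new ConcurrentHashMap<>();

  static String resolve(String localConfig) {
    String cached = CACHE.get("storage-plugins");
    if (cached == null) {
      if (localConfig == null) {
        throw new IllegalStateException("No storage plugin configuration found");
      }
      CACHE.put("storage-plugins", localConfig);    // first node seeds the cache
      return localConfig;
    }
    if (localConfig != null && !cached.equals(localConfig)) {
      throw new IllegalStateException("Storage plugin config mismatch");
    }
    return cached;                                   // later nodes use the cached copy
  }

  public static void main(String[] args) {
    System.out.println(resolve("{\"storage\": {}}"));  // seeds the cache
    System.out.println(resolve(null));                 // no local file: falls back to cache
  }
}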

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
index 50678a6..232ec07 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
@@ -18,11 +18,13 @@
 package org.apache.drill.exec.store.dfs;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -31,6 +33,8 @@ import com.beust.jcommander.internal.Lists;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Range;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
 
 public class BasicFormatMatcher extends FormatMatcher{
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BasicFormatMatcher.class);
@@ -39,6 +43,8 @@ public class BasicFormatMatcher extends FormatMatcher{
   private final MagicStringMatcher matcher;
   protected final DrillFileSystem fs;
   protected final FormatPlugin plugin;
+  protected final boolean compressible;
+  protected final CompressionCodecFactory codecFactory;
   
   public BasicFormatMatcher(FormatPlugin plugin, DrillFileSystem fs, List<Pattern> patterns, List<MagicString> magicStrings) {
     super();
@@ -46,12 +52,21 @@ public class BasicFormatMatcher extends FormatMatcher{
     this.matcher = new MagicStringMatcher(magicStrings);
     this.fs = fs;
     this.plugin = plugin;
+    this.compressible = false;
+    this.codecFactory = null;
   }
   
-  public BasicFormatMatcher(FormatPlugin plugin, DrillFileSystem fs, String extension){
-    this(plugin, fs, //
-        Lists.newArrayList(Pattern.compile(".*\\." + extension)), //
-        (List<MagicString>) Collections.EMPTY_LIST);
+  public BasicFormatMatcher(FormatPlugin plugin, DrillFileSystem fs, List<String> extensions, boolean compressible){
+    List<Pattern> patterns = Lists.newArrayList();
+    for (String extension : extensions) {
+      patterns.add(Pattern.compile(".*\\." + extension));
+    }
+    this.patterns = patterns;
+    this.matcher = new MagicStringMatcher(new ArrayList<MagicString>());
+    this.fs = fs;
+    this.plugin = plugin;
+    this.compressible = compressible;
+    this.codecFactory = new CompressionCodecFactory(fs.getUnderlying().getConf());
   }
   
   @Override
@@ -62,14 +77,31 @@ public class BasicFormatMatcher extends FormatMatcher{
   @Override
   public FormatSelection isReadable(FileSelection selection) throws IOException {
     if(isReadable(selection.getFirstPath(fs))){
-      return new FormatSelection(plugin.getConfig(), selection);
+      if (plugin.getName() != null) {
+        NamedFormatPluginConfig namedConfig = new NamedFormatPluginConfig();
+        namedConfig.name = plugin.getName();
+        return new FormatSelection(namedConfig, selection);
+      } else {
+        return new FormatSelection(plugin.getConfig(), selection);
+      }
     }
     return null;
   }
 
   protected final boolean isReadable(FileStatus status) throws IOException {
+    CompressionCodec codec = null;
+    if (compressible) {
+      codec = codecFactory.getCodec(status.getPath());
+    }
+    String fileName;
+    if (codec != null) {
+      String path = status.getPath().toString();
+      fileName = path.substring(0, path.lastIndexOf('.'));
+    } else {
+      fileName = status.getPath().toString();
+    }
     for(Pattern p : patterns){
-      if(p.matcher(status.getPath().toString()).matches()){
+      if(p.matcher(fileName).matches()){
         return true;
       }
     }
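
With the compressible flag set, the matcher strips a recognized codec extension before applying the format patterns, so data.csv.gz still matches a .csv pattern. A self-contained approximation, using a hard-coded suffix list in place of Hadoop's CompressionCodecFactory:

import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

public class CompressedExtensionMatchSketch {
  // Hypothetical stand-in for CompressionCodecFactory: known codec suffixes.
  static final List<String> CODEC_SUFFIXES = Arrays.asList(".gz", ".bz2");

  static boolean isReadable(String path, Pattern formatPattern) {
    String name = path;
    for (String suffix : CODEC_SUFFIXES) {
      if (name.endsWith(suffix)) {
        // Strip the codec extension so "data.csv.gz" is matched as "data.csv".
        name = name.substring(0, name.length() - suffix.length());
        break;
      }
    }
    return formatPattern.matcher(name).matches();
  }

  public static void main(String[] args) {
    Pattern csv = Pattern.compile(".*\\.csv");
    System.out.println(isReadable("/tmp/data.csv", csv));     // true
    System.out.println(isReadable("/tmp/data.csv.gz", csv));  // true after stripping .gz
    System.out.println(isReadable("/tmp/data.json", csv));    // false
  }
}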

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
index 455c4b2..e392fa5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
@@ -31,5 +31,16 @@ public class FileSystemConfig implements StoragePluginConfig{
   public String connection;
   public Map<String, String> workspaces;
   public Map<String, FormatPluginConfig> formats;
-  
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!(obj instanceof FileSystemConfig)) {
+      return false;
+    }
+    FileSystemConfig that = (FileSystemConfig) obj;
+    boolean same = ((this.connection == null && that.connection == null) || this.connection.equals(that.connection)) &&
+            ((this.workspaces == null && that.workspaces == null) || this.workspaces.equals(that.workspaces)) &&
+            ((this.formats== null && that.formats == null) || this.formats.equals(that.formats));
+    return same;
+  }
 }
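
One caveat with the hand-rolled null checks above: when a field is null on this but not on the other object, the short-circuit falls through to connection.equals(...) and throws a NullPointerException. A null-safe alternative, shown here on a made-up class with only a subset of the fields, is java.util.Objects.equals, paired with a matching hashCode:

import java.util.Map;
import java.util.Objects;

public class NullSafeEqualsSketch {
  String connection;                  // made-up subset of FileSystemConfig's fields
  Map<String, String> workspaces;

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof NullSafeEqualsSketch)) {
      return false;
    }
    NullSafeEqualsSketch that = (NullSafeEqualsSketch) obj;
    // Objects.equals is true when both sides are null and false when only one is,
    // so no per-field null checks (and no NPE) are needed.
    return Objects.equals(this.connection, that.connection)
        && Objects.equals(this.workspaces, that.workspaces);
  }

  @Override
  public int hashCode() {
    return Objects.hash(connection, workspaces);
  }
}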

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
index ebd8507..840a011 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
@@ -36,6 +36,7 @@ import org.apache.drill.exec.rpc.user.UserSession;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.AbstractStoragePlugin;
 import org.apache.drill.exec.store.ClassPathFileSystem;
+import org.apache.drill.exec.store.LocalSyncableFileSystem;
 import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
 import org.apache.drill.exec.store.dfs.shim.FileSystemCreator;
 import org.apache.hadoop.conf.Configuration;
@@ -69,6 +70,7 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
       Configuration fsConf = new Configuration();
       fsConf.set(FileSystem.FS_DEFAULT_NAME_KEY, config.connection);
       fsConf.set("fs.classpath.impl", ClassPathFileSystem.class.getName());
+      fsConf.set("fs.drill-local.impl", LocalSyncableFileSystem.class.getName());
       this.fs = FileSystemCreator.getFileSystem(context.getConfig(), fsConf);
       this.formatsByName = FormatCreator.getFormatPlugins(context, fs, config);
       List<FormatMatcher> matchers = Lists.newArrayList();

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
index 7ce8c50..b40502f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
@@ -37,7 +37,7 @@ public class FormatCreator {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FormatCreator.class);
   
   
-  static final ConstructorChecker FORMAT_BASED = new ConstructorChecker(String.class, DrillbitContext.class, DrillFileSystem.class, StoragePluginConfig.class, FormatPlugin.class);
+  static final ConstructorChecker FORMAT_BASED = new ConstructorChecker(String.class, DrillbitContext.class, DrillFileSystem.class, StoragePluginConfig.class, FormatPluginConfig.class);
   static final ConstructorChecker DEFAULT_BASED = new ConstructorChecker(String.class, DrillbitContext.class, DrillFileSystem.class, StoragePluginConfig.class);
   
   static Map<String, FormatPlugin> getFormatPlugins(DrillbitContext context, DrillFileSystem fileSystem, FileSystemConfig storageConfig){
@@ -69,7 +69,8 @@ public class FormatCreator {
         for(Constructor<?> c : pluginClass.getConstructors()){
           try{
             if(!FORMAT_BASED.check(c)) continue;
-            constructors.put(pluginClass, c);
+            Class<? extends FormatPluginConfig> configClass = (Class<? extends FormatPluginConfig>) c.getParameterTypes()[4];
+            constructors.put(configClass, c);
           }catch(Exception e){
             logger.warn(String.format("Failure while trying instantiate FormatPlugin %s.", pluginClass.getName()), e);
           }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/NamedFormatPluginConfig.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/NamedFormatPluginConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/NamedFormatPluginConfig.java
index 6b98ea2..173dfeb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/NamedFormatPluginConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/NamedFormatPluginConfig.java
@@ -17,12 +17,13 @@
  */
 package org.apache.drill.exec.store.dfs;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import org.apache.drill.common.logical.FormatPluginConfig;
 
 import com.fasterxml.jackson.annotation.JsonTypeName;
 
 
 @JsonTypeName("named")
-public class NamedFormatPluginConfig implements FormatPluginConfig{
+public class NamedFormatPluginConfig implements FormatPluginConfig {
   public String name;
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
index 780ec14..9c1dc74 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
@@ -40,6 +40,10 @@ import org.apache.drill.exec.store.dfs.FormatPlugin;
 import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
 
 import com.beust.jcommander.internal.Lists;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
 
 public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements FormatPlugin {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(EasyFormatPlugin.class);
@@ -51,20 +55,24 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
   private final boolean blockSplittable;
   private final DrillFileSystem fs;
   private final StoragePluginConfig storageConfig;
-  private final FormatPluginConfig formatConfig;
+  protected final FormatPluginConfig formatConfig;
   private final String name;
+  protected final CompressionCodecFactory codecFactory;
+  private final boolean compressible;
   
   protected EasyFormatPlugin(String name, DrillbitContext context, DrillFileSystem fs, StoragePluginConfig storageConfig,
-                             T formatConfig, boolean readable, boolean writable, boolean blockSplittable, String extension, String defaultName){
-    this.matcher = new BasicFormatMatcher(this, fs, extension);
+                             T formatConfig, boolean readable, boolean writable, boolean blockSplittable, boolean compressible, List<String> extensions, String defaultName){
+    this.matcher = new BasicFormatMatcher(this, fs, extensions, compressible);
     this.readable = readable;
     this.writable = writable;
     this.context = context;
     this.blockSplittable = blockSplittable;
+    this.compressible = compressible;
     this.fs = fs;
     this.storageConfig = storageConfig;
     this.formatConfig = formatConfig;
-    this.name = name == null ? defaultName : name; 
+    this.name = name == null ? defaultName : name;
+    this.codecFactory = new CompressionCodecFactory(new Configuration(fs.getUnderlying().getConf()));
   }
   
   @Override
@@ -88,9 +96,13 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
    * 
    * @return True if splittable.
    */
-  public boolean isBlockSplittable(){
+  public boolean isBlockSplittable() {
     return blockSplittable;
-  };
+  }
+
+  public boolean isCompressible() {
+    return compressible;
+  }
 
   public abstract RecordReader getRecordReader(FragmentContext context, FileWork fileWork, List<SchemaPath> columns) throws ExecutionSetupException;
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
index 6015865..fc2ae2c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
@@ -32,8 +32,11 @@ import org.apache.drill.exec.physical.base.AbstractGroupScan;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.physical.base.Size;
 import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.dfs.FileSelection;
+import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
+import org.apache.drill.exec.store.easy.text.TextFormatPlugin;
 import org.apache.drill.exec.store.schedule.AffinityCreator;
 import org.apache.drill.exec.store.schedule.AssignmentCreator;
 import org.apache.drill.exec.store.schedule.BlockMapBuilder;
@@ -70,11 +73,19 @@ public class EasyGroupScan extends AbstractGroupScan{
       @JacksonInject StoragePluginRegistry engineRegistry, // 
       @JsonProperty("columns") List<SchemaPath> columns
       ) throws IOException, ExecutionSetupException {
-    
+
     this.formatPlugin = (EasyFormatPlugin<?>) engineRegistry.getFormatPlugin(storageConfig, formatConfig);
     Preconditions.checkNotNull(formatPlugin, "Unable to load format plugin for provided format config.");
     this.selection = new FileSelection(files, true);
-    this.maxWidth = selection.getFileStatusList(formatPlugin.getFileSystem()).size();
+    try{
+      BlockMapBuilder b = new BlockMapBuilder(formatPlugin.getFileSystem().getUnderlying(), formatPlugin.getContext().getBits());
+      this.chunks = b.generateFileWork(selection.getFileStatusList(formatPlugin.getFileSystem()), formatPlugin.isBlockSplittable());
+      this.endpointAffinities = AffinityCreator.getAffinityMap(chunks);
+    }catch(IOException e){
+      logger.warn("Failure determining endpoint affinity.", e);
+      this.endpointAffinities = Collections.emptyList();
+    }
+    maxWidth = chunks.size();
     this.columns = columns;
   }
   
@@ -84,9 +95,17 @@ public class EasyGroupScan extends AbstractGroupScan{
       List<SchemaPath> columns
       ) throws IOException{
     this.selection = selection;
-    this.maxWidth = selection.getFileStatusList(formatPlugin.getFileSystem()).size();
     this.formatPlugin = formatPlugin;
     this.columns = columns;
+    try{
+      BlockMapBuilder b = new BlockMapBuilder(formatPlugin.getFileSystem().getUnderlying(), formatPlugin.getContext().getBits());
+      this.chunks = b.generateFileWork(selection.getFileStatusList(formatPlugin.getFileSystem()), formatPlugin.isBlockSplittable());
+      this.endpointAffinities = AffinityCreator.getAffinityMap(chunks);
+    }catch(IOException e){
+      logger.warn("Failure determining endpoint affinity.", e);
+      this.endpointAffinities = Collections.emptyList();
+    }
+    maxWidth = chunks.size();
   }
 
   @Override
@@ -127,15 +146,10 @@ public class EasyGroupScan extends AbstractGroupScan{
   
   @Override
   public List<EndpointAffinity> getOperatorAffinity() {
+    assert chunks != null && chunks.size() > 0;
     if (this.endpointAffinities == null) {
-      try{
-      BlockMapBuilder b = new BlockMapBuilder(formatPlugin.getFileSystem().getUnderlying(), formatPlugin.getContext().getBits());
-      this.chunks = b.generateFileWork(selection.getFileStatusList(formatPlugin.getFileSystem()), formatPlugin.isBlockSplittable());
-      this.endpointAffinities = AffinityCreator.getAffinityMap(chunks);
-      }catch(IOException e){
-        logger.warn("Failure determining endpoint affinity.", e);
-        this.endpointAffinities = Collections.emptyList();
-      }
+        logger.debug("chunks: {}", chunks.size());
+        this.endpointAffinities = AffinityCreator.getAffinityMap(chunks);
     }
     return this.endpointAffinities;
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java
index 6631a6a..c01fb84 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasySubScan.java
@@ -25,8 +25,10 @@ import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.exception.DrillbitStartupException;
 import org.apache.drill.exec.physical.base.AbstractSubScan;
 import org.apache.drill.exec.store.StoragePluginRegistry;
+import org.apache.drill.exec.store.dfs.NamedFormatPluginConfig;
 import org.apache.drill.exec.store.schedule.CompleteFileWork.FileWorkImpl;
 
 import com.fasterxml.jackson.annotation.JacksonInject;
@@ -52,7 +54,7 @@ public class EasySubScan extends AbstractSubScan{
       @JacksonInject StoragePluginRegistry engineRegistry, // 
       @JsonProperty("columns") List<SchemaPath> columns //
       ) throws IOException, ExecutionSetupException {
-    
+
     this.formatPlugin = (EasyFormatPlugin<?>) engineRegistry.getFormatPlugin(storageConfig, formatConfig);
     Preconditions.checkNotNull(this.formatPlugin);
     this.files = files;
@@ -82,7 +84,13 @@ public class EasySubScan extends AbstractSubScan{
 
   @JsonProperty("format")
   public FormatPluginConfig getFormatConfig(){
-    return formatPlugin.getConfig();
+    if (formatPlugin.getName() != null) {
+      NamedFormatPluginConfig namedConfig = new NamedFormatPluginConfig();
+      namedConfig.name = formatPlugin.getName();
+      return namedConfig;
+    } else {
+      return formatPlugin.getConfig();
+    }
   }
   
   @JsonProperty("columns")


[04/10] DRILL-332: Support for decimal data type

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalVarchar.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalVarchar.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalVarchar.java
new file mode 100644
index 0000000..52e5513
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastDecimalVarchar.java
@@ -0,0 +1,212 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+
+<#list cast.types as type>
+
+<#if type.major == "DecimalSimpleVarChar"> <#-- Cast function template for conversion from Decimal9, Decimal18 to VarChar -->
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.SwappedByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+    @Param ${type.from}Holder in;
+    @Param BigIntHolder len;
+    @Workspace ByteBuf buffer;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[${type.bufferSize}]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+    }
+
+    public void eval() {
+
+        StringBuilder str = new StringBuilder();
+
+        if (in.value < 0) {
+            // Negative value, add '-' to the string
+            str.append("-");
+
+            // Negate the number
+            in.value *= -1;
+        }
+
+        ${type.javatype} separator = (${type.javatype}) Math.pow(10, in.scale);
+
+        str.append(in.value / separator);
+
+        if (in.scale > 0) {
+            str.append(".");
+
+            String fractionalPart = String.valueOf(in.value % separator);
+
+            /* Since we are taking modulus to find fractional part,
+             * we will miss printing the leading zeroes in the fractional part
+             * Account for those zeroes
+             *
+             * Eg: 1.0002
+             * Scale: 4
+             *
+             * Stored as: 10002
+             *
+             * We print the integer part as 10002 / 10000 = 1
+             * We print the fractional part as 10002 % 10000 = 2
+             *
+             * We missed the initial zeroes in the fractional part. Below logic accounts for this case
+             */
+            str.append(org.apache.drill.common.util.DecimalUtility.toStringWithZeroes((in.value % separator), in.scale));
+        }
+
+        out.buffer = buffer;
+        out.start = 0;
+        out.end = Math.min((int)len.value, str.length()); // truncate if target type has length smaller than that of input's string
+        out.buffer.setBytes(0, String.valueOf(str.substring(0,out.end)).getBytes());
+    }
+}
+<#elseif type.major == "DecimalComplexVarChar"> <#-- Cast function template for conversion from Decimal28Sparse, Decimal38Sparse to VarChar -->
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+    @Param ${type.from}Holder in;
+    @Param BigIntHolder len;
+    @Workspace ByteBuf buffer;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[${type.bufferSize}]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+    }
+
+    public void eval() {
+
+        StringBuilder str = new StringBuilder();
+        int index = 0;
+        int fractionalStartIndex = ${type.arraySize} - org.apache.drill.common.util.DecimalUtility.roundUp(in.scale);
+
+        // Find the first non-zero value in the integer part of the decimal
+        while (index < fractionalStartIndex && in.getInteger(index) == 0)  {
+            index++;
+        }
+
+
+        // If we have valid digits print '-' sign
+        if ((in.sign == true) && index < ${type.arraySize}) {
+            str.append("-");
+        }
+
+        // If all the integer digits are zero, print a single zero
+        if (index == fractionalStartIndex) {
+            str.append("0");
+        }
+
+        boolean fillZeroes = false;
+
+        // convert the integer part
+        while (index < fractionalStartIndex) {
+            int value =  in.getInteger(index++);
+
+            if (fillZeroes == true) {
+                str.append(org.apache.drill.common.util.DecimalUtility.toStringWithZeroes(value, org.apache.drill.common.util.DecimalUtility.MAX_DIGITS));
+            } else {
+                str.append(value);
+                fillZeroes = true;
+            }
+            //str.append(value);
+        }
+
+        if (fractionalStartIndex < ${type.arraySize}) {
+            // We have fractional part, print '.'
+            str.append(".");
+
+            /* convert the fractional part (except the last decimal digit,
+             * as it might have padding that needs to be stripped
+             */
+            while (fractionalStartIndex < ${type.arraySize} - 1) {
+                int value = in.getInteger(fractionalStartIndex++);
+
+                // Fill zeroes at the beginning of the decimal digit
+                str.append(org.apache.drill.common.util.DecimalUtility.toStringWithZeroes(value, org.apache.drill.common.util.DecimalUtility.MAX_DIGITS));
+            }
+
+            // Last decimal digit, strip the extra zeroes we may have padded
+            int actualDigits = in.scale % org.apache.drill.common.util.DecimalUtility.MAX_DIGITS;
+
+            int lastFractionalDigit = in.getInteger(${type.arraySize} - 1);
+
+            if (actualDigits != 0) {
+
+                // Strip padded zeroes at the end that is not part of the scale
+                lastFractionalDigit /= (int) (Math.pow(10, org.apache.drill.common.util.DecimalUtility.MAX_DIGITS - actualDigits));
+                str.append(org.apache.drill.common.util.DecimalUtility.toStringWithZeroes(lastFractionalDigit, actualDigits));
+            } else {
+                // Last digit does not have any padding print as is
+                str.append(org.apache.drill.common.util.DecimalUtility.toStringWithZeroes(lastFractionalDigit, org.apache.drill.common.util.DecimalUtility.MAX_DIGITS));
+            }
+
+
+        }
+
+        out.buffer = buffer;
+        out.start = 0;
+        out.end = Math.min((int)len.value, str.length()); // truncate if target type has length smaller than that of input's string
+        out.buffer.setBytes(0, String.valueOf(str.substring(0,out.end)).getBytes());
+    }
+}
+</#if> <#-- type.major -->
+</#list>
\ No newline at end of file
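
The trickiest part of the simple Decimal-to-VarChar template above is restoring the leading zeroes of the fractional part after the modulus. The standalone sketch below shows the same idea on a plain scaled long; toStringWithZeroes is re-implemented here with its presumed behaviour (left-pad to a fixed digit count), since DecimalUtility itself is not part of this file.

public class DecimalToStringSketch {

  // Presumed behaviour of DecimalUtility.toStringWithZeroes: left-pad with zeroes
  // so the value prints with exactly 'digits' characters.
  static String toStringWithZeroes(long value, int digits) {
    String s = Long.toString(value);
    StringBuilder sb = new StringBuilder();
    for (int i = s.length(); i < digits; i++) {
      sb.append('0');
    }
    return sb.append(s).toString();
  }

  static String decimalToString(long value, int scale) {
    StringBuilder str = new StringBuilder();
    if (value < 0) {
      str.append('-');
      value = -value;
    }
    long separator = (long) Math.pow(10, scale);
    str.append(value / separator);
    if (scale > 0) {
      // 1002 with scale 3 is 1.002; printing (1002 % 1000) alone would drop the leading zeroes.
      str.append('.').append(toStringWithZeroes(value % separator, scale));
    }
    return str.toString();
  }

  public static void main(String[] args) {
    System.out.println(decimalToString(1002L, 3));   // prints 1.002
    System.out.println(decimalToString(-12345L, 2)); // prints -123.45
  }
}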

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastFloatDecimal.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastFloatDecimal.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastFloatDecimal.java
new file mode 100644
index 0000000..9ebb86f
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastFloatDecimal.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+
+<#list cast.types as type>
+
+<#-- Cast function template for conversion from Float to Decimal9, Decimal18, Decimal28, Decimal38 -->
+<#if type.major == "FloatDecimalComplex" || type.major == "DoubleDecimalComplex" || type.major == "FloatDecimalSimple" || type.major == "DoubleDecimalSimple">
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+@Param ${type.from}Holder in;
+<#if type.major == "FloatDecimalComplex" || type.major == "DoubleDecimalComplex">
+@Workspace ByteBuf buffer;
+</#if>
+@Param BigIntHolder precision;
+@Param BigIntHolder scale;
+@Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        <#if type.major == "FloatDecimalComplex" || type.major == "DoubleDecimalComplex">
+        int size = ${type.arraySize} * (org.apache.drill.common.util.DecimalUtility.integerSize);
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+        </#if>
+    }
+
+    public void eval() {
+
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+
+        <#if type.major == "FloatDecimalComplex" || type.major == "DoubleDecimalComplex">
+        out.start = 0;
+        out.buffer = buffer;
+
+        if (in.value < 0) {
+            out.sign = true;
+        }
+
+        // Initialize the buffer
+        for (int i = 0; i < ${type.arraySize}; i++) {
+            out.setInteger(i, 0);
+        }
+        // Assign the integer part of the decimal to the output holder
+        org.apache.drill.common.util.DecimalUtility.getSparseFromBigDecimal(new java.math.BigDecimal(String.valueOf(in.value)), out.buffer, out.start, out.scale, out.precision, out.nDecimalDigits);
+
+        <#elseif type.to.endsWith("Decimal9")>
+        out.value = org.apache.drill.common.util.DecimalUtility.getDecimal9FromBigDecimal(new java.math.BigDecimal(String.valueOf(in.value)), out.scale, out.precision);
+        <#elseif type.to.endsWith("Decimal18")>
+        out.value = org.apache.drill.common.util.DecimalUtility.getDecimal18FromBigDecimal(new java.math.BigDecimal(String.valueOf(in.value)), out.scale, out.precision);
+        </#if>
+    }
+}
+</#if>
+</#list>
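
For the Decimal9/Decimal18 targets the float cast above delegates to DecimalUtility.getDecimal9FromBigDecimal / getDecimal18FromBigDecimal, which are not shown in this patch. A plausible standalone equivalent, given only to illustrate what "float to scaled integer" means (the real helpers may differ in rounding and overflow handling), is:

import java.math.BigDecimal;
import java.math.RoundingMode;

public class FloatToDecimalSketch {

  // Represent a floating point value as an integer scaled by 10^scale,
  // e.g. 12.5 with scale 2 becomes 1250. Overflow is not handled in this sketch.
  static long toScaledLong(double value, int scale) {
    return new BigDecimal(String.valueOf(value))
        .setScale(scale, RoundingMode.HALF_UP)
        .unscaledValue()
        .longValue();
  }

  public static void main(String[] args) {
    System.out.println(toScaledLong(12.5, 2));    // 1250
    System.out.println(toScaledLong(-0.001, 4));  // -10
  }
}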

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastIntDecimal.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastIntDecimal.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastIntDecimal.java
new file mode 100644
index 0000000..1efe21f
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastIntDecimal.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+<#list cast.types as type>
+<#if type.major == ("IntDecimal") || type.major == ("BigIntDecimal")>
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+    @Param ${type.from}Holder in;
+    <#if type.to.startsWith("Decimal28") || type.to.startsWith("Decimal38")>
+    @Workspace ByteBuf buffer;
+    </#if>
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        <#if type.to.startsWith("Decimal28") || type.to.startsWith("Decimal38")>
+        int size = ${type.arraySize} * (org.apache.drill.common.util.DecimalUtility.integerSize);
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+        </#if>
+
+    }
+
+    public void eval() {
+
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+
+        <#if type.to == "Decimal9" || type.to == "Decimal18">
+        out.value = (${type.javatype}) in.value;
+
+        // converting from integer to decimal, pad zeroes if scale is non zero
+        out.value = (${type.javatype}) (out.value * Math.pow(10, scale.value));
+
+        <#else>
+        out.start = 0;
+        out.buffer = buffer;
+
+        // Initialize the buffer
+        for (int i = 0; i < ${type.arraySize}; i++) {
+            out.setInteger(i, 0);
+        }
+
+        // check if input is a negative number and store the sign
+        if (in.value < 0) {
+            out.sign = true;
+            /* we are going to split the input into base 1 billion numbers
+             * by dividing, if we leave the input to be negative all the splits
+             * will be negative. We store the sign in the output at the end in the
+             * most significant bit
+             */
+            in.value = in.value * -1;
+        }
+
+        // Figure out how many array positions to be left for the scale part
+        int scaleSize = org.apache.drill.common.util.DecimalUtility.roundUp((int) scale.value);
+        int integerIndex = (${type.arraySize} - scaleSize - 1);
+
+        while (in.value > 0 && integerIndex >= 0) {
+            out.setInteger(integerIndex--, (int) (in.value % org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+            in.value = in.value / org.apache.drill.common.util.DecimalUtility.DIGITS_BASE;
+        }
+
+        </#if>
+    }
+}
+</#if> <#-- type.major -->
+</#list>
\ No newline at end of file
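
The Decimal28/Decimal38 branch of the integer cast stores the magnitude as an array of base-1-billion digits, most significant digit first. A standalone sketch of just the digit-splitting step (ignoring the slots reserved for the scale; the DIGITS_BASE value and array layout are assumptions taken from the templates above, not from DecimalUtility itself):

import java.util.Arrays;

public class Base1BillionSketch {

  static final int DIGITS_BASE = 1000000000; // 10^9, as used by the decimal templates

  // Split a non-negative long into base-1-billion digits, most significant first.
  static int[] split(long value, int nDigits) {
    int[] digits = new int[nDigits];
    int index = nDigits - 1;
    while (value > 0 && index >= 0) {
      digits[index--] = (int) (value % DIGITS_BASE);
      value /= DIGITS_BASE;
    }
    return digits;
  }

  public static void main(String[] args) {
    // 12345678901 = 12 * 10^9 + 345678901
    System.out.println(Arrays.toString(split(12345678901L, 4))); // [0, 0, 12, 345678901]
  }
}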

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastSrcDecimalSimple.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastSrcDecimalSimple.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastSrcDecimalSimple.java
new file mode 100644
index 0000000..242fdb1
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastSrcDecimalSimple.java
@@ -0,0 +1,256 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+<#list cast.types as type>
+
+<#-- Template code for converting from Decimal9, Decimal18 to Decimal28Dense and Decimal38Dense -->
+<#if type.major == "DecimalSimpleDecimalDense">
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+    @Param ${type.from}Holder in;
+    <#if type.to.startsWith("Decimal28") || type.to.startsWith("Decimal38")>
+    @Workspace ByteBuf buffer;
+    </#if>
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        int size = (${type.arraySize} * (org.apache.drill.common.util.DecimalUtility.integerSize));
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+    }
+
+    public void eval() {
+
+        out.buffer = buffer;
+        out.start = 0;
+
+        // Re-initialize the buffer every time
+        for (int i = 0; i < ${type.arraySize}; i++) {
+            out.setInteger(i, 0);
+        }
+
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+
+        out.buffer = buffer;
+        out.start = 0;
+        out.sign = (in.value < 0) ? true : false;
+
+        /* Since we will be dividing the decimal value with base 1 billion
+         * we don't want negative results if the decimal is negative.
+         */
+        long value = (in.value < 0) ? (in.value * -1) : in.value;
+
+        int index = out.nDecimalDigits - 1;
+
+        // store the decimal value as sequence of integers of base 1 billion.
+        while (value > 0) {
+
+            out.setInteger(index, (int) (value % org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+            value = value/org.apache.drill.common.util.DecimalUtility.DIGITS_BASE;
+            index--;
+        }
+
+        /* We have stored the decimal value in the intermediate format, which is basically that the
+         * scale and integer part of the decimal together, with no additional zeroes padded to the
+         * scale. Now we simply need to shift the bits around to get a more compact representation
+         */
+        int[] mask = {0x03, 0x0F, 0x3F, 0xFF};
+        int maskIndex = 0;
+        int shiftOrder = 2;
+
+        // Start shifting bits just after the first integer
+        int byteIndex = in.WIDTH - (org.apache.drill.common.util.DecimalUtility.integerSize + 1);
+
+        while (byteIndex >= 0) {
+
+            /* get the last bits that need to shifted to the next byte */
+            byte shiftBits = (byte) ((out.buffer.getByte(byteIndex) & mask[maskIndex]) << (8 - shiftOrder));
+
+            int shiftOrder1 = ((byteIndex % 4) == 0) ? shiftOrder - 2 : shiftOrder;
+
+            /* transfer the bits from the left to the right */
+            out.buffer.setByte(byteIndex + 1,  (byte) (((out.buffer.getByte(byteIndex + 1) & 0xFF) >>> (shiftOrder1)) | shiftBits));
+
+            byteIndex--;
+
+            if (byteIndex % 4 == 0) {
+                /* We are on a border */
+                shiftOrder += 2;
+                maskIndex++;
+            }
+        }
+    }
+}
+
+<#-- Template code for converting from Decimal9, Decimal18 to Decimal28Sparse and Decimal38Sparse -->
+<#elseif type.major == "DecimalSimpleDecimalSparse">
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc{
+
+    @Param ${type.from}Holder in;
+    @Workspace ByteBuf buffer;
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        int size = (${type.arraySize} * (org.apache.drill.common.util.DecimalUtility.integerSize));
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+    }
+
+    public void eval() {
+        out.buffer = buffer;
+        out.start = 0;
+
+        // Re-initialize the buffer every time
+        for (int i = 0; i < ${type.arraySize}; i++) {
+            out.setInteger(i, 0);
+        }
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+
+        out.buffer = buffer;
+        out.start = 0;
+        out.sign = (in.value < 0) ? true : false;
+
+        /* Since we will be dividing the decimal value with base 1 billion
+         * we don't want negative results if the decimal is negative.
+         */
+        long value = (in.value < 0) ? (in.value * -1) : in.value;
+
+        int index = out.nDecimalDigits - 1;
+
+        // Separate out the scale part and store it
+        int remainingScale = in.scale;
+
+        while(remainingScale > 0) {
+
+            int power = (remainingScale % org.apache.drill.common.util.DecimalUtility.MAX_DIGITS);
+            int padding = 1;
+
+            if (power == 0) {
+                power = 9;
+            } else {
+                padding = (int) (Math.pow(10, (org.apache.drill.common.util.DecimalUtility.MAX_DIGITS - power)));
+            }
+
+            int mask = (int) Math.pow(10, power);
+
+            out.setInteger(index, (int) ((value % mask) * padding));
+
+            value = value/mask;
+
+            remainingScale -= power;
+
+            index--;
+        }
+
+        while (value > 0) {
+            out.setInteger(index, (int) (value % org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+            value = value/org.apache.drill.common.util.DecimalUtility.DIGITS_BASE;
+            index--;
+        }
+    }
+}
+
+<#-- Template code for converting from Decimal9 to Decimal18 -->
+<#elseif type.major == "DecimalSimpleDecimalSimple">
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+    @Param ${type.from}Holder in;
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+    }
+
+    public void eval() {
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+        out.value = in.value;
+
+        // Truncate or pad additional zeroes if the output scale is different from input scale
+        out.value = (${type.javatype}) (out.value * ((int) Math.pow(10, (out.scale - in.scale))));
+    }
+}
+</#if>
+</#list>
\ No newline at end of file
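
The Decimal9-to-Decimal18 branch above rescales the stored integer by 10^(output scale - input scale). Note that a plain multiplier only covers the padding direction; when the output scale is smaller, the negative exponent cast to int becomes 0. A standalone sketch handling both directions, purely as an illustration of the intent described in the comment:

public class RescaleSketch {

  // Rescale an integer-backed decimal from one scale to another.
  // Padding (targetScale > sourceScale) appends zeroes; truncation drops digits.
  static long rescale(long value, int sourceScale, int targetScale) {
    int diff = targetScale - sourceScale;
    if (diff >= 0) {
      return value * (long) Math.pow(10, diff);
    }
    return value / (long) Math.pow(10, -diff);
  }

  public static void main(String[] args) {
    System.out.println(rescale(12345L, 2, 4)); // 123.45 -> 123.4500 => 1234500
    System.out.println(rescale(12345L, 2, 1)); // 123.45 -> 123.4    => 1234
  }
}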

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/CastVarCharDecimal.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/CastVarCharDecimal.java b/exec/java-exec/src/main/codegen/templates/Decimal/CastVarCharDecimal.java
new file mode 100644
index 0000000..b0214f4
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/CastVarCharDecimal.java
@@ -0,0 +1,331 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<@pp.dropOutputFile />
+
+<#list cast.types as type>
+
+<#if type.major == "VarCharDecimalSimple">  <#-- Cast function template for conversion from VarChar to Decimal9, Decimal18 -->
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+    @Param ${type.from}Holder in;
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+    }
+
+    public void eval() {
+
+        // Assign the scale and precision
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+
+        int readIndex = in.start;
+        int endIndex  = in.end;
+
+        // Check if its an empty string
+        if (endIndex - readIndex == 0) {
+            throw new org.apache.drill.common.exceptions.DrillRuntimeException("Empty String, cannot cast to Decimal");
+        }
+        // Starting position of fractional part
+        int scaleIndex = -1;
+        // true if we have a negative sign at the beginning
+        boolean negative = false;
+
+        // Check the first byte for '-'
+        byte next = (in.buffer.getByte(readIndex));
+
+        // If its a negative number
+        if (next == '-') {
+            negative = true;
+            readIndex++;
+        }
+
+
+        /* Below two fields are used to compute if the precision is sufficient to store
+         * the scale along with the integer digits in the string
+         */
+        int integerStartIndex = readIndex;
+        int integerEndIndex = endIndex;
+
+        int radix = 10;
+
+        // Start parsing the digits
+        while (readIndex < endIndex) {
+            next = in.buffer.getByte(readIndex++);
+
+            if (next == '.') {
+                scaleIndex = readIndex;
+                // Integer end index is just before the scale part begins
+                integerEndIndex = scaleIndex - 1;
+                // If the number of fractional digits is > scale specified we might have to truncate
+                endIndex = (scaleIndex + out.scale) < endIndex ? (scaleIndex + out.scale) : endIndex;
+
+                continue;
+            } else {
+                // If it's not a '.' we expect only numbers
+                next = (byte) Character.digit(next, radix);
+            }
+
+            if (next == -1) {
+                // not a valid digit
+                byte[] buf = new byte[in.end - in.start];
+                in.buffer.getBytes(in.start, buf, 0, in.end - in.start);
+                throw new org.apache.drill.common.exceptions.DrillRuntimeException(new String(buf));
+            }
+            out.value *= radix;
+            out.value += next;
+        }
+
+        // Check if the provided precision is enough to store the given input
+        if (((integerEndIndex - integerStartIndex) + out.scale) > out.precision) {
+            byte[] buf = new byte[in.end - in.start];
+            in.buffer.getBytes(in.start, buf, 0, in.end - in.start);
+            throw new org.apache.drill.common.exceptions.DrillRuntimeException("Precision is insufficient for the provided input: " + new String(buf) + " Precision: " + out.precision +
+                                                                               " Total Digits: " + (out.scale + (integerEndIndex - integerStartIndex)));
+        }
+
+        // Number of fractional digits in the input
+        int fractionalDigits = (scaleIndex == -1) ? 0 : ((endIndex - scaleIndex));
+
+        // Pad the number with zeroes if number of fractional digits is less than scale
+        if (fractionalDigits < scale.value) {
+            out.value *= Math.pow(10, scale.value - fractionalDigits);
+        }
+
+        // Negate the number if we saw a -ve sign
+        if (negative == true) {
+            out.value *= -1;
+        }
+    }
+}
+
+<#elseif type.major == "VarCharDecimalComplex">  <#-- Cast function template for conversion from VarChar to Decimal28Sparse, Decimal38Sparse -->
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/gcast/Cast${type.from}${type.to}.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl.gcast;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.common.util.DecimalUtility;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.DECIMAL_CAST, nulls=NullHandling.NULL_IF_NULL)
+public class Cast${type.from}${type.to} implements DrillSimpleFunc {
+
+    @Param ${type.from}Holder in;
+    @Workspace ByteBuf buffer;
+    @Param BigIntHolder precision;
+    @Param BigIntHolder scale;
+    @Output ${type.to}Holder out;
+
+    public void setup(RecordBatch incoming) {
+        int size = ${type.arraySize} * (org.apache.drill.common.util.DecimalUtility.integerSize);
+        buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+        buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+    }
+
+    public void eval() {
+
+        out.buffer = buffer;
+        out.start  = 0;
+
+        out.scale = (int) scale.value;
+        out.precision = (int) precision.value;
+
+        // Initialize the output buffer
+        for (int i = 0; i < ${type.arraySize}; i++) {
+            out.setInteger(i, 0);
+        }
+
+        int startIndex;
+        int readIndex = in.start;
+        int integerDigits = 0;
+        int fractionalDigits = 0;
+        int scaleIndex = -1;
+        int scaleEndIndex = in.end;
+
+        byte[] buf1 = new byte[in.end - in.start];
+        in.buffer.getBytes(in.start, buf1, 0, in.end - in.start);
+
+        Byte next = in.buffer.getByte(readIndex);
+
+
+        if (next == '-') {
+            readIndex++;
+            out.sign = true;
+        }
+
+        if (next == '.') {
+            readIndex++;
+            scaleIndex = readIndex; // Fractional part starts at the first position
+        }
+
+        if (in.end - readIndex == 0) {
+            throw new org.apache.drill.common.exceptions.DrillRuntimeException("Empty String, cannot cast to Decimal");
+        }
+
+        // Store start index for the second pass
+        startIndex = readIndex;
+
+        int radix = 10;
+
+        /* This is the first pass, we get the number of integer digits and based on the provided scale
+         * we compute which index into the ByteBuf we start storing the integer part of the Decimal
+         */
+        if (scaleIndex == -1) {
+
+            while (readIndex < in.end) {
+                next = in.buffer.getByte(readIndex++);
+
+                if (next == '.') {
+
+                    // We have found the decimal point. we can compute the starting index into the Decimal's bytebuf
+                    scaleIndex = readIndex;
+                    // We may have to truncate fractional part if > scale
+                    scaleEndIndex = ((in.end - scaleIndex) <= out.scale) ? in.end : (scaleIndex + out.scale);
+                    break;
+                }
+
+                // If it's not a '.' we expect only numbers
+                next = (byte) Character.digit(next, radix);
+
+                if (next == -1) {
+                    // not a valid digit
+                    byte[] buf = new byte[in.end - in.start];
+                    in.buffer.getBytes(in.start, buf, 0, in.end - in.start);
+                    throw new NumberFormatException(new String(buf));
+                }
+
+                integerDigits++;
+            }
+        }
+
+        /* Based on the number of integer digits computed and the scale throw an
+         * exception if the provided precision is not sufficient to store the value
+         */
+        if (integerDigits + out.scale > out.precision) {
+            byte[] buf = new byte[in.end - in.start];
+            in.buffer.getBytes(in.start, buf, 0, in.end - in.start);
+            throw new org.apache.drill.common.exceptions.DrillRuntimeException("Precision is insufficient for the provided input: " + new String(buf) + " Precision: " + out.precision + " Total Digits: " + (out.scale + integerDigits));
+        }
+
+
+        // Compute the number of slots needed in the ByteBuf to store the integer and fractional part
+        int scaleRoundedUp   = org.apache.drill.common.util.DecimalUtility.roundUp(out.scale);
+        int integerRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(integerDigits);
+
+        int ndigits = 0;
+
+        int decimalBufferIndex = ${type.arraySize} - scaleRoundedUp - 1;
+
+        /* Compute the end index of the integer part.
+         * If we haven't seen a '.' then entire string is integer.
+         * If we have seen a '.' it ends before the '.'
+         */
+        int integerEndIndex = (scaleIndex == -1) ? (in.end - 1) : (scaleIndex - 2);
+
+        // Traverse and extract the integer part
+        while (integerEndIndex >= startIndex) {
+            next = in.buffer.getByte(integerEndIndex--);
+
+            next = (byte) Character.digit(next, radix);
+
+            int value = (((int) Math.pow(10, ndigits)) * next) + (out.getInteger(decimalBufferIndex));
+            out.setInteger(decimalBufferIndex, value);
+
+            ndigits++;
+
+            /* We store the entire decimal as base 1 billion values, which has maximum of 9 digits (MAX_DIGITS)
+             * Once we have stored MAX_DIGITS in a given slot move to the next slot.
+             */
+            if (ndigits >= org.apache.drill.common.util.DecimalUtility.MAX_DIGITS) {
+                ndigits = 0;
+                decimalBufferIndex--;
+            }
+        }
+
+        // Traverse and extract the fractional part
+        decimalBufferIndex = ${type.arraySize} - scaleRoundedUp;
+        ndigits = 0;
+
+        if (scaleIndex != -1 && out.scale > 0) {
+            while (scaleIndex < scaleEndIndex) {
+
+                // check if we have scanned MAX_DIGITS and we need to move to the next index
+                if (ndigits >= org.apache.drill.common.util.DecimalUtility.MAX_DIGITS) {
+                    ndigits = 0;
+                    decimalBufferIndex++;
+                }
+
+                next = in.buffer.getByte(scaleIndex++);
+
+                // We expect only numbers beyond this
+                next = (byte) Character.digit(next, radix);
+
+                if (next == -1) {
+                    // not a valid digit
+                    byte[] buf = new byte[in.end - in.start];
+                    in.buffer.getBytes(in.start, buf, 0, in.end - in.start);
+                    throw new NumberFormatException(new String(buf));
+                }
+                int value = (out.getInteger(decimalBufferIndex) * radix) + next;
+                out.setInteger(decimalBufferIndex, value);
+
+                // added another digit to the current index
+                ndigits++;
+            }
+            // Pad zeroes in the fractional part so that number of digits = MAX_DIGITS
+            int padding = (int) Math.pow(10, org.apache.drill.common.util.DecimalUtility.MAX_DIGITS - ndigits);
+            out.setInteger(decimalBufferIndex, out.getInteger(decimalBufferIndex) * padding);
+        }
+    }
+}
+</#if> <#-- type.major -->
+</#list>
\ No newline at end of file
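
The VarChar-to-Decimal9/Decimal18 template above is essentially a hand-rolled parser that produces a scaled long: it reads an optional sign, accumulates digits, truncates fractional digits beyond the requested scale, and pads with zeroes when there are too few. The following compact standalone equivalent (String instead of ByteBuf, precision checking omitted) mirrors those steps:

public class ParseDecimalSketch {

  // Parse a plain decimal string into a long scaled by 10^scale.
  // Extra fractional digits are truncated; missing ones are padded with zeroes.
  static long parse(String s, int scale) {
    if (s.isEmpty()) {
      throw new IllegalArgumentException("Empty String, cannot cast to Decimal");
    }
    boolean negative = s.charAt(0) == '-';
    int i = negative ? 1 : 0;
    long value = 0;
    int fractionalDigits = -1; // -1 means the decimal point has not been seen yet
    for (; i < s.length(); i++) {
      char c = s.charAt(i);
      if (c == '.') {
        fractionalDigits = 0;
        continue;
      }
      int digit = Character.digit(c, 10);
      if (digit < 0) {
        throw new NumberFormatException(s);
      }
      if (fractionalDigits >= 0) {
        if (fractionalDigits == scale) {
          continue; // truncate digits beyond the requested scale
        }
        fractionalDigits++;
      }
      value = value * 10 + digit;
    }
    // Pad zeroes if the input had fewer fractional digits than the scale
    int seen = Math.max(fractionalDigits, 0);
    value *= (long) Math.pow(10, scale - seen);
    return negative ? -value : value;
  }

  public static void main(String[] args) {
    System.out.println(parse("-123.4", 2));  // -12340
    System.out.println(parse("0.9876", 2));  // 98
  }
}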

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/Decimal/DecimalFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/Decimal/DecimalFunctions.java b/exec/java-exec/src/main/codegen/templates/Decimal/DecimalFunctions.java
new file mode 100644
index 0000000..4603853
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/Decimal/DecimalFunctions.java
@@ -0,0 +1,1078 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.drill.exec.expr.annotations.Workspace;
+
+<@pp.dropOutputFile />
+
+<#macro denseCompareBlock left right output>
+
+            int invert = 1;
+
+            outside: {
+
+                /* If signs are different then simply look at the
+                 * sign of the two inputs and determine which is greater
+                 */
+                if (left.sign != right.sign) {
+
+                    ${output} = (left.sign == true) ? -1 : 1;
+                    break outside;
+                } else if(left.sign == true) {
+                    /* Both inputs are negative, at the end we will
+                     * have to invert the comparison
+                     */
+                    invert = -1;
+                }
+
+                ${output} = 0;
+                for (int i = 0; i < left.WIDTH; i++) {
+                    byte leftByte  = left.buffer.getByte(left.start + i);
+                    byte rightByte = right.buffer.getByte(right.start + i);
+
+                    // Unsigned byte comparison
+                    if ((leftByte & 0xFF) > (rightByte & 0xFF)) {
+                        ${output} = 1;
+                        break;
+                    } else if ((leftByte & 0xFF) < (rightByte & 0xFF)) {
+                        ${output} = -1;
+                        break;
+                    }
+                }
+                ${output} *= invert; // invert the comparison if both were negative values
+            }
+</#macro>
+
+<#macro compareBlock holderType left right absCompare output>
+
+        outside:{
+
+            <#if absCompare == "false">
+            if (left.sign != right.sign) {
+                /* signs are different, we can simply look at the sign
+                 * and determine which decimal is greater
+                 */
+                ${output} = left.sign == true ? -1 : 1;
+                break outside;
+            } else if (left.sign == true) {
+                /* Because both decimals are negative, we swap them
+                 * and go ahead with the regular comparison
+                 */
+                left.swap(right);
+            }
+            </#if>
+            /* compute the number of integer digits in each decimal */
+            int leftInt  = left.precision - left.scale;
+            int rightInt = right.precision - right.scale;
+
+            /* compute the number of indexes required for storing integer digits */
+            int leftIntRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(leftInt);
+            int rightIntRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(rightInt);
+
+            /* compute number of indexes required for storing scale */
+            int leftScaleRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(left.scale);
+            int rightScaleRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(right.scale);
+
+            /* compute index of the most significant integer digits */
+            int leftIndex1 = left.nDecimalDigits - leftScaleRoundedUp - leftIntRoundedUp;
+            int rightIndex1 = right.nDecimalDigits - rightScaleRoundedUp - rightIntRoundedUp;
+
+            int leftStopIndex = left.nDecimalDigits - leftScaleRoundedUp;
+            int rightStopIndex = right.nDecimalDigits - rightScaleRoundedUp;
+
+            /* Discard the zeroes in the integer part */
+            while (leftIndex1 < leftStopIndex) {
+                if (left.getInteger(leftIndex1) != 0) {
+                     break;
+                }
+
+                /* Digit in this location is zero, decrement the actual number
+                 * of integer digits
+                 */
+                leftIntRoundedUp--;
+                leftIndex1++;
+            }
+
+            /* If we reached the stop index then the number of integers is zero */
+            if (leftIndex1 == leftStopIndex) {
+                leftIntRoundedUp = 0;
+            }
+
+            while (rightIndex1 < rightStopIndex) {
+                if (right.getInteger(rightIndex1) != 0) {
+                    break;
+                }
+
+                /* Digit in this location is zero, decrement the actual number
+                 * of integer digits
+                 */
+                rightIntRoundedUp--;
+                rightIndex1++;
+            }
+
+            if (rightIndex1 == rightStopIndex) {
+                rightIntRoundedUp = 0;
+            }
+
+            /* We have the accurate number of non-zero integer digits,
+             * if the number of integer digits are different then we can determine
+             * which decimal is larger and needn't go down to comparing individual values
+             */
+            if (leftIntRoundedUp > rightIntRoundedUp) {
+                ${output} = 1;
+                break outside;
+            }
+            else if (rightIntRoundedUp > leftIntRoundedUp) {
+                ${output} =  -1;
+                break outside;
+            }
+
+            /* The number of integer digits are the same, set the each index
+             * to the first non-zero integer and compare each digit
+             */
+            leftIndex1 = left.nDecimalDigits - leftScaleRoundedUp - leftIntRoundedUp;
+            rightIndex1 = right.nDecimalDigits - rightScaleRoundedUp - rightIntRoundedUp;
+
+            while (leftIndex1 < leftStopIndex && rightIndex1 < rightStopIndex) {
+                if (left.getInteger(leftIndex1) > right.getInteger(rightIndex1)) {
+                    ${output} = 1;
+                    break outside;
+                }
+                else if (right.getInteger(rightIndex1) > left.getInteger(leftIndex1)) {
+                    ${output} =  -1;
+                    break outside;
+                }
+
+                leftIndex1++;
+                rightIndex1++;
+            }
+
+            /* The integer part of both the decimal's are equal, now compare
+             * each individual fractional part. Set the index to be at the
+             * beginning of the fractional part
+             */
+            leftIndex1 = leftStopIndex;
+            rightIndex1 = rightStopIndex;
+
+            /* Stop indexes will be the end of the array */
+            leftStopIndex = left.nDecimalDigits;
+            rightStopIndex = right.nDecimalDigits;
+
+            /* compare the two fractional parts of the decimal */
+            while (leftIndex1 < leftStopIndex && rightIndex1 < rightStopIndex) {
+                if (left.getInteger(leftIndex1) > right.getInteger(rightIndex1)) {
+                    ${output} = 1;
+                    break outside;
+                }
+                else if (right.getInteger(rightIndex1) > left.getInteger(leftIndex1)) {
+                    ${output} = -1;
+                    break outside;
+                }
+
+                leftIndex1++;
+                rightIndex1++;
+            }
+
+            /* The fractional digits compared so far are equal; check
+             * whether either decimal has remaining fractional digits
+             * that are non-zero
+             */
+            while (leftIndex1 < leftStopIndex) {
+                if (left.getInteger(leftIndex1) != 0) {
+                    ${output} = 1;
+                    break outside;
+                }
+                leftIndex1++;
+            }
+
+            while(rightIndex1 < rightStopIndex) {
+                if (right.getInteger(rightIndex1) != 0) {
+                    ${output} = -1;
+                    break outside;
+                }
+                rightIndex1++;
+            }
+
+            /* Both decimal values are equal */
+            ${output} = 0;
+
+        }
+</#macro>
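+
+<#-- A hypothetical worked example of the comparison above, assuming DIGITS_BASE is 10^9
+     (each stored integer holds nine decimal digits): comparing 123456789.987654321 with
+     987.654, both sides first skip any leading zero integer digits, leaving one non-zero
+     integer digit on each side; the digits are then compared positionally, and since
+     123456789 > 987 the result is 1 without the fractional digits ever being inspected. -->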
+
+<#macro subtractBlock holderType left right result>
+
+            /* compute the result's scale and precision */
+            result.scale   = Math.max(left.scale, right.scale);
+            result.precision = result.maxPrecision;
+
+
+            int resultScaleRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(result.scale);
+            int resultIndex = result.nDecimalDigits- 1;
+
+            int leftScaleRoundedUp  = org.apache.drill.common.util.DecimalUtility.roundUp(left.scale);
+            int leftIntRoundedUp    = org.apache.drill.common.util.DecimalUtility.roundUp(left.precision - left.scale);
+            int rightScaleRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(right.scale);
+
+            int leftIndex  = left.nDecimalDigits - 1;
+            int rightIndex = right.nDecimalDigits - 1;
+
+            /* If the left scale is bigger, simply copy over the digits into result */
+            while (leftScaleRoundedUp > rightScaleRoundedUp) {
+                result.setInteger(resultIndex, left.getInteger(leftIndex));
+                leftIndex--;
+                resultIndex--;
+                leftScaleRoundedUp--;
+            }
+
+            /* If the right scale is bigger, subtract the right digits from zero at each array location */
+            int carry = 0;
+            while(rightScaleRoundedUp > leftScaleRoundedUp) {
+
+                int difference = 0 - right.getInteger(rightIndex) - carry;
+                rightIndex--;
+
+                if (difference < 0) {
+                    carry = 1;
+                    result.setInteger(resultIndex, (difference + org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+                } else {
+                    result.setInteger(resultIndex, difference);
+                    carry = 0;
+                }
+                resultIndex--;
+                rightScaleRoundedUp--;
+
+            }
+
+            /* Now that both scales are equal, perform the subtraction; use one of the
+             * scales as the terminating condition of the while loop
+             */
+            while (leftScaleRoundedUp > 0) {
+
+                int difference = left.getInteger(leftIndex) - right.getInteger(rightIndex) - carry;
+                leftIndex--;
+                rightIndex--;
+
+                if (difference < 0) {
+                    carry = 1;
+                    result.setInteger(resultIndex, (difference + org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+                } else {
+                    result.setInteger(resultIndex, difference);
+                    carry = 0;
+                }
+                resultIndex--;
+                leftScaleRoundedUp--;
+            }
+
+            /* Since we are guaranteed to have the left input >= the right input, iterate
+             * over the left input's remaining integer digits
+             */
+            while(leftIntRoundedUp > 0) {
+
+                int difference = left.getInteger(leftIndex);
+                leftIndex--;
+
+                if (rightIndex >= 0) {
+                    difference -= right.getInteger(rightIndex);
+                    rightIndex--;
+                }
+
+                difference -= carry;
+
+                if (difference < 0) {
+                    carry = 1;
+                    result.setInteger(resultIndex, (difference + org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+                } else {
+                    carry = 0;
+                    result.setInteger(resultIndex, difference);
+                }
+                resultIndex--;
+                leftIntRoundedUp--;
+            }
+
+</#macro>
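+
+<#-- An illustrative borrow example for the block above, assuming DIGITS_BASE is 10^9:
+     subtracting 0.300000001 from 1.000000000 (both scale 9) first handles the fractional
+     digit, 0 - 300000001, which is negative, so DIGITS_BASE is added to give 699999999 and
+     a borrow of 1 is carried into the integer digits, where 1 - 0 - 1 = 0; the result is
+     0.699999999. -->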
+
+<#macro addBlock holderType left right result>
+
+        /* compute the result scale */
+        result.scale = Math.max(left.scale, right.scale);
+        result.precision = result.maxPrecision;
+
+        int resultScaleRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(result.scale);
+
+        int leftScaleRoundedUp  = org.apache.drill.common.util.DecimalUtility.roundUp(left.scale);
+        int rightScaleRoundedUp = org.apache.drill.common.util.DecimalUtility.roundUp(right.scale);
+
+        /* starting index for each decimal */
+        int leftIndex  = left.nDecimalDigits - 1;
+        int rightIndex = right.nDecimalDigits - 1;
+        int resultIndex = result.nDecimalDigits - 1;
+
+        /* If one of the scales is larger then simply copy its digits over
+         * to the result
+         */
+        while (leftScaleRoundedUp > rightScaleRoundedUp) {
+
+            result.setInteger(resultIndex, left.getInteger(leftIndex));
+            leftIndex--;
+            resultIndex--;
+            leftScaleRoundedUp--;
+            resultScaleRoundedUp--;
+        }
+
+        while (rightScaleRoundedUp > leftScaleRoundedUp) {
+            result.setInteger((resultIndex), right.getInteger(rightIndex));
+            rightIndex--;
+            resultIndex--;
+            rightScaleRoundedUp--;
+            resultScaleRoundedUp--;
+        }
+
+        int sum = 0;
+
+        /* now the two scales are at the same level, we can add them */
+        while (resultScaleRoundedUp > 0) {
+
+            sum += left.getInteger(leftIndex) + right.getInteger(rightIndex);
+            leftIndex--;
+            rightIndex--;
+
+            if (sum >= org.apache.drill.common.util.DecimalUtility.DIGITS_BASE) {
+                result.setInteger(resultIndex, (sum - org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+                sum = 1;
+            } else {
+                result.setInteger(resultIndex, sum);
+                sum = 0;
+            }
+            resultIndex--;
+            resultScaleRoundedUp--;
+        }
+
+        /* add the integer part */
+        while (leftIndex >= 0 && rightIndex >= 0) {
+
+            sum += left.getInteger(leftIndex) + right.getInteger(rightIndex);
+            leftIndex--;
+            rightIndex--;
+
+            if (sum >= org.apache.drill.common.util.DecimalUtility.DIGITS_BASE) {
+                result.setInteger(resultIndex, (sum - org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+                sum = 1;
+            } else {
+                result.setInteger(resultIndex, sum);
+                sum = 0;
+            }
+            resultIndex--;
+        }
+
+        while (resultIndex >= 0 && leftIndex >= 0) {
+            sum += left.getInteger(leftIndex);
+            leftIndex--;
+
+            if (sum >= org.apache.drill.common.util.DecimalUtility.DIGITS_BASE) {
+                result.setInteger(resultIndex, (sum - org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+                sum = 1;
+            } else {
+                result.setInteger(resultIndex, sum);
+                sum = 0;
+            }
+            resultIndex--;
+        }
+
+        while (resultIndex >= 0 && rightIndex >= 0) {
+            sum += right.getInteger(rightIndex);
+            rightIndex--;
+
+            if (sum >= org.apache.drill.common.util.DecimalUtility.DIGITS_BASE) {
+                result.setInteger(resultIndex, (sum - org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+                sum = 1;
+            } else {
+                result.setInteger(resultIndex, sum);
+                sum = 0;
+            }
+            resultIndex--;
+        }
+
+        /* store the last carry */
+        if (sum > 0) {
+            result.setInteger(resultIndex, sum);
+        }
+
+</#macro>
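+
+<#-- An illustrative carry example for the block above, assuming DIGITS_BASE is 10^9:
+     adding 0.999999999 and 0.000000001 (both scale 9) sums the fractional digits to exactly
+     1000000000, which wraps to 0 with a carry of 1 into the integer digits, producing
+     1.000000000; this is the case that makes the >= DIGITS_BASE carry check necessary. -->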
+
+
+<#macro adjustScale holderType javaType left right>
+
+            // Adjust the scale of the two inputs to be the same
+
+            if (left.scale < right.scale) {
+                left.value = (${javaType}) (left.value * Math.pow(10, (right.scale - left.scale)));
+            } else if (right.scale < left.scale) {
+                right.value = (${javaType}) (right.value * Math.pow(10, (left.scale - right.scale)));
+            }
+</#macro>
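+
+<#-- A hypothetical example of the scale adjustment above for Decimal18 (backed by a long):
+     1.5 is held as value 15 with scale 1 and 2.25 as value 225 with scale 2; since the left
+     scale is smaller, 15 is multiplied by 10^(2 - 1) to give 150, after which both operands
+     share scale 2 and can be added, subtracted or compared directly. -->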
+
+<#list decimal.decimalTypes as type>
+
+<#if type.name.endsWith("Sparse")>
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/${type.name}Functions.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+public class ${type.name}Functions {
+
+    @FunctionTemplate(name = "subtract", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}SubtractFunction implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Workspace ByteBuf buffer;
+        @Output ${type.name}Holder result;
+
+        public void setup(RecordBatch incoming) {
+            int size = (${type.storage} * (org.apache.drill.common.util.DecimalUtility.integerSize));
+            buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+            buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+        }
+
+        public void eval() {
+
+            result.buffer = buffer;
+            result.start = 0;
+
+            // Reinitialize the buffer every time
+            for (int i = 0; i < ${type.storage}; i++) {
+                result.setInteger(i, 0);
+            }
+
+            /* If the signs of the two inputs differ, the subtraction effectively
+             * flips the sign of the right input, so the operation becomes an
+             * addition of the two magnitudes
+             */
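+            /* For example (hypothetical values): 5.0 - (-3.0) has differing signs, so the
+             * magnitudes are added (5.0 + 3.0 = 8.0) and the left sign is kept, giving 8.0.
+             */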
+            if (left.sign != right.sign) {
+                <@addBlock holderType=type.name left="left" right="right" result="result"/>
+                result.sign = left.sign;
+            } else {
+                /* The signs of the inputs are the same, so we perform a true subtraction.
+                 * The subtract logic requires the left input to be greater than the right input;
+                 * compare the two inputs and swap if necessary
+                 */
+                int cmp;
+                <@compareBlock holderType=type.name left="left" right="right" absCompare="true" output="cmp"/>
+
+                if (cmp == -1) {
+                    left.swap(right);
+                }
+
+                //Determine the sign of the result
+                if ((left.sign == false && cmp == -1) || (left.sign == true && cmp == 1)) {
+                    result.sign = true;
+                } else {
+                    result.sign = false;
+                }
+
+                // Perform the subtraction
+                <@subtractBlock holderType=type.name left="left" right="right" result="result"/>
+
+            }
+
+        }
+    }
+
+    @FunctionTemplate(name = "add", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}AddFunction implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Workspace ByteBuf buffer;
+        @Output ${type.name}Holder result;
+
+        public void setup(RecordBatch incoming) {
+            int size = (${type.storage} * (org.apache.drill.common.util.DecimalUtility.integerSize));
+            buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+            buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+        }
+
+        public void eval() {
+
+            result.buffer = buffer;
+            result.start = 0;
+
+            // Reinitialize the buffer every time
+            for (int i = 0; i < ${type.storage}; i++) {
+                result.setInteger(i, 0);
+            }
+
+            /* If sign is different use the subtraction logic */
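+            /* For example (hypothetical values): (-5.0) + 3.0 has differing signs, so the
+             * magnitudes are subtracted (5.0 - 3.0 = 2.0, swapping first if the right
+             * magnitude were larger) and the result takes the left sign, giving -2.0.
+             */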
+            if (left.sign != right.sign) {
+
+                /* The subtract logic assumes the left input is greater than the right input;
+                 * swap if necessary
+                 */
+                int cmp;
+                <@compareBlock holderType=type.name left="left" right="right" absCompare="true" output="cmp"/>
+
+                if (cmp == -1) {
+                    left.swap(right);
+                }
+                /* Perform the subtraction */
+                <@subtractBlock holderType=type.name left="left" right="right" result="result"/>
+            } else {
+                /* Sign of the two input decimals is the same, use the add logic */
+                <@addBlock holderType=type.name left="left" right="right" result="result"/>
+            }
+
+            /* Assign the result the sign of the left input.
+             * If the two input signs are the same, either one can be the resulting sign.
+             * If the two input signs are different, we swapped above so that the left input
+             * holds the greater absolute value, hence the result has the same sign as left
+             */
+            result.sign = left.sign;
+        }
+    }
+
+    @FunctionTemplate(name = "multiply", scope = FunctionTemplate.FunctionScope.DECIMAL_SUM_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}MultiplyFunction implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Workspace ByteBuf buffer;
+        @Output ${type.name}Holder result;
+
+        public void setup(RecordBatch incoming) {
+            int size = (${type.storage} * (org.apache.drill.common.util.DecimalUtility.integerSize));
+            buffer = io.netty.buffer.Unpooled.wrappedBuffer(new byte[size]);
+            buffer = new io.netty.buffer.SwappedByteBuf(buffer);
+        }
+
+        public void eval() {
+
+            result.buffer = buffer;
+            result.start = 0;
+
+            // Reinitialize the buffer every time
+            for (int i = 0; i < ${type.storage}; i++) {
+                result.setInteger(i, 0);
+            }
+
+            /* Remove the leading zeroes from the integer part of the input */
+            int leftIndex = 0;
+            int leftStopIndex = left.nDecimalDigits - org.apache.drill.common.util.DecimalUtility.roundUp(left.scale);
+
+            while (leftIndex < leftStopIndex) {
+                if (left.getInteger(leftIndex) > 0)
+                    break;
+                leftIndex++;
+            }
+
+            int leftIntegerSize = leftStopIndex - leftIndex;
+
+            /* Remove the leading zeroes from the integer part of the input */
+            int rightIndex = 0;
+            int rightStopIndex = right.nDecimalDigits - org.apache.drill.common.util.DecimalUtility.roundUp(right.scale);
+
+            while(rightIndex < rightStopIndex) {
+                if (right.getInteger(rightIndex) > 0)
+                    break;
+                rightIndex++;
+            }
+
+            int rightIntegerSize = rightStopIndex - rightIndex;
+
+            int resultIntegerSize = leftIntegerSize + rightIntegerSize;
+            int resultScaleSize = org.apache.drill.common.util.DecimalUtility.roundUp(left.scale + right.scale);
+
+            if ((resultIntegerSize + resultScaleSize) > result.nDecimalDigits) {
+                throw new org.apache.drill.common.exceptions.DrillRuntimeException("Cannot fit multiplication result in the given Decimal type");
+            }
+
+            int leftSize  = left.nDecimalDigits - 1;
+            int rightSize = right.nDecimalDigits - 1;
+
+            //int resultIndex = resultSize - 1;
+            int resultIndex = result.nDecimalDigits - 1;
+
+            for (int i = leftSize; i >= leftIndex; i--) {
+
+                int currentIndex = resultIndex;
+                int carry = 0;
+
+                for (int j = rightSize; j >= rightIndex; j--) {
+
+                    long mulResult = (long) right.getInteger(j) * (long) left.getInteger(i);
+
+                    //long tempSum = mulResultDigits[currentIndex] + mulResult + carry;
+                    long tempSum = result.getInteger(currentIndex) + mulResult + carry;
+
+                    if (tempSum >= org.apache.drill.common.util.DecimalUtility.DIGITS_BASE) {
+                        result.setInteger(currentIndex, (int) (tempSum % org.apache.drill.common.util.DecimalUtility.DIGITS_BASE));
+                        carry = (int) (tempSum / org.apache.drill.common.util.DecimalUtility.DIGITS_BASE);
+                    } else {
+                        result.setInteger(currentIndex, (int) tempSum);
+                        carry = 0;
+                    }
+
+                    currentIndex--;
+                }
+                /* propagate the carry */
+                if (carry > 0)
+                    result.setInteger(currentIndex,  (result.getInteger(currentIndex) + carry));
+
+                resultIndex--;
+            }
+
+
+            // Set the scale and precision
+            result.scale = left.scale + right.scale;
+            result.precision = result.maxPrecision;
+
+            result.sign = (left.sign != right.sign);
+        }
+    }
+
+
+    @FunctionTemplate(name = "compare_to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}CompareTo implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output IntHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+             <@compareBlock holderType=type.name left="left" right="right" absCompare="false" output="out.value"/>
+        }
+    }
+
+
+    @FunctionTemplate(name = "less than", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}LessThan implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@compareBlock holderType=type.name left="left" right="right" absCompare="false" output="cmp"/>
+            out.value = cmp == -1 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "less than or equal to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}LessThanEq implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@compareBlock holderType=type.name left="left" right="right" absCompare="false" output="cmp"/>
+            out.value = cmp < 1 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "greater than", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}GreaterThan implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@compareBlock holderType=type.name left="left" right="right" absCompare="false" output="cmp"/>
+            out.value = cmp == 1 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "greater than or equal to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}GreaterThanEq implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@compareBlock holderType=type.name left="left" right="right" absCompare="false" output="cmp"/>
+            out.value = cmp > -1 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "Equal", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}Equal implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@compareBlock holderType=type.name left="left" right="right" absCompare="false" output="cmp"/>
+            out.value = cmp == 0 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "not equal", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}NotEqual implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@compareBlock holderType=type.name left="left" right="right" absCompare="false" output="cmp"/>
+            out.value = cmp != 0 ? 1 : 0;
+        }
+    }
+}
+
+<#elseif type.name.endsWith("Dense")>
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/${type.name}Functions.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+public class ${type.name}Functions {
+
+
+    @FunctionTemplate(name = "compare_to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}CompareTo implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output IntHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@denseCompareBlock left="left" right="right" output="cmp"/>
+            out.value = cmp;
+        }
+    }
+
+    @FunctionTemplate(name = "less than", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}LessThan implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@denseCompareBlock left="left" right="right" output="cmp"/>
+            out.value = cmp == -1 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "less than or equal to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}LessThanEq implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@denseCompareBlock left="left" right="right" output="cmp"/>
+            out.value = cmp < 1 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "greater than", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}GreaterThan implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@denseCompareBlock left="left" right="right" output="cmp"/>
+            out.value = cmp == 1 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "greater than or equal to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}GreaterThanEq implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@denseCompareBlock left="left" right="right" output="cmp"/>
+            out.value = cmp > -1 ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "Equal", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}Equal implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@denseCompareBlock left="left" right="right" output="cmp"/>
+            out.value = cmp == 0 ? 1 : 0;
+        }
+    }
+
+
+    @FunctionTemplate(name = "not equal", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}NotEqual implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+
+            int cmp;
+            <@denseCompareBlock left="left" right="right" output="cmp"/>
+            out.value = cmp != 0 ? 1 : 0;
+        }
+    }
+}
+<#elseif type.name.endsWith("Decimal9") || type.name.endsWith("Decimal18")>
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/${type.name}Functions.java" />
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+import io.netty.buffer.ByteBuf;
+import java.nio.ByteBuffer;
+
+@SuppressWarnings("unused")
+public class ${type.name}Functions {
+
+    @FunctionTemplate(name = "add", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}AddFunction implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output ${type.name}Holder result;
+
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+
+            result.value = left.value + right.value;
+            result.precision = result.maxPrecision;
+            result.scale = Math.max(left.scale, right.scale);
+        }
+    }
+
+    @FunctionTemplate(name = "subtract", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}SubtractFunction implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output ${type.name}Holder result;
+
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+
+            result.value = left.value - right.value;
+            result.precision = result.maxPrecision;
+            result.scale = Math.max(left.scale, right.scale);
+        }
+    }
+    @FunctionTemplate(name = "multiply", scope = FunctionTemplate.FunctionScope.DECIMAL_SUM_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}MultiplyFunction implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output ${type.name}Holder result;
+
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+
+            result.value = left.value * right.value;
+            result.precision = result.maxPrecision;
+            result.scale = left.scale + right.scale;
+        }
+    }
+
+
+    @FunctionTemplate(name = "compare_to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}CompareTo implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output IntHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+            out.value = (left.value < right.value) ? -1 : (left.value > right.value) ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "less than", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}LessThan implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+            out.value = (left.value < right.value) ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "less than or equal to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}LessThanEq implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+            out.value = (left.value <= right.value) ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "greater than", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}GreaterThan implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+            out.value = (left.value > right.value) ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "greater than or equal to", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}GreaterThanEq implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+            out.value = (left.value >= right.value) ? 1 : 0;
+        }
+    }
+
+    @FunctionTemplate(name = "Equal", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}Equal implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+            int cmp;
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+            out.value = (left.value == right.value) ? 1 : 0;
+        }
+    }
+
+
+    @FunctionTemplate(name = "not equal", scope = FunctionTemplate.FunctionScope.DECIMAL_MAX_SCALE, nulls = NullHandling.NULL_IF_NULL)
+    public static class ${type.name}NotEqual implements DrillSimpleFunc {
+
+        @Param ${type.name}Holder left;
+        @Param ${type.name}Holder right;
+        @Output BitHolder out;
+        public void setup(RecordBatch incoming) {}
+
+        public void eval() {
+
+            int cmp;
+            <@adjustScale holderType=type.name javaType=type.storage left="left" right="right"/>
+            out.value = (left.value != right.value) ? 1 : 0;
+        }
+    }
+}
+
+</#if>
+</#list>
\ No newline at end of file


[02/10] DRILL-332: Support for decimal data type

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
new file mode 100644
index 0000000..5163a48
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
@@ -0,0 +1,357 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.physical.impl;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.exec.client.DrillClient;
+import org.apache.drill.exec.pop.PopUnitTestBase;
+import org.apache.drill.exec.proto.UserProtos;
+import org.apache.drill.exec.record.RecordBatchLoader;
+import org.apache.drill.exec.record.VectorWrapper;
+import org.apache.drill.exec.rpc.user.QueryResultBatch;
+import org.apache.drill.exec.server.Drillbit;
+import org.apache.drill.exec.server.RemoteServiceSet;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.VarCharVector;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
+
+public class TestDecimal extends PopUnitTestBase {
+    DrillConfig c = DrillConfig.create();
+
+    @Test
+    public void testSimpleDecimal() throws Exception {
+
+        /* Function checks casting from VarChar to Decimal9, Decimal18 and vice versa.
+         * Also tests instances where the scale might have to be truncated when the provided
+         * scale < the number of fraction digits in the input
+         */
+        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+             Drillbit bit = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+            // run query.
+            bit.run();
+            client.connect();
+            List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+                    Files.toString(FileUtils.getResourceAsFile("/decimal/cast_simple_decimal.json"), Charsets.UTF_8)
+                            .replace("#{TEST_FILE}", "/input_simple_decimal.json")
+            );
+
+            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+
+            QueryResultBatch batch = results.get(0);
+            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+
+            batchLoader.getValueAccessorById(0, VarCharVector.class);
+
+            String decimal9Output[] = {"99.0000", "11.1234", "0.1000", "-0.1200", "-123.1234", "-1.0001"};
+            String decimal18Output[] = {"123456789.000000000", "11.123456789", "0.100000000", "-0.100400000", "-987654321.123456789", "-2.030100000"};
+
+            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+
+            // Check the output of decimal9
+            ValueVector.Accessor dec9Accessor = itr.next().getValueVector().getAccessor();
+            ValueVector.Accessor dec18Accessor = itr.next().getValueVector().getAccessor();
+
+
+            for (int i = 0; i < dec9Accessor.getValueCount(); i++) {
+                assertEquals(dec9Accessor.getObject(i), decimal9Output[i]);
+                assertEquals(dec18Accessor.getObject(i), decimal18Output[i]);
+            }
+            assertEquals(6, dec9Accessor.getValueCount());
+            assertEquals(6, dec18Accessor.getValueCount());
+        }
+    }
+
+    @Test
+    public void testCastFromFloat() throws Exception {
+
+        // Function checks for casting from Float, Double to Decimal data types
+        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+             Drillbit bit = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+            // run query.
+            bit.run();
+            client.connect();
+            List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+                    Files.toString(FileUtils.getResourceAsFile("/decimal/cast_float_decimal.json"), Charsets.UTF_8)
+                            .replace("#{TEST_FILE}", "/input_simple_decimal.json")
+            );
+
+            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+
+            QueryResultBatch batch = results.get(0);
+            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+
+            batchLoader.getValueAccessorById(0, VarCharVector.class);
+
+            String decimal9Output[] = {"99.0000", "11.1234", "0.1000", "-0.1200", "-123.1234", "-1.0001"};
+            String decimal38Output[] = {"123456789.0000", "11.1234", "0.1000", "-0.1004", "-987654321.1234", "-2.0301"};
+
+            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+
+            // Check the output of decimal9
+            ValueVector.Accessor dec9Accessor = itr.next().getValueVector().getAccessor();
+            ValueVector.Accessor dec38Accessor = itr.next().getValueVector().getAccessor();
+
+
+            for (int i = 0; i < dec9Accessor.getValueCount(); i++) {
+                assertEquals(dec9Accessor.getObject(i).toString(), decimal9Output[i]);
+                assertEquals(dec38Accessor.getObject(i).toString(), decimal38Output[i]);
+            }
+            assertEquals(6, dec9Accessor.getValueCount());
+            assertEquals(6, dec38Accessor.getValueCount());
+        }
+    }
+
+    @Test
+    public void testSimpleDecimalArithmetic() throws Exception {
+
+        // Function checks arithmetic operations on Decimal18
+        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+             Drillbit bit = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+            // run query.
+            bit.run();
+            client.connect();
+            List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+                    Files.toString(FileUtils.getResourceAsFile("/decimal/simple_decimal_arithmetic.json"), Charsets.UTF_8)
+                            .replace("#{TEST_FILE}", "/input_simple_decimal.json")
+            );
+
+            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+
+            QueryResultBatch batch = results.get(0);
+            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+
+            batchLoader.getValueAccessorById(0, VarCharVector.class);
+
+            String addOutput[] = {"123456888.0", "22.2", "0.2", "-0.2", "-987654444.2","-3.0"};
+            String subtractOutput[] = {"123456690.0", "0.0", "0.0", "0.0", "-987654198.0", "-1.0"};
+            String multiplyOutput[] = {"12222222111.00" , "123.21" , "0.01", "0.01",  "121580246927.41", "2.00"};
+
+            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+
+            // Check the output of add
+            ValueVector.Accessor addAccessor = itr.next().getValueVector().getAccessor();
+            ValueVector.Accessor subAccessor = itr.next().getValueVector().getAccessor();
+            ValueVector.Accessor mulAccessor = itr.next().getValueVector().getAccessor();
+
+            for (int i = 0; i < addAccessor.getValueCount(); i++) {
+                assertEquals(addAccessor.getObject(i), addOutput[i]);
+                assertEquals(subAccessor.getObject(i), subtractOutput[i]);
+                assertEquals(mulAccessor.getObject(i), multiplyOutput[i]);
+
+            }
+            assertEquals(6, addAccessor.getValueCount());
+            assertEquals(6, subAccessor.getValueCount());
+            assertEquals(6, mulAccessor.getValueCount());
+        }
+    }
+
+    @Test
+    public void testComplexDecimal() throws Exception {
+
+        /* Function checks casting between varchar and decimal38sparse
+         * Also checks arithmetic on decimal38sparse
+         */
+        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+             Drillbit bit = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+            // run query.
+            bit.run();
+            client.connect();
+            List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+                    Files.toString(FileUtils.getResourceAsFile("/decimal/test_decimal_complex.json"), Charsets.UTF_8)
+                            .replace("#{TEST_FILE}", "/input_complex_decimal.json")
+            );
+
+            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+
+            QueryResultBatch batch = results.get(0);
+            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+
+            batchLoader.getValueAccessorById(0, VarCharVector.class);
+
+            String addOutput[] = {"-99999998877.700000000", "11.423456789", "123456789.100000000", "-0.119998000", "100000000112.423456789" , "-99999999879.907000000", "123456789123456801.300000000"};
+            String subtractOutput[] = {"-100000001124.300000000", "10.823456789", "-123456788.900000000", "-0.120002000", "99999999889.823456789", "-100000000122.093000000", "123456789123456776.700000000"};
+            String multiplyOutput[] = {"-112330000001123.300000000000000000", "3.337037036700000000" , "12345678.900000000000000000", "-0.000000240000000000" , "11130000000125.040740615700000000" , "-12109300000121.093000000000000000", "1518518506218518504.700000000000000000" };
+
+            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+
+            ValueVector.Accessor addAccessor = itr.next().getValueVector().getAccessor();
+            ValueVector.Accessor subAccessor = itr.next().getValueVector().getAccessor();
+            ValueVector.Accessor mulAccessor = itr.next().getValueVector().getAccessor();
+
+            for (int i = 0; i < addAccessor.getValueCount(); i++) {
+                assertEquals(addAccessor.getObject(i), addOutput[i]);
+                assertEquals(subAccessor.getObject(i), subtractOutput[i]);
+                assertEquals(mulAccessor.getObject(i), multiplyOutput[i]);
+            }
+            assertEquals(7, addAccessor.getValueCount());
+            assertEquals(7, subAccessor.getValueCount());
+            assertEquals(7, mulAccessor.getValueCount());
+        }
+    }
+
+    @Test
+    public void testComplexDecimalSort() throws Exception {
+
+        // Function checks if sort output on complex decimal type works
+        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+             Drillbit bit = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+            // run query.
+            bit.run();
+            client.connect();
+            List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+                    Files.toString(FileUtils.getResourceAsFile("/decimal/test_decimal_sort_complex.json"), Charsets.UTF_8)
+                            .replace("#{TEST_FILE}", "/input_sort_complex_decimal.json")
+            );
+
+            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+
+            QueryResultBatch batch = results.get(0);
+            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+
+            batchLoader.getValueAccessorById(0, VarCharVector.class);
+
+            String sortOutput[] = {"-100000000001.000000000000",
+                                   "-100000000001.000000000000",
+                                   "-145456789.120123000000",
+                                   "-0.120000000000",
+                                   "0.100000000001",
+                                   "11.123456789012",
+                                   "1278789.100000000000",
+                                   "145456789.120123000000",
+                                   "100000000001.123456789001",
+                                   "123456789123456789.000000000000"};
+
+            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+
+            // Check the output of sort
+            VectorWrapper<?> v = itr.next();
+            ValueVector.Accessor accessor = v.getValueVector().getAccessor();
+
+            for (int i = 0; i < accessor.getValueCount(); i++) {
+                assertEquals(accessor.getObject(i), sortOutput[i]);
+            }
+            assertEquals(10, accessor.getValueCount());
+        }
+    }
+
+    @Test
+    public void testDenseSparseConversion() throws Exception {
+
+        /* Function checks the following workflow
+         * VarChar -> Sparse -> Dense -> Sort(Dense) -> Sparse -> VarChar
+         */
+        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+             Drillbit bit = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+            // run query.
+            bit.run();
+            client.connect();
+            List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+                    Files.toString(FileUtils.getResourceAsFile("/decimal/test_decimal_dense_sparse.json"), Charsets.UTF_8)
+                            .replace("#{TEST_FILE}", "/input_complex_decimal.json")
+            );
+
+            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+
+            QueryResultBatch batch = results.get(0);
+            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+
+            batchLoader.getValueAccessorById(0, VarCharVector.class);
+
+            String sortOutput[] = {"-100000000001.000000000000", "-100000000001.000000000000", "-0.120000000000", "0.100000000001",  "11.123456789012", "100000000001.123456789001", "123456789123456789.000000000000"};
+
+            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+
+            // Check the output of sort
+            VectorWrapper<?> v = itr.next();
+            ValueVector.Accessor accessor = v.getValueVector().getAccessor();
+
+            for (int i = 0; i < accessor.getValueCount(); i++) {
+                assertEquals(accessor.getObject(i), sortOutput[i]);
+            }
+            assertEquals(7, accessor.getValueCount());
+        }
+    }
+
+    @Test
+    public void testDenseSparseConversion1() throws Exception {
+
+        /* Function checks the following cast sequence.
+         * VarChar          -> Decimal28Sparse
+         * Decimal28Sparse  -> Decimal28Dense
+         * Decimal28Dense   -> Decimal38Dense
+         *
+         * The goal is to test casting between the dense types, 28Dense -> 38Dense
+         *
+         *
+         */
+        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+             Drillbit bit = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+            // run query.
+            bit.run();
+            client.connect();
+            List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+                    Files.toString(FileUtils.getResourceAsFile("/decimal/test_decimal_sparse_dense_dense.json"), Charsets.UTF_8)
+                            .replace("#{TEST_FILE}", "/input_simple_decimal.json")
+            );
+
+            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+
+            QueryResultBatch batch = results.get(0);
+            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+
+            batchLoader.getValueAccessorById(0, VarCharVector.class);
+
+            String output[] = {"99.0000", "11.1234", "0.1000", "-0.1200", "-123.1234", "-1.0001"};
+
+            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+
+            // Check the output of sort
+            VectorWrapper<?> v = itr.next();
+            ValueVector.Accessor accessor = v.getValueVector().getAccessor();
+
+            for (int i = 0; i < accessor.getValueCount(); i++) {
+                assertEquals(accessor.getObject(i).toString(), output[i]);
+            }
+            assertEquals(6, accessor.getValueCount());
+        }
+    }
+}

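
The expected strings in TestDecimal above can be cross-checked independently with
java.math.BigDecimal. A minimal sketch for the first row of the arithmetic test follows;
the class name DecimalExpectationCheck is illustrative, and the inputs 123456789.0 and
99.0 are inferred from the expected outputs above rather than read from the test resources:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class DecimalExpectationCheck {
        public static void main(String[] args) {
            // Mirror the decimal18(18, 1) casts used in simple_decimal_arithmetic.json.
            BigDecimal left  = new BigDecimal("123456789.0").setScale(1, RoundingMode.DOWN);
            BigDecimal right = new BigDecimal("99.0").setScale(1, RoundingMode.DOWN);

            System.out.println(left.add(right));       // 123456888.0
            System.out.println(left.subtract(right));  // 123456690.0
            System.out.println(left.multiply(right));  // 12222222111.00 (result scale = sum of input scales)
        }
    }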
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/decimal/cast_float_decimal.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/decimal/cast_float_decimal.json b/exec/java-exec/src/test/resources/decimal/cast_float_decimal.json
new file mode 100644
index 0000000..33732ae
--- /dev/null
+++ b/exec/java-exec/src/test/resources/decimal/cast_float_decimal.json
@@ -0,0 +1,47 @@
+{
+  "head" : {
+    "version" : 1,
+    "generator" : {
+      "type" : "org.apache.drill.exec.planner.logical.DrillImplementor",
+      "info" : ""
+    },
+    "type" : "APACHE_DRILL_PHYSICAL",
+    "resultMode" : "EXEC"
+  },
+  graph:[
+  {
+      @id:1,
+      pop:"fs-scan",
+      format: {type: "json"},
+      storage:{type: "file", connection: "classpath:///"},
+      files:["#{TEST_FILE}"]
+  }, {
+    "pop" : "project",
+    "@id" : 2,
+    "exprs" : [ {
+      "ref" : "F4",
+      "expr" : " (cast(DEC9 as float4)) "
+    },
+    { "ref" : "F8", "expr": "(cast(DEC18 as float8))" }
+    ],
+
+    "child" : 1
+  },
+{
+    "pop" : "project",
+    "@id" : 4,
+    "exprs" : [ {
+      "ref" : "DECIMAL9",
+      "expr" : " cast(F4 as decimal9(9, 4))  "
+    },
+    {"ref": "DECIMAL38", "expr" : "cast(F8 as decimal38sparse(38, 4))"}
+    ],
+
+    "child" : 2
+  },
+{
+    "pop" : "screen",
+    "@id" : 5,
+    "child" : 4
+  } ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/decimal/cast_simple_decimal.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/decimal/cast_simple_decimal.json b/exec/java-exec/src/test/resources/decimal/cast_simple_decimal.json
new file mode 100644
index 0000000..279de3c
--- /dev/null
+++ b/exec/java-exec/src/test/resources/decimal/cast_simple_decimal.json
@@ -0,0 +1,47 @@
+{
+  "head" : {
+    "version" : 1,
+    "generator" : {
+      "type" : "org.apache.drill.exec.planner.logical.DrillImplementor",
+      "info" : ""
+    },
+    "type" : "APACHE_DRILL_PHYSICAL",
+    "resultMode" : "EXEC"
+  },
+  graph:[
+  {
+      @id:1,
+      pop:"fs-scan",
+      format: {type: "json"},
+      storage:{type: "file", connection: "classpath:///"},
+      files:["#{TEST_FILE}"]
+  }, {
+    "pop" : "project",
+    "@id" : 2,
+    "exprs" : [ {
+      "ref" : "DECIMAL9",
+      "expr" : " (cast(DEC9 as decimal9(9, 4))) "
+    },
+    { "ref" : "DECIMAL18", "expr": "(cast(DEC18 as decimal18(18, 9)))" }
+    ],
+
+    "child" : 1
+  },
+{
+    "pop" : "project",
+    "@id" : 4,
+    "exprs" : [ {
+      "ref" : "DECIMAL9",
+      "expr" : " cast(DECIMAL9 as varchar(100))  "
+    },
+    {"ref": "DECIMAL18", "expr" : "cast(DECIMAL18 as varchar(100))"}
+    ],
+
+    "child" : 2
+  },
+{
+    "pop" : "screen",
+    "@id" : 5,
+    "child" : 4
+  } ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/decimal/simple_decimal_arithmetic.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/decimal/simple_decimal_arithmetic.json b/exec/java-exec/src/test/resources/decimal/simple_decimal_arithmetic.json
new file mode 100644
index 0000000..ca31d88
--- /dev/null
+++ b/exec/java-exec/src/test/resources/decimal/simple_decimal_arithmetic.json
@@ -0,0 +1,55 @@
+{
+  "head" : {
+    "version" : 1,
+    "generator" : {
+      "type" : "org.apache.drill.exec.planner.logical.DrillImplementor",
+      "info" : ""
+    },
+    "type" : "APACHE_DRILL_PHYSICAL",
+    "resultMode" : "EXEC"
+  },
+  graph:[
+  {
+      @id:1,
+      pop:"fs-scan",
+      format: {type: "json"},
+      storage:{type: "file", connection: "classpath:///"},
+      files:["#{TEST_FILE}"]
+  },  {
+          "pop" : "project",
+          "@id" : 2,
+          "exprs" : [
+          { "ref" : "DECIMAL18_1", "expr": "(cast(DEC18 as decimal18(18, 1)))" },
+          { "ref" : "DECIMAL18_2", "expr": "(cast(DEC9 as decimal18(18, 1)))" }
+          ],
+
+          "child" : 1
+        },
+        {
+          "pop" : "project",
+          "@id" : 3,
+          "exprs" : [
+          { "ref": "DEC18ADD", "expr" : "DECIMAL18_1 + DECIMAL18_2"},
+          { "ref": "DEC18SUB", "expr" : "DECIMAL18_1 - DECIMAL18_2"},
+          { "ref": "DEC18MUL", "expr" : "DECIMAL18_1 * DECIMAL18_2"}
+          ],
+
+          "child" : 2
+        },
+      {
+          "pop" : "project",
+          "@id" : 4,
+          "exprs" : [
+          {"ref": "DECIMAL18ADD", "expr" : "cast(DEC18ADD as varchar(100))" },
+          {"ref": "DECIMAL18SUB", "expr" : "cast(DEC18SUB as varchar(100))" },
+          {"ref": "DECIMAL18MUL", "expr" : "cast(DEC18MUL as varchar(100))" }
+          ],
+
+          "child" : 3
+        },
+      {
+          "pop" : "screen",
+          "@id" : 5,
+          "child" : 4
+        } ]
+      }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/decimal/test_decimal_complex.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/decimal/test_decimal_complex.json b/exec/java-exec/src/test/resources/decimal/test_decimal_complex.json
new file mode 100644
index 0000000..b2f1929
--- /dev/null
+++ b/exec/java-exec/src/test/resources/decimal/test_decimal_complex.json
@@ -0,0 +1,61 @@
+{
+  "head" : {
+    "version" : 1,
+    "generator" : {
+      "type" : "org.apache.drill.exec.planner.logical.DrillImplementor",
+      "info" : ""
+    },
+    "type" : "APACHE_DRILL_PHYSICAL",
+    "resultMode" : "EXEC"
+  },
+  graph:[
+  {
+      @id:1,
+      pop:"fs-scan",
+      format: {type: "json"},
+      storage:{type: "file", connection: "classpath:///"},
+      files:["#{TEST_FILE}"]
+  },{
+        "pop" : "project",
+        "@id" : 2,
+        "exprs" : [ {
+          "ref" : "DE",
+          "expr" : " (cast(B as decimal38sparse(38, 9))) "
+        },
+        {"ref" : "DE1", "expr": " cast(A as decimal38sparse(38, 9))" }
+        ],
+
+        "child" : 1
+      },
+    {
+        "pop" : "project",
+        "@id" : 3,
+        "exprs" : [ {
+          "ref" : "DEC38ADD",
+          "expr" : " (DE + DE1)  "
+        },
+        {"ref" : "DEC38SUB" , "expr" : " (DE - DE1) " },
+        {"ref" : "DEC38MUL" , "expr" : " (DE * DE1) " }
+        ],
+
+        "child" : 2
+      },
+    {
+        "pop" : "project",
+        "@id" : 4,
+        "exprs" : [ {
+          "ref" : "DEC38ADD",
+          "expr" : " cast(DEC38ADD as varchar(100))  "
+        },
+        {"ref" : "DEC38SUB" , "expr" : " cast(DEC38SUB as varchar(100)) " },
+        {"ref" : "DEC38MUL" , "expr" : " cast(DEC38MUL as varchar(100)) " }
+        ],
+
+        "child" : 3
+      },
+    {
+        "pop" : "screen",
+        "@id" : 5,
+        "child" : 4
+      } ]
+    }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/decimal/test_decimal_dense_sparse.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/decimal/test_decimal_dense_sparse.json b/exec/java-exec/src/test/resources/decimal/test_decimal_dense_sparse.json
new file mode 100644
index 0000000..bc10f50
--- /dev/null
+++ b/exec/java-exec/src/test/resources/decimal/test_decimal_dense_sparse.json
@@ -0,0 +1,78 @@
+{
+  "head" : {
+    "version" : 1,
+    "generator" : {
+      "type" : "org.apache.drill.exec.planner.logical.DrillImplementor",
+      "info" : ""
+    },
+    "type" : "APACHE_DRILL_PHYSICAL",
+    "resultMode" : "EXEC"
+  },
+  graph:[
+  {
+      @id:1,
+      pop:"fs-scan",
+      format: {type: "json"},
+      storage:{type: "file", connection: "classpath:///"},
+      files:["#{TEST_FILE}"]
+  }, {
+
+    "pop" : "project",
+    "@id" : 2,
+    "exprs" : [ {
+      "ref" : "SPARSE",
+      "expr" : " (cast(B as decimal38sparse(38, 12))) "
+    }
+    ],
+
+    "child" : 1
+  },
+{
+    "pop" : "project",
+    "@id" : 3,
+    "exprs" : [ {
+      "ref" : "DENSE",
+      "expr" : " cast(SPARSE as decimal38dense(38, 12))  "
+    }
+    ],
+
+    "child" : 2
+  },
+{
+            @id:4,
+            child: 3,
+            pop:"sort",
+            orderings: [
+              {expr: "DENSE"}
+            ]
+        },
+        {
+            @id:5,
+            child: 4,
+            pop:"selection-vector-remover"
+        },
+{
+    "pop" : "project",
+    "@id" : 6,
+    "exprs" : [
+    { "ref" : "SPARSE", "expr" : "cast(DENSE as decimal38sparse(38, 12))" }
+    ],
+
+    "child" : 5
+  },
+{
+    "pop" : "project",
+    "@id" : 7,
+    "exprs" : [
+    { "ref" : "STRINGSPARSE", "expr" : "cast(SPARSE as varchar(100))" }
+    ],
+
+    "child" : 6
+  },
+{
+    "pop" : "screen",
+    "@id" : 8,
+    "child" : 7
+  } ]
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/decimal/test_decimal_sort_complex.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/decimal/test_decimal_sort_complex.json b/exec/java-exec/src/test/resources/decimal/test_decimal_sort_complex.json
new file mode 100644
index 0000000..1fbe106
--- /dev/null
+++ b/exec/java-exec/src/test/resources/decimal/test_decimal_sort_complex.json
@@ -0,0 +1,56 @@
+{
+  "head" : {
+    "version" : 1,
+    "generator" : {
+      "type" : "org.apache.drill.exec.planner.logical.DrillImplementor",
+      "info" : ""
+    },
+    "type" : "APACHE_DRILL_PHYSICAL",
+    "resultMode" : "EXEC"
+  },
+  graph:[
+  {
+      @id:1,
+      pop:"fs-scan",
+      format: {type: "json"},
+      storage:{type: "file", connection: "classpath:///"},
+      files:["#{TEST_FILE}"]
+  },{
+        "pop" : "project",
+        "@id" : 2,
+        "exprs" : [ {
+          "ref" : "DEC",
+          "expr" : " (cast(B as decimal38sparse(38, 12)))"
+        }
+        ],
+        "child" : 1
+      },
+     {
+                @id:3,
+                child: 2,
+                pop:"sort",
+                orderings: [
+                  {expr: "DEC"}
+                ]
+            },
+            {
+                @id:4,
+                child: 3,
+                pop:"selection-vector-remover"
+            },
+     {
+        "pop" : "project",
+        "@id" : 5,
+        "exprs" : [ {
+          "ref" : "DESTR",
+          "expr" : " (cast(DEC as varchar(100)))"
+        }
+        ],
+        "child" : 4
+      },
+    {
+        "pop" : "screen",
+        "@id" : 6,
+        "child" : 5
+      } ]
+    }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/decimal/test_decimal_sparse_dense_dense.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/decimal/test_decimal_sparse_dense_dense.json b/exec/java-exec/src/test/resources/decimal/test_decimal_sparse_dense_dense.json
new file mode 100644
index 0000000..03add9a
--- /dev/null
+++ b/exec/java-exec/src/test/resources/decimal/test_decimal_sparse_dense_dense.json
@@ -0,0 +1,56 @@
+{
+  "head" : {
+    "version" : 1,
+    "generator" : {
+      "type" : "org.apache.drill.exec.planner.logical.DrillImplementor",
+      "info" : ""
+    },
+    "type" : "APACHE_DRILL_PHYSICAL",
+    "resultMode" : "EXEC"
+  },
+  graph:[
+  {
+      @id:1,
+      pop:"fs-scan",
+      format: {type: "json"},
+      storage:{type: "file", connection: "classpath:///"},
+      files:["#{TEST_FILE}"]
+  }, {
+    "pop" : "project",
+    "@id" : 2,
+    "exprs" : [ {
+      "ref" : "DECIMAL28SPARSE",
+      "expr" : " (cast(DEC9 as decimal28sparse(28, 4))) "
+    }
+    ],
+
+    "child" : 1
+  },
+{
+    "pop" : "project",
+    "@id" : 3,
+    "exprs" : [ {
+      "ref" : "DECIMAL28DENSE",
+      "expr" : " cast(DECIMAL28SPARSE as decimal28dense(28, 4))  "
+    }
+    ],
+
+    "child" : 2
+  },
+{
+    "pop" : "project",
+    "@id" : 4,
+    "exprs" : [ {
+      "ref" : "DECIMAL38DENSE",
+      "expr" : " cast(DECIMAL28DENSE as decimal38dense(38, 4))  "
+    }
+    ],
+
+    "child" : 3
+  },
+{
+    "pop" : "screen",
+    "@id" : 5,
+    "child" : 4
+  } ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/input_complex_decimal.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/input_complex_decimal.json b/exec/java-exec/src/test/resources/input_complex_decimal.json
new file mode 100644
index 0000000..a0838a9
--- /dev/null
+++ b/exec/java-exec/src/test/resources/input_complex_decimal.json
@@ -0,0 +1,28 @@
+{
+"B": "-100000000001",
+"A": "1123.3"
+}
+{
+"B": "11.1234567890123456",
+"A": "0.3"
+}
+{
+"B": "0.100000000001",
+"A": "123456789"
+}
+{
+"B": "-0.12",
+"A": "0.000002"
+}
+{
+"B": "100000000001.123456789001",
+"A": "111.3"
+}
+{
+"B": "-100000000001",
+"A": "121.093"
+}
+{
+"B": "123456789123456789",
+"A": "12.3"
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/input_simple_decimal.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/input_simple_decimal.json b/exec/java-exec/src/test/resources/input_simple_decimal.json
new file mode 100644
index 0000000..b7c9481
--- /dev/null
+++ b/exec/java-exec/src/test/resources/input_simple_decimal.json
@@ -0,0 +1,24 @@
+{
+"DEC9": "99",
+"DEC18": "123456789"
+}
+{
+"DEC9": "11.1234567890123456",
+"DEC18": "11.123456789"
+}
+{
+"DEC9": "0.100000000001",
+"DEC18":"0.100000000001"
+}
+{
+"DEC9": "-0.12",
+"DEC18":"-0.1004"
+}
+{
+"DEC9": "-123.1234",
+"DEC18": "-987654321.1234567891"
+}
+{
+"DEC9": "-1.0001",
+"DEC18":"-2.0301"
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/test/resources/input_sort_complex_decimal.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/input_sort_complex_decimal.json b/exec/java-exec/src/test/resources/input_sort_complex_decimal.json
new file mode 100644
index 0000000..9a5b061
--- /dev/null
+++ b/exec/java-exec/src/test/resources/input_sort_complex_decimal.json
@@ -0,0 +1,30 @@
+{
+"B": "-100000000001"
+}
+{
+"B": "11.1234567890123456"
+}
+{
+"B": "0.100000000001"
+}
+{
+"B": "-0.12"
+}
+{
+"B": "100000000001.123456789001"
+}
+{
+"B": "-100000000001"
+}
+{
+"B": "123456789123456789"
+}
+{
+"B": "145456789.120123"
+}
+{
+"B" : "1278789.1"
+}
+{
+"B": "-145456789.120123"
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 8291844..2cac418 100644
--- a/pom.xml
+++ b/pom.xml
@@ -363,6 +363,12 @@
   <dependencies>
 
     <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-handler</artifactId>
+      <version>4.0.7.Final</version>
+    </dependency>
+
+    <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
       <version>14.0.1</version>

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java b/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java
index 3a1bdcd..70125b2 100644
--- a/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java
+++ b/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java
@@ -87,37 +87,37 @@ public final class TypeProtos {
      */
     BIGINT(6, 6),
     /**
-     * <code>DECIMAL4 = 7;</code>
+     * <code>DECIMAL9 = 7;</code>
      *
      * <pre>
-     *  a decimal supporting precision between 1 and 8 (4 bits for decimal location, 1 sign)
+     *  a decimal supporting precision between 1 and 9
      * </pre>
      */
-    DECIMAL4(7, 7),
+    DECIMAL9(7, 7),
     /**
-     * <code>DECIMAL8 = 8;</code>
+     * <code>DECIMAL18 = 8;</code>
      *
      * <pre>
-     *  a decimal supporting precision between 9 and 18 (5 bits for decimal location, 1 sign)
+     *  a decimal supporting precision between 10 and 18
      * </pre>
      */
-    DECIMAL8(8, 8),
+    DECIMAL18(8, 8),
     /**
-     * <code>DECIMAL12 = 9;</code>
+     * <code>DECIMAL28SPARSE = 9;</code>
      *
      * <pre>
-     *  a decimal supporting precision between 19 and 28 (5 bits for decimal location, 1 sign)
+     *  a decimal supporting precision between 19 and 28
      * </pre>
      */
-    DECIMAL12(9, 9),
+    DECIMAL28SPARSE(9, 9),
     /**
-     * <code>DECIMAL16 = 10;</code>
+     * <code>DECIMAL38SPARSE = 10;</code>
      *
      * <pre>
-     *  a decimal supporting precision between 29 and 37 (6 bits for decimal location, 1 sign)
+     *  a decimal supporting precision between 29 and 38
      * </pre>
      */
-    DECIMAL16(10, 10),
+    DECIMAL38SPARSE(10, 10),
     /**
      * <code>MONEY = 11;</code>
      *
@@ -130,7 +130,7 @@ public final class TypeProtos {
      * <code>DATE = 12;</code>
      *
      * <pre>
-     *  days since 4713bc 
+     *  days since 4713bc
      * </pre>
      */
     DATE(12, 12),
@@ -275,6 +275,22 @@ public final class TypeProtos {
      */
     UINT8(30, 32),
     /**
+     * <code>DECIMAL28DENSE = 33;</code>
+     *
+     * <pre>
+     * dense decimal representation, supporting precision between 19 and 28
+     * </pre>
+     */
+    DECIMAL28DENSE(31, 33),
+    /**
+     * <code>DECIMAL38DENSE = 34;</code>
+     *
+     * <pre>
+     * dense decimal representation, supporting precision between 28 and 38
+     * </pre>
+     */
+    DECIMAL38DENSE(32, 34),
+    /**
      * <code>NULL = 37;</code>
      *
      * <pre>
@@ -284,7 +300,7 @@ public final class TypeProtos {
      *    MSGPACK4 = 36;   //  msgpack encoded complex type. (up to 2^32 in length)
      * </pre>
      */
-    NULL(31, 37),
+    NULL(33, 37),
     /**
      * <code>INTERVALYEAR = 38;</code>
      *
@@ -292,7 +308,7 @@ public final class TypeProtos {
      * Interval type specifying YEAR to MONTH
      * </pre>
      */
-    INTERVALYEAR(32, 38),
+    INTERVALYEAR(34, 38),
     /**
      * <code>INTERVALDAY = 39;</code>
      *
@@ -300,7 +316,7 @@ public final class TypeProtos {
      * Interval type specifying DAY to SECONDS
      * </pre>
      */
-    INTERVALDAY(33, 39),
+    INTERVALDAY(35, 39),
     ;
 
     /**
@@ -360,37 +376,37 @@ public final class TypeProtos {
      */
     public static final int BIGINT_VALUE = 6;
     /**
-     * <code>DECIMAL4 = 7;</code>
+     * <code>DECIMAL9 = 7;</code>
      *
      * <pre>
-     *  a decimal supporting precision between 1 and 8 (4 bits for decimal location, 1 sign)
+     *  a decimal supporting precision between 1 and 9
      * </pre>
      */
-    public static final int DECIMAL4_VALUE = 7;
+    public static final int DECIMAL9_VALUE = 7;
     /**
-     * <code>DECIMAL8 = 8;</code>
+     * <code>DECIMAL18 = 8;</code>
      *
      * <pre>
-     *  a decimal supporting precision between 9 and 18 (5 bits for decimal location, 1 sign)
+     *  a decimal supporting precision between 10 and 18
      * </pre>
      */
-    public static final int DECIMAL8_VALUE = 8;
+    public static final int DECIMAL18_VALUE = 8;
     /**
-     * <code>DECIMAL12 = 9;</code>
+     * <code>DECIMAL28SPARSE = 9;</code>
      *
      * <pre>
-     *  a decimal supporting precision between 19 and 28 (5 bits for decimal location, 1 sign)
+     *  a decimal supporting precision between 19 and 28
      * </pre>
      */
-    public static final int DECIMAL12_VALUE = 9;
+    public static final int DECIMAL28SPARSE_VALUE = 9;
     /**
-     * <code>DECIMAL16 = 10;</code>
+     * <code>DECIMAL38SPARSE = 10;</code>
      *
      * <pre>
-     *  a decimal supporting precision between 29 and 37 (6 bits for decimal location, 1 sign)
+     *  a decimal supporting precision between 29 and 38
      * </pre>
      */
-    public static final int DECIMAL16_VALUE = 10;
+    public static final int DECIMAL38SPARSE_VALUE = 10;
     /**
      * <code>MONEY = 11;</code>
      *
@@ -403,7 +419,7 @@ public final class TypeProtos {
      * <code>DATE = 12;</code>
      *
      * <pre>
-     *  days since 4713bc 
+     *  days since 4713bc
      * </pre>
      */
     public static final int DATE_VALUE = 12;
@@ -548,6 +564,22 @@ public final class TypeProtos {
      */
     public static final int UINT8_VALUE = 32;
     /**
+     * <code>DECIMAL28DENSE = 33;</code>
+     *
+     * <pre>
+     * dense decimal representation, supporting precision between 19 and 28
+     * </pre>
+     */
+    public static final int DECIMAL28DENSE_VALUE = 33;
+    /**
+     * <code>DECIMAL38DENSE = 34;</code>
+     *
+     * <pre>
+     * dense decimal representation, supporting precision between 28 and 38
+     * </pre>
+     */
+    public static final int DECIMAL38DENSE_VALUE = 34;
+    /**
      * <code>NULL = 37;</code>
      *
      * <pre>
@@ -587,10 +619,10 @@ public final class TypeProtos {
         case 4: return SMALLINT;
         case 5: return INT;
         case 6: return BIGINT;
-        case 7: return DECIMAL4;
-        case 8: return DECIMAL8;
-        case 9: return DECIMAL12;
-        case 10: return DECIMAL16;
+        case 7: return DECIMAL9;
+        case 8: return DECIMAL18;
+        case 9: return DECIMAL28SPARSE;
+        case 10: return DECIMAL38SPARSE;
         case 11: return MONEY;
         case 12: return DATE;
         case 13: return TIME;
@@ -611,6 +643,8 @@ public final class TypeProtos {
         case 30: return UINT2;
         case 31: return UINT4;
         case 32: return UINT8;
+        case 33: return DECIMAL28DENSE;
+        case 34: return DECIMAL38DENSE;
         case 37: return NULL;
         case 38: return INTERVALYEAR;
         case 39: return INTERVALDAY;
@@ -1745,21 +1779,22 @@ public final class TypeProtos {
       "inor_type\030\001 \001(\0162\021.common.MinorType\022\036\n\004mo" +
       "de\030\002 \001(\0162\020.common.DataMode\022\r\n\005width\030\003 \001(" +
       "\005\022\021\n\tprecision\030\004 \001(\005\022\r\n\005scale\030\005 \001(\005\022\020\n\010t" +
-      "imeZone\030\006 \001(\005*\306\003\n\tMinorType\022\010\n\004LATE\020\000\022\007\n" +
+      "imeZone\030\006 \001(\005*\373\003\n\tMinorType\022\010\n\004LATE\020\000\022\007\n" +
       "\003MAP\020\001\022\r\n\tREPEATMAP\020\002\022\013\n\007TINYINT\020\003\022\014\n\010SM" +
       "ALLINT\020\004\022\007\n\003INT\020\005\022\n\n\006BIGINT\020\006\022\014\n\010DECIMAL" +
-      "4\020\007\022\014\n\010DECIMAL8\020\010\022\r\n\tDECIMAL12\020\t\022\r\n\tDECI" +
-      "MAL16\020\n\022\t\n\005MONEY\020\013\022\010\n\004DATE\020\014\022\010\n\004TIME\020\r\022\n" +
-      "\n\006TIMETZ\020\016\022\017\n\013TIMESTAMPTZ\020\017\022\r\n\tTIMESTAMP",
-      "\020\020\022\014\n\010INTERVAL\020\021\022\n\n\006FLOAT4\020\022\022\n\n\006FLOAT8\020\023" +
-      "\022\007\n\003BIT\020\024\022\r\n\tFIXEDCHAR\020\025\022\017\n\013FIXED16CHAR\020" +
-      "\026\022\017\n\013FIXEDBINARY\020\027\022\013\n\007VARCHAR\020\030\022\r\n\tVAR16" +
-      "CHAR\020\031\022\r\n\tVARBINARY\020\032\022\t\n\005UINT1\020\035\022\t\n\005UINT" +
-      "2\020\036\022\t\n\005UINT4\020\037\022\t\n\005UINT8\020 \022\010\n\004NULL\020%\022\020\n\014I" +
-      "NTERVALYEAR\020&\022\017\n\013INTERVALDAY\020\'*4\n\010DataMo" +
-      "de\022\014\n\010OPTIONAL\020\000\022\014\n\010REQUIRED\020\001\022\014\n\010REPEAT" +
-      "ED\020\002B-\n\035org.apache.drill.common.typesB\nT" +
-      "ypeProtosH\001"
+      "9\020\007\022\r\n\tDECIMAL18\020\010\022\023\n\017DECIMAL28SPARSE\020\t\022" +
+      "\023\n\017DECIMAL38SPARSE\020\n\022\t\n\005MONEY\020\013\022\010\n\004DATE\020" +
+      "\014\022\010\n\004TIME\020\r\022\n\n\006TIMETZ\020\016\022\017\n\013TIMESTAMPTZ\020\017",
+      "\022\r\n\tTIMESTAMP\020\020\022\014\n\010INTERVAL\020\021\022\n\n\006FLOAT4\020" +
+      "\022\022\n\n\006FLOAT8\020\023\022\007\n\003BIT\020\024\022\r\n\tFIXEDCHAR\020\025\022\017\n" +
+      "\013FIXED16CHAR\020\026\022\017\n\013FIXEDBINARY\020\027\022\013\n\007VARCH" +
+      "AR\020\030\022\r\n\tVAR16CHAR\020\031\022\r\n\tVARBINARY\020\032\022\t\n\005UI" +
+      "NT1\020\035\022\t\n\005UINT2\020\036\022\t\n\005UINT4\020\037\022\t\n\005UINT8\020 \022\022" +
+      "\n\016DECIMAL28DENSE\020!\022\022\n\016DECIMAL38DENSE\020\"\022\010" +
+      "\n\004NULL\020%\022\020\n\014INTERVALYEAR\020&\022\017\n\013INTERVALDA" +
+      "Y\020\'*4\n\010DataMode\022\014\n\010OPTIONAL\020\000\022\014\n\010REQUIRE" +
+      "D\020\001\022\014\n\010REPEATED\020\002B-\n\035org.apache.drill.co" +
+      "mmon.typesB\nTypeProtosH\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/protocol/src/main/protobuf/Types.proto
----------------------------------------------------------------------
diff --git a/protocol/src/main/protobuf/Types.proto b/protocol/src/main/protobuf/Types.proto
index 9ca1ca9..3871a40 100644
--- a/protocol/src/main/protobuf/Types.proto
+++ b/protocol/src/main/protobuf/Types.proto
@@ -29,12 +29,12 @@ enum MinorType {
     SMALLINT = 4;   //  two byte signed integer
     INT = 5;   //  four byte signed integer
     BIGINT = 6;   //  eight byte signed integer
-    DECIMAL4 = 7;   //  a decimal supporting precision between 1 and 8 (4 bits for decimal location, 1 sign)
-    DECIMAL8 = 8;   //  a decimal supporting precision between 9 and 18 (5 bits for decimal location, 1 sign)
-    DECIMAL12 = 9;   //  a decimal supporting precision between 19 and 28 (5 bits for decimal location, 1 sign)
-    DECIMAL16 = 10;   //  a decimal supporting precision between 29 and 37 (6 bits for decimal location, 1 sign)
+    DECIMAL9 = 7;   //  a decimal supporting precision between 1 and 9
+    DECIMAL18 = 8;   //  a decimal supporting precision between 10 and 18
+    DECIMAL28SPARSE = 9;   //  a decimal supporting precision between 19 and 28
+    DECIMAL38SPARSE = 10;   //  a decimal supporting precision between 29 and 38
     MONEY = 11;   //  signed decimal with two digit precision
-    DATE = 12;   //  days since 4713bc 
+    DATE = 12;   //  days since 4713bc
     TIME = 13;   //  time in micros before or after 2000/1/1
     TIMETZ = 14;   //  time in micros before or after 2000/1/1 with timezone
     TIMESTAMPTZ = 15;   //  unix epoch time in millis
@@ -53,6 +53,8 @@ enum MinorType {
     UINT2 = 30;   //  unsigned 2 byte integer
     UINT4 = 31;   //  unsigned 4 byte integer
     UINT8 = 32;   //  unsigned 8 byte integer
+    DECIMAL28DENSE = 33; // dense decimal representation, supporting precision between 19 and 28
+    DECIMAL38DENSE = 34; // dense decimal representation, supporting precision between 28 and 38
 //    PROTO2 = 33;   //  protobuf encoded complex type. (up to 2^16 in length)
 //    PROTO4 = 34;   //  protobuf encoded complex type. (up to 2^32 in length)
 //    MSGPACK2 = 35;   //  msgpack encoded complex type. (up to 2^16 in length)
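
The renamed enum values partition decimals by precision: DECIMAL9 and DECIMAL18 fit in single 4- and 8-byte integers, while the 28- and 38-digit variants use wider multi-integer buffers in either sparse or dense form. A minimal sketch of choosing a minor type from a requested precision, mirroring the ranges documented above (the helper decimalTypeForPrecision is illustrative, not part of this patch):

    import org.apache.drill.common.types.TypeProtos.MinorType;

    public class DecimalTypeChooser {
      // Illustrative helper following the precision ranges in Types.proto.
      public static MinorType decimalTypeForPrecision(int precision) {
        if (precision <= 9)  return MinorType.DECIMAL9;        // backed by a 4-byte int
        if (precision <= 18) return MinorType.DECIMAL18;       // backed by an 8-byte long
        if (precision <= 28) return MinorType.DECIMAL28SPARSE; // multi-integer buffer
        return MinorType.DECIMAL38SPARSE;                      // up to 38 digits
      }
    }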


[03/10] DRILL-332: Support for decimal data type

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java b/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java
index caabade..cba9f97 100644
--- a/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java
@@ -20,6 +20,8 @@ import java.lang.Long;
 import java.lang.Override;
 import java.sql.Time;
 import java.sql.Timestamp;
+import java.math.BigDecimal;
+import java.math.BigInteger;
 
 <@pp.dropOutputFile />
 <#list vv.types as type>
@@ -44,9 +46,7 @@ package org.apache.drill.exec.vector;
  */
 @SuppressWarnings("unused")
 public final class ${minor.class}Vector extends BaseDataValueVector implements FixedWidthVector{
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(${minor.class}Vector.class);
 
- 
   private final Accessor accessor = new Accessor();
   private final Mutator mutator = new Mutator();
   
@@ -176,8 +176,10 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
     <#if (type.width > 8)>
 
     public ${minor.javaType!type.javaType} get(int index) {
-      ByteBuf dst = allocator.buffer(${type.width});
+      ByteBuf dst = io.netty.buffer.Unpooled.wrappedBuffer(new byte[${type.width}]);
+      //dst = new io.netty.buffer.SwappedByteBuf(dst);
       data.getBytes(index * ${type.width}, dst, 0, ${type.width});
+
       return dst;
     }
 
@@ -290,8 +292,63 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
               append(millis));
     }
 
-    <#else>
+    <#elseif (minor.class == "Decimal28Sparse") || (minor.class == "Decimal38Sparse") || (minor.class == "Decimal28Dense") || (minor.class == "Decimal38Dense")>
+
+    public void get(int index, ${minor.class}Holder holder) {
+
+        holder.start = index * ${type.width};
+
+        holder.buffer = data;
+
+        /* The buffer within the value vector is little endian.
+         * For the dense representation though, we use big endian
+         * byte ordering (internally). This is because we shift bits to the right and
+         * big endian ordering makes sense for this purpose.  So we have to deal with
+         * the sign bit for the two representations in a slightly different fashion
+         */
+
+        // Get the sign of the decimal
+          <#if minor.class.endsWith("Sparse")>
+          if ((holder.buffer.getInt(holder.start) & 0x80000000) != 0) {
+          <#elseif minor.class.endsWith("Dense")>
+          if ((holder.buffer.getInt(holder.start) & 0x00000080) != 0) {
+          </#if>
+            holder.sign = true;
+        }
+
+        holder.scale = getField().getScale();
+        holder.precision = getField().getPrecision();
+
+
+    }
+
+    void get(int index, Nullable${minor.class}Holder holder) {
 
+        holder.start = index * ${type.width};
+
+        holder.buffer = data;
+
+          // Get the sign of the decimal
+          <#if minor.class.endsWith("Sparse")>
+          if ((holder.buffer.getInt(holder.start) & 0x80000000) != 0) {
+          <#elseif minor.class.endsWith("Dense")>
+          if ((holder.buffer.getInt(holder.start) & 0x00000080) != 0) {
+          </#if>
+            holder.sign = true;
+        }
+    }
+
+      @Override
+      public Object getObject(int index) {
+      <#if (minor.class == "Decimal28Sparse") || (minor.class == "Decimal38Sparse")>
+      // Get the BigDecimal object
+      return org.apache.drill.common.util.DecimalUtility.getBigDecimalFromSparse(data, index * ${type.width}, ${minor.nDecimalDigits}, getField().getScale());
+      <#else>
+      return org.apache.drill.common.util.DecimalUtility.getBigDecimalFromDense(data, index * ${type.width}, ${minor.nDecimalDigits}, getField().getScale(), ${minor.maxPrecisionDigits}, ${type.width});
+      </#if>
+    }
+
+    <#else>
     public void get(int index, ${minor.class}Holder holder){
       holder.buffer = data;
       holder.start = index * ${type.width};
@@ -304,13 +361,18 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
 
     @Override
     public Object getObject(int index) {
-      ByteBuf dst = allocator.buffer(${type.width});
-      data.getBytes(index, dst, 0, ${type.width});
+
+      ByteBuf dst = io.netty.buffer.Unpooled.wrappedBuffer(new byte[${type.width}]);
+      //dst = new io.netty.buffer.SwappedByteBuf(dst);
+      data.getBytes(index * ${type.width}, dst, 0, ${type.width});
+
       return dst;
+
+
+
     }
 
     </#if>
-
     <#else> <#-- type.width <= 8 -->
 
     public ${minor.javaType!type.javaType} get(int index) {
@@ -358,17 +420,32 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
         return new Time(time.getMillis());
     }
 
+
+
+    <#elseif minor.class == "Decimal9" || minor.class == "Decimal18">
+    @Override
+    public Object getObject(int index) {
+
+        BigInteger value = BigInteger.valueOf(((${type.boxedType})get(index)).${type.javaType}Value());
+        return new BigDecimal(value, getField().getScale());
+    }
+
     <#else>
     public Object getObject(int index) {
       return get(index);
     }
     </#if>
-    
     public void get(int index, ${minor.class}Holder holder){
+      <#if minor.class.startsWith("Decimal")>
+      holder.scale = getField().getScale();
+      holder.precision = getField().getPrecision();
+      </#if>
+
       holder.value = data.get${(minor.javaType!type.javaType)?cap_first}(index * ${type.width});
     }
 
     void get(int index, Nullable${minor.class}Holder holder){
+
       holder.value = data.get${(minor.javaType!type.javaType)?cap_first}(index * ${type.width});
     }
 
@@ -397,7 +474,7 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
     */
   <#if (type.width > 8)>
    public void set(int index, <#if (type.width > 4)>${minor.javaType!type.javaType}<#else>int</#if> value) {
-     data.setBytes(index * ${type.width}, value);
+     data.setBytes(index * ${type.width}, value, 0, ${type.width});
    }
 
    <#if (minor.class == "TimeStampTZ")>
@@ -473,6 +550,50 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
      set(index, holder);
      return true;
    }
+
+   <#elseif (minor.class == "Decimal28Sparse" || minor.class == "Decimal38Sparse") || (minor.class == "Decimal28Dense") || (minor.class == "Decimal38Dense")>
+
+   public void set(int index, ${minor.class}Holder holder){
+      data.setBytes(index * ${type.width}, holder.buffer, holder.start, ${type.width});
+
+      // Set the sign of the decimal
+      if (holder.sign == true) {
+          int value = data.getInt(index * ${type.width});
+          <#if minor.class.endsWith("Sparse")>
+          data.setInt(index * ${type.width}, (value | 0x80000000));
+          <#elseif minor.class.endsWith("Dense")>
+          data.setInt(index * ${type.width}, (value | 0x00000080));
+          </#if>
+
+      }
+   }
+
+   void set(int index, Nullable${minor.class}Holder holder){
+       data.setBytes(index * ${type.width}, holder.buffer, holder.start, ${type.width});
+
+      // Set the sign of the decimal
+      if (holder.sign == true) {
+          int value = data.getInt(index * ${type.width});
+          <#if minor.class.endsWith("Sparse")>
+          data.setInt(index * ${type.width}, (value | 0x80000000));
+          <#elseif minor.class.endsWith("Dense")>
+          data.setInt(index * ${type.width}, (value | 0x00000080));
+          </#if>
+      }
+   }
+
+   public boolean setSafe(int index,  Nullable${minor.class}Holder holder){
+       if(index >= getValueCapacity()) return false;
+       set(index, holder);
+       return true;
+   }
+
+   public boolean setSafe(int index,  ${minor.class}Holder holder){
+       if(index >= getValueCapacity()) return false;
+       set(index, holder);
+       return true;
+   }
+
    <#else>
    public void set(int index, ${minor.class}Holder holder){
      data.setBytes(index * ${type.width}, holder.buffer, holder.start, ${type.width});
@@ -500,8 +621,8 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
        }
      }
    }
-   
-  <#else> <#-- type.width <= 8 -->
+
+   <#else> <#-- type.width <= 8 -->
    public void set(int index, <#if (type.width >= 4)>${minor.javaType!type.javaType}<#else>int</#if> value) {
      data.set${(minor.javaType!type.javaType)?cap_first}(index * ${type.width}, value);
    }
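
The sparse and dense layouts carry the sign differently: the sparse form tests the high bit of the leading integer as read from the little-endian value buffer (mask 0x80000000), while the dense form keeps its bytes big endian internally, so the same bit lands in the low-order byte (mask 0x00000080). A standalone illustration of the two checks used by the generated accessors and mutators above (a sketch, not lifted verbatim from the templates):

    public class DecimalSignMasks {
      // firstInt is the first four bytes of the decimal value at its buffer offset.
      static boolean isNegativeSparse(int firstInt) {
        return (firstInt & 0x80000000) != 0; // sparse: high bit of the leading int
      }

      static boolean isNegativeDense(int firstInt) {
        return (firstInt & 0x00000080) != 0; // dense: big-endian ordering moves the sign bit
      }
    }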

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java b/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
index b23e53d..bb692dc 100644
--- a/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
@@ -266,11 +266,30 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
     public void get(int index, Nullable${minor.class}Holder holder){
       holder.isSet = bits.getAccessor().get(index);
       values.getAccessor().get(index, holder);
+
+      <#if minor.class.startsWith("Decimal")>
+      holder.scale = getField().getScale();
+      holder.precision = getField().getPrecision();
+      </#if>
     }
 
     @Override
     public Object getObject(int index) {
-      return isNull(index) ? null : values.getAccessor().getObject(index);
+
+      if (isNull(index)) {
+          return null;
+      }
+      <#if minor.class == "Decimal9" || minor.class == "Decimal18">
+      // Get the value and construct a BigDecimal Object
+      BigInteger value = BigInteger.valueOf(((${type.boxedType})values.getAccessor().get(index)).${type.javaType}Value());
+      return new BigDecimal(value, getField().getScale());
+      <#elseif minor.class == "Decimal38Sparse" || minor.class == "Decimal28Sparse">
+      return org.apache.drill.common.util.DecimalUtility.getBigDecimalFromSparse(values.getData(), index * ${type.width}, ${minor.nDecimalDigits}, getField().getScale());
+      <#elseif minor.class == "Decimal38Dense" || minor.class == "Decimal28Dense">
+      return org.apache.drill.common.util.DecimalUtility.getBigDecimalFromDense(values.getData(), index * ${type.width}, ${minor.nDecimalDigits}, getField().getScale(), ${minor.maxPrecisionDigits}, ${type.width});
+      <#else>
+      return values.getAccessor().getObject(index);
+      </#if>
     }
 
     public int getValueCount(){
@@ -362,8 +381,15 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
       values.getMutator().set(index, holder);
       <#if type.major == "VarLen">lastSet = index;</#if>
     }
-    
-    public boolean setSafe(int index, <#if type.major == "VarLen" || minor.class == "TimeStampTZ" || minor.class == "Interval" || minor.class == "IntervalDay">Nullable${minor.class}Holder <#elseif (type.width < 4)>int<#else>${minor.javaType!type.javaType}</#if> value){
+
+    //public boolean setSafe(int index, <#if type.major == "VarLen" || minor.class == "TimeStampTZ" || minor.class == "Interval" || minor.class == "IntervalDay">Nullable${minor.class}Holder <#elseif (type.width < 4)>int<#else>${minor.javaType!type.javaType}</#if> value){
+
+    <#if type.major == "VarLen" || minor.class == "Decimal28Sparse" || minor.class == "Decimal38Sparse" || minor.class == "Decimal28Dense" || minor.class == "Decimal38Dense" || minor.class == "TimeStampTZ" || minor.class == "Interval" || minor.class == "IntervalDay">
+    public boolean setSafe(int index, Nullable${minor.class}Holder value) {
+    <#else>
+    public boolean setSafe(int index, ${minor.javaType!type.javaType} value) {
+    </#if>
+
       <#if type.major == "VarLen">
       for (int i = lastSet + 1; i < index; i++) {
         values.getMutator().set(i, new byte[]{});
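
For the single-integer decimals the nullable accessor only has to pair the stored unscaled value with the scale recorded in the field metadata. A small worked example of that construction (the values are made up for illustration):

    import java.math.BigDecimal;
    import java.math.BigInteger;

    public class UnscaledDecimalExample {
      public static void main(String[] args) {
        // Decimal9/Decimal18 store just the unscaled integer; the scale comes from the field.
        long unscaled = 123456L;
        int scale = 4;
        BigDecimal value = new BigDecimal(BigInteger.valueOf(unscaled), scale);
        System.out.println(value); // prints 12.3456
      }
    }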

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/SqlAccessors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/SqlAccessors.java b/exec/java-exec/src/main/codegen/templates/SqlAccessors.java
index cd6955e..0e6b4a7 100644
--- a/exec/java-exec/src/main/codegen/templates/SqlAccessors.java
+++ b/exec/java-exec/src/main/codegen/templates/SqlAccessors.java
@@ -107,6 +107,11 @@ public class ${name}Accessor extends AbstractSqlAccessor{
   public byte[] getBytes(int index) {
       return null;
   }
+  <#elseif minor.class.startsWith("Decimal")>
+  @Override
+  public BigDecimal getBigDecimal(int index) {
+      return (BigDecimal) ac.getObject(index);
+  }
   <#else>
   @Override
   public ${javaType} get${javaType?cap_first}(int index){

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/TypeHelper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/TypeHelper.java b/exec/java-exec/src/main/codegen/templates/TypeHelper.java
index 0ceb14c..bd89ce7 100644
--- a/exec/java-exec/src/main/codegen/templates/TypeHelper.java
+++ b/exec/java-exec/src/main/codegen/templates/TypeHelper.java
@@ -139,8 +139,9 @@ public class TypeHelper {
 <#list vv.types as type>
   <#list type.minor as minor>
     case ${minor.class?upper_case} :
-      <#if minor.class?starts_with("Var") || minor.class == "TimeStampTZ" || minor.class == "IntervalDay" || minor.class == "Interval">
-         throw new UnsupportedOperationException(type.getMinorType() + " type is not supported."); 
+      <#if minor.class?starts_with("Var") || minor.class == "TimeStampTZ" || minor.class == "IntervalDay" || minor.class == "Interval" ||
+        minor.class?starts_with("Decimal28") ||  minor.class?starts_with("Decimal38")>
+         throw new UnsupportedOperationException(type.getMinorType() + " type is not supported.");
       <#else>
       holder = new ${minor.class}Holder(); 
       ((${minor.class}Holder)holder).value = ((${minor.class}Vector) vector).getAccessor().get(index);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/codegen/templates/ValueHolders.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/ValueHolders.java b/exec/java-exec/src/main/codegen/templates/ValueHolders.java
index f63077e..e36e655 100644
--- a/exec/java-exec/src/main/codegen/templates/ValueHolders.java
+++ b/exec/java-exec/src/main/codegen/templates/ValueHolders.java
@@ -42,8 +42,7 @@ public final class ${className} implements ValueHolder{
       </#if>
       
       <#if type.major != "VarLen">
-      
-      <#if (type.width > 8)>
+
       <#if (minor.class == "TimeStampTZ")>
       public long value;
       public int index;
@@ -54,19 +53,77 @@ public final class ${className} implements ValueHolder{
       <#elseif (minor.class == "IntervalDay")>
       public int days;
       public int milliSeconds;
-      <#else>
+    <#elseif minor.class.startsWith("Decimal")>
+    public int scale;
+    public int precision;
+    public static final int maxPrecision = ${minor.maxPrecisionDigits};
+    <#if minor.class.startsWith("Decimal28") || minor.class.startsWith("Decimal38")>
+    public boolean sign;
+    public int start;
+    public ByteBuf buffer;
+    public static final int nDecimalDigits = ${minor.nDecimalDigits};
+
+
+    public int getInteger(int index) {
+        int value = buffer.getInt(start + (index * 4));
+
+        if (index == 0) {
+            /* the first byte contains sign bit, return value without it */
+            <#if minor.class.endsWith("Sparse")>
+            value = (value & 0x7FFFFFFF);
+            <#elseif minor.class.endsWith("Dense")>
+            value = (value & 0x0000007F);
+            </#if>
+        }
+        return value;
+    }
+
+    public void setInteger(int index, int value) {
+        buffer.setInt(start + (index * 4), value);
+    }
+
+    // TODO: This is a temporary hack to swap holders. We need a generic solution for this issue
+    public void swap(${className} right) {
+        int tempScale = this.scale;
+        int tempPrec = this.precision;
+        boolean tempSign = this.sign;
+        ByteBuf tempBuf = this.buffer;
+        int start = this.start;
+
+        this.scale = right.scale;
+        this.precision = right.precision;
+        this.sign = right.sign;
+        this.buffer = right.buffer;
+        this.start = right.start;
+
+        right.scale = tempScale;
+        right.precision = tempPrec;
+        right.sign = tempSign;
+        right.buffer = tempBuf;
+        right.start = start;
+
+        <#if mode.prefix == "Nullable">
+        int isSet = this.isSet;
+        this.isSet = right.isSet;
+        right.isSet = isSet;
+        </#if>
+    }
+
+    <#else>
+    public ${minor.javaType!type.javaType} value;
+    </#if>
+
+      <#elseif (type.width > 8)>
       public int start;
       public ByteBuf buffer;
-      </#if>
       <#else>
         public ${minor.javaType!type.javaType} value;
-      
       </#if>
       <#else>
       /** The first offset (inclusive) into the buffer. **/
       public int start;
       
-      /** The last offset (exclusive) into the buffer. **/
+  /** The last offset (exclusive) into the buffer. **/
       public int end;
       
       /** The buffer holding actual values. **/
@@ -95,8 +152,6 @@ public final class ${className} implements ValueHolder{
       /** The Vector holding the actual values. **/
       public ${minor.class}Vector vector;
     </#if>
-  
-    
 }
 
 </#list>
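
The Decimal28/38 holders expose the value as a run of 32-bit integers addressed through getInteger/setInteger, with the sign folded into the first integer. Assuming each integer carries nine base-10 digits (the interpretation DecimalUtility.getBigDecimalFromSparse applies in the accessors above), a sketch of reassembling a BigDecimal from such digit groups looks like this:

    import java.math.BigDecimal;
    import java.math.BigInteger;

    public class SparseDigitsSketch {
      // Illustrative only: digits[0] is the most significant nine-digit group.
      static BigDecimal fromSparseDigits(int[] digits, int scale, boolean negative) {
        BigInteger base = BigInteger.valueOf(1000000000L); // 10^9 per integer
        BigInteger unscaled = BigInteger.ZERO;
        for (int group : digits) {
          unscaled = unscaled.multiply(base).add(BigInteger.valueOf(group));
        }
        return new BigDecimal(negative ? unscaled.negate() : unscaled, scale);
      }

      public static void main(String[] args) {
        // Two groups (1 and 234567890) with scale 5 print 12345.67890
        System.out.println(fromSparseDigits(new int[]{1, 234567890}, 5, false));
      }
    }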

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/ConstantExpressionIdentifier.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/ConstantExpressionIdentifier.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/ConstantExpressionIdentifier.java
index 2a87bab..2b125cd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/ConstantExpressionIdentifier.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/ConstantExpressionIdentifier.java
@@ -37,6 +37,10 @@ import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpressio
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 import org.apache.drill.common.expression.visitors.ExprVisitor;
 
@@ -134,16 +138,31 @@ public class ConstantExpressionIdentifier implements ExprVisitor<Boolean, Identi
   }
 
   @Override
+  public Boolean visitDecimal9Constant(Decimal9Expression decExpr, IdentityHashMap<LogicalExpression, Object> value){
+    return true;
+  }
+
+  @Override
   public Boolean visitTimeConstant(TimeExpression intExpr, IdentityHashMap<LogicalExpression, Object> value){
     return true;
   }
 
   @Override
+  public Boolean visitDecimal18Constant(Decimal18Expression decExpr, IdentityHashMap<LogicalExpression, Object> value){
+    return true;
+  }
+
+  @Override
   public Boolean visitIntervalYearConstant(IntervalYearExpression intExpr, IdentityHashMap<LogicalExpression, Object> value){
     return true;
   }
 
   @Override
+  public Boolean visitDecimal28Constant(Decimal28Expression decExpr, IdentityHashMap<LogicalExpression, Object> value){
+    return true;
+  }
+
+  @Override
   public Boolean visitIntervalDayConstant(IntervalDayExpression intExpr, IdentityHashMap<LogicalExpression, Object> value){
     return true;
   }
@@ -154,6 +173,11 @@ public class ConstantExpressionIdentifier implements ExprVisitor<Boolean, Identi
   }
 
   @Override
+  public Boolean visitDecimal38Constant(Decimal38Expression decExpr, IdentityHashMap<LogicalExpression, Object> value){
+    return true;
+  }
+
+  @Override
   public Boolean visitDoubleConstant(DoubleExpression dExpr, IdentityHashMap<LogicalExpression, Object> value){
     return true;
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
index aff47db..2e632a3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
@@ -20,6 +20,7 @@ package org.apache.drill.exec.expr;
 import java.util.List;
 import java.util.Set;
 
+import io.netty.buffer.ByteBuf;
 import org.apache.drill.common.expression.CastExpression;
 import org.apache.drill.common.expression.FunctionCall;
 import org.apache.drill.common.expression.FunctionHolderExpression;
@@ -38,6 +39,10 @@ import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpressio
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 import org.apache.drill.common.expression.visitors.AbstractExprVisitor;
 import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -76,7 +81,6 @@ public class EvaluationVisitor {
     Set<LogicalExpression> constantBoundaries = ConstantExpressionIdentifier.getConstantExpressionSet(e);
     //Set<LogicalExpression> constantBoundaries = Collections.emptySet();
     return e.accept(new ConstantFilter(constantBoundaries), generator);
-    
   }
 
   private class EvalVisitor extends AbstractExprVisitor<HoldingContainer, ClassGenerator<?>, RuntimeException> {
@@ -370,23 +374,72 @@ public class EvaluationVisitor {
       JVar var = generator.declareClassField("intervalday", holderType);
       JExpression dayLiteral = JExpr.lit(e.getIntervalDay());
       JExpression millisLiteral = JExpr.lit(e.getIntervalMillis());
-
       setup.assign(var, ((JClass)generator.getModel().ref(ValueHolderHelper.class)).staticInvoke("getIntervalDayHolder").arg(dayLiteral).arg(millisLiteral));
       return new HoldingContainer(majorType, var, null, null);
     }
 
     @Override
+    public HoldingContainer visitDecimal9Constant(Decimal9Expression e, ClassGenerator<?> generator) throws RuntimeException {
+      MajorType majorType = e.getMajorType();
+      JBlock setup = generator.getBlock(BlockType.SETUP);
+      JType holderType = generator.getHolderType(majorType);
+      JVar var = generator.declareClassField("dec9", holderType);
+      JExpression valueLiteral = JExpr.lit(e.getIntFromDecimal());
+      JExpression scaleLiteral = JExpr.lit(e.getScale());
+      JExpression precisionLiteral = JExpr.lit(e.getPrecision());
+      setup.assign(var, ((JClass)generator.getModel().ref(ValueHolderHelper.class)).staticInvoke("getDecimal9Holder").arg(valueLiteral).arg(scaleLiteral).arg(precisionLiteral));
+      return new HoldingContainer(majorType, var, null, null);
+    }
+
+    @Override
+    public HoldingContainer visitDecimal18Constant(Decimal18Expression e, ClassGenerator<?> generator) throws RuntimeException {
+      MajorType majorType = e.getMajorType();
+      JBlock setup = generator.getBlock(BlockType.SETUP);
+      JType holderType = generator.getHolderType(majorType);
+      JVar var = generator.declareClassField("dec18", holderType);
+      JExpression valueLiteral = JExpr.lit(e.getLongFromDecimal());
+      JExpression scaleLiteral = JExpr.lit(e.getScale());
+      JExpression precisionLiteral = JExpr.lit(e.getPrecision());
+      setup.assign(var, ((JClass)generator.getModel().ref(ValueHolderHelper.class)).staticInvoke("getDecimal18Holder").arg(valueLiteral).arg(scaleLiteral).arg(precisionLiteral));
+      return new HoldingContainer(majorType, var, null, null);
+    }
+
+    @Override
+    public HoldingContainer visitDecimal28Constant(Decimal28Expression e, ClassGenerator<?> generator)
+        throws RuntimeException {
+      MajorType majorType = e.getMajorType();
+      JBlock setup = generator.getBlock(BlockType.SETUP);
+      JType holderType = generator.getHolderType(majorType);
+      JVar var = generator.declareClassField("dec28", holderType);
+      JExpression stringLiteral = JExpr.lit(e.getBigDecimal().toString());
+      setup.assign(var, ((JClass)generator.getModel().ref(ValueHolderHelper.class)).staticInvoke("getDecimal28Holder").arg(stringLiteral));
+      return new HoldingContainer(majorType, var, null, null);
+    }
+
+    @Override
+    public HoldingContainer visitDecimal38Constant(Decimal38Expression e, ClassGenerator<?> generator)
+        throws RuntimeException {
+      MajorType majorType = e.getMajorType();
+      JBlock setup = generator.getBlock(BlockType.SETUP);
+      JType holderType = generator.getHolderType(majorType);
+      JVar var = generator.declareClassField("dec38", holderType);
+      JExpression stringLiteral = JExpr.lit(e.getBigDecimal().toString());
+      setup.assign(var, ((JClass)generator.getModel().ref(ValueHolderHelper.class)).staticInvoke("getVarCharHolder").arg(stringLiteral));
+      return new HoldingContainer(majorType, var, null, null);
+    }
+
+    @Override
     public HoldingContainer visitCastExpression(CastExpression e, ClassGenerator<?> value) throws RuntimeException {
       throw new UnsupportedOperationException("CastExpression is not expected here. "+
         "It should have been converted to FunctionHolderExpression in materialization");
     }
-  }
 
+  }
   private class ConstantFilter extends EvalVisitor {
 
     private Set<LogicalExpression> constantBoundaries;
-    
-    
+
+
     public ConstantFilter(Set<LogicalExpression> constantBoundaries) {
       super();
       this.constantBoundaries = constantBoundaries;
@@ -420,7 +473,7 @@ public class EvaluationVisitor {
         HoldingContainer c = super.visitIfExpression(e, generator);
         // generator.getMappingSet().exitConstant();
         // return c;
-        return renderConstantExpression(generator, c); 
+        return renderConstantExpression(generator, c);
       } else if (generator.getMappingSet().isWithinConstant()) {
         return super.visitIfExpression(e, generator).setConstant(true);
       } else {
@@ -458,6 +511,59 @@ public class EvaluationVisitor {
       }
     }
 
+
+    @Override
+    public HoldingContainer visitDecimal9Constant(Decimal9Expression e, ClassGenerator<?> generator) throws RuntimeException {
+      if (constantBoundaries.contains(e)) {
+        generator.getMappingSet().enterConstant();
+        HoldingContainer c = super.visitDecimal9Constant(e, generator);
+        return renderConstantExpression(generator, c);
+      } else if (generator.getMappingSet().isWithinConstant()) {
+        return super.visitDecimal9Constant(e, generator).setConstant(true);
+      } else {
+        return super.visitDecimal9Constant(e, generator);
+      }
+    }
+
+    @Override
+    public HoldingContainer visitDecimal18Constant(Decimal18Expression e, ClassGenerator<?> generator) throws RuntimeException {
+      if (constantBoundaries.contains(e)) {
+        generator.getMappingSet().enterConstant();
+        HoldingContainer c = super.visitDecimal18Constant(e, generator);
+        return renderConstantExpression(generator, c);
+      } else if (generator.getMappingSet().isWithinConstant()) {
+        return super.visitDecimal18Constant(e, generator).setConstant(true);
+      } else {
+        return super.visitDecimal18Constant(e, generator);
+      }
+    }
+
+    @Override
+    public HoldingContainer visitDecimal28Constant(Decimal28Expression e, ClassGenerator<?> generator) throws RuntimeException {
+      if (constantBoundaries.contains(e)) {
+        generator.getMappingSet().enterConstant();
+        HoldingContainer c = super.visitDecimal28Constant(e, generator);
+        return renderConstantExpression(generator, c);
+      } else if (generator.getMappingSet().isWithinConstant()) {
+        return super.visitDecimal28Constant(e, generator).setConstant(true);
+      } else {
+        return super.visitDecimal28Constant(e, generator);
+      }
+    }
+
+    @Override
+    public HoldingContainer visitDecimal38Constant(Decimal38Expression e, ClassGenerator<?> generator) throws RuntimeException {
+      if (constantBoundaries.contains(e)) {
+        generator.getMappingSet().enterConstant();
+        HoldingContainer c = super.visitDecimal38Constant(e, generator);
+        return renderConstantExpression(generator, c);
+      } else if (generator.getMappingSet().isWithinConstant()) {
+        return super.visitDecimal38Constant(e, generator).setConstant(true);
+      } else {
+        return super.visitDecimal38Constant(e, generator);
+      }
+    }
+
     @Override
     public HoldingContainer visitIntConstant(IntExpression e, ClassGenerator<?> generator) throws RuntimeException {
       if (constantBoundaries.contains(e)) {
@@ -561,7 +667,7 @@ public class EvaluationVisitor {
       }
     }
 
-    
+
     @Override
     public HoldingContainer visitUnknown(LogicalExpression e, ClassGenerator<?> generator) throws RuntimeException {
       if (constantBoundaries.contains(e)) {
@@ -573,7 +679,7 @@ public class EvaluationVisitor {
       } else if (generator.getMappingSet().isWithinConstant()) {
         return super.visitUnknown(e, generator).setConstant(true);
       } else {
-        return super.visitUnknown(e, generator); 
+        return super.visitUnknown(e, generator);
       }
     }
 
@@ -589,7 +695,7 @@ public class EvaluationVisitor {
       } else if (generator.getMappingSet().isWithinConstant()) {
         return super.visitQuotedStringConstant(e, generator).setConstant(true);
       } else {
-        return super.visitQuotedStringConstant(e, generator);          
+        return super.visitQuotedStringConstant(e, generator);
       }
     }
 
@@ -610,14 +716,13 @@ public class EvaluationVisitor {
     }
 
     /* Get a HoldingContainer for a constant expression. The returned HoldingContainer will indicate it's for
-     * a constant expression. 
-     * */    
+     * a constant expression.
+     * */
     private HoldingContainer renderConstantExpression(ClassGenerator<?> generator, HoldingContainer input){
       JVar fieldValue = generator.declareClassField("constant", generator.getHolderType(input.getMajorType()));
       generator.getEvalBlock().assign(fieldValue, input.getHolder());
       generator.getMappingSet().exitConstant();
-      return new HoldingContainer(input.getMajorType(), fieldValue, fieldValue.ref("value"), fieldValue.ref("isSet")).setConstant(true);                        
+      return new HoldingContainer(input.getMajorType(), fieldValue, fieldValue.ref("value"), fieldValue.ref("isSet")).setConstant(true);
     }
-
   }
 }
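
Each decimal literal carries its own unscaled value, scale, and precision into the generated setup block. Working one example through the Decimal9 path above: the literal 12.3456 has unscaled value 123456, scale 4 and precision 6, so the setup code it produces is equivalent to

    dec9 = ValueHolderHelper.getDecimal9Holder(123456, 4, 6);

with the argument order (value, scale, precision) taken directly from visitDecimal9Constant.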

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
index 1d8070c..f9572db 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
@@ -38,6 +38,10 @@ import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpressio
 import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
 import org.apache.drill.common.expression.ValueExpressions.IntExpression;
 import org.apache.drill.common.expression.ValueExpressions.QuotedString;
 import org.apache.drill.common.expression.fn.CastFunctions;
@@ -127,24 +131,34 @@ public class ExpressionTreeMaterializer {
       if (matchedFuncHolder!=null) {
         //Compare parm type against arg type. Insert cast on top of arg, whenever necessary.
         for (int i = 0; i < call.args.size(); ++i) {
+
+          LogicalExpression currentArg = call.args.get(i);
+
           TypeProtos.MajorType parmType = matchedFuncHolder.getParmMajorType(i);
 
           //Case 1: If  1) the argument is NullExpression
           //            2) the parameter of matchedFuncHolder allows null input, or func's null_handling is NULL_IF_NULL (means null and non-null are exchangeable).
           //        then replace NullExpression with a TypedNullConstant
-          if (call.args.get(i).equals(NullExpression.INSTANCE) &&
+          if (currentArg.equals(NullExpression.INSTANCE) &&
             ( parmType.getMode().equals(TypeProtos.DataMode.OPTIONAL) ||
               matchedFuncHolder.getNullHandling() == FunctionTemplate.NullHandling.NULL_IF_NULL)) {
             argsWithCast.add(new TypedNullConstant(parmType));
-          } else if (Types.softEquals(parmType, call.args.get(i).getMajorType(), matchedFuncHolder.getNullHandling() ==
-            FunctionTemplate.NullHandling.NULL_IF_NULL)) {
+          } else if (Types.softEquals(parmType, currentArg.getMajorType(), matchedFuncHolder.getNullHandling() ==
+                  FunctionTemplate.NullHandling.NULL_IF_NULL)) {
             //Case 2: argument and parameter matches. Do nothing.
-            argsWithCast.add(call.args.get(i));
+            argsWithCast.add(currentArg);
           } else {
             //Case 3: insert cast if param type is different from arg type.
             String castFuncName = CastFunctions.getCastFunc(parmType.getMinorType());
             List<LogicalExpression> castArgs = Lists.newArrayList();
             castArgs.add(call.args.get(i));  //input_expr
+
+            if (parmType.getMinorType().name().startsWith("DECIMAL")) {
+              // Add the scale and precision to the arguments of the implicit cast
+              castArgs.add(new ValueExpressions.LongExpression(currentArg.getMajorType().getPrecision(), null));
+              castArgs.add(new ValueExpressions.LongExpression(currentArg.getMajorType().getScale(), null));
+            }
+
             FunctionCall castCall = new FunctionCall(castFuncName, castArgs, ExpressionPosition.UNKNOWN);
             DrillFuncHolder matchedCastFuncHolder = resolver.getBestMatch(
               registry.getDrillRegistry().getMethods().get(castFuncName), castCall);
@@ -155,6 +169,7 @@ public class ExpressionTreeMaterializer {
             }
 
             argsWithCast.add(new DrillFuncHolderExpr(call.getName(), matchedCastFuncHolder, castArgs, ExpressionPosition.UNKNOWN));
+
           }
         }
         return new DrillFuncHolderExpr(call.getName(), matchedFuncHolder, argsWithCast, call.getPosition());
@@ -263,6 +278,26 @@ public class ExpressionTreeMaterializer {
     }
 
     @Override
+    public LogicalExpression visitDecimal9Constant(Decimal9Expression decExpr, FunctionImplementationRegistry registry) {
+      return decExpr;
+    }
+
+    @Override
+    public LogicalExpression visitDecimal18Constant(Decimal18Expression decExpr, FunctionImplementationRegistry registry) {
+      return decExpr;
+    }
+
+    @Override
+    public LogicalExpression visitDecimal28Constant(Decimal28Expression decExpr, FunctionImplementationRegistry registry) {
+      return decExpr;
+    }
+
+    @Override
+    public LogicalExpression visitDecimal38Constant(Decimal38Expression decExpr, FunctionImplementationRegistry registry) {
+      return decExpr;
+    }
+
+    @Override
     public LogicalExpression visitDoubleConstant(DoubleExpression dExpr, FunctionImplementationRegistry registry) {
       return dExpr;
     }
@@ -288,27 +323,29 @@ public class ExpressionTreeMaterializer {
       
       if(castEqual(e.getPosition(), newMajor, input.getMajorType())) return input; // don't do pointless cast.
       
-      
-      if(newMinor == MinorType.LATE){
-        throw new UnsupportedOperationException("LATE binding is not supported");
-      } else if (newMinor == MinorType.NULL){
-        // convert it into null expression
-        return NullExpression.INSTANCE;
-      }
-
-      // if the type is fully bound, convert to functioncall and materialze the function.
-      MajorType type = e.getMajorType();
-      String castFuncWithType = "cast" + type.getMinorType().name();
-
-      List<LogicalExpression> newArgs = Lists.newArrayList();
-      newArgs.add(e.getInput());  //input_expr
-
-      //VarLen type
-      if (!Types.isFixedWidthType(type)) {
-        newArgs.add(new ValueExpressions.LongExpression(type.getWidth(), null));
+      if(newMinor == MinorType.LATE || newMinor == MinorType.NULL){
+        // if the type still isn't fully bound, leave as cast expression.
+        return new CastExpression(input, e.getMajorType(), e.getPosition());
+      }else{
+        // if the type is fully bound, convert to a function call and materialize the function.
+        MajorType type = e.getMajorType();
+
+        // Get the cast function name from the map
+        String castFuncWithType = CastFunctions.getCastFunc(type.getMinorType());
+
+        List<LogicalExpression> newArgs = Lists.newArrayList();
+        newArgs.add(e.getInput());  //input_expr
+
+        //VarLen type
+        if (!Types.isFixedWidthType(type)) {
+          newArgs.add(new ValueExpressions.LongExpression(type.getWidth(), null));
+        } else if (type.getMinorType().name().startsWith("DECIMAL")) {
+            newArgs.add(new ValueExpressions.LongExpression(type.getPrecision(), null));
+            newArgs.add(new ValueExpressions.LongExpression(type.getScale(), null));
+        }
+        FunctionCall fc = new FunctionCall(castFuncWithType, newArgs, e.getPosition());
+        return fc.accept(this, value);   
       }
-      FunctionCall fc = new FunctionCall(castFuncWithType, newArgs, e.getPosition());
-      return fc.accept(this, value);
     }
   
     private boolean castEqual(ExpressionPosition pos, MajorType from, MajorType to){
@@ -326,6 +363,16 @@ public class ExpressionTreeMaterializer {
       case UINT8:
         // nothing else matters.
         return true;
+    case DECIMAL9:
+    case DECIMAL18:
+    case DECIMAL28DENSE:
+    case DECIMAL28SPARSE:
+    case DECIMAL38DENSE:
+    case DECIMAL38SPARSE:
+      if (to.getScale() == from.getScale() && to.getPrecision() == from.getPrecision()) {
+        return true;
+      }
+      return false;
 
       case FIXED16CHAR:
       case FIXEDBINARY:

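For readers following the implicit-cast change above: when an argument needs a cast to a decimal parameter type, the materializer appends the argument's precision and scale as trailing LongExpression literals before resolving the cast function. A hedged sketch of the argument list it builds (variable names are illustrative; the cast function name is whatever CastFunctions.getCastFunc returns for the parameter's minor type):

    // Sketch of the implicit decimal cast call: castFn(inputExpr, precision, scale)
    List<LogicalExpression> castArgs = Lists.newArrayList();
    castArgs.add(inputExpr);                                                          // expression being cast
    castArgs.add(new ValueExpressions.LongExpression(argType.getPrecision(), null));  // trailing arg: precision
    castArgs.add(new ValueExpressions.LongExpression(argType.getScale(), null));      // trailing arg: scale
    FunctionCall castCall = new FunctionCall(castFuncName, castArgs, ExpressionPosition.UNKNOWN);
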
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/expr/annotations/FunctionTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/annotations/FunctionTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/annotations/FunctionTemplate.java
index d91b282..53c9952 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/annotations/FunctionTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/annotations/FunctionTemplate.java
@@ -51,6 +51,6 @@ public @interface FunctionTemplate {
   }
   
   public static enum FunctionScope{
-    SIMPLE, POINT_AGGREGATE, HOLISTIC_AGGREGATE, RANGE_AGGREGATE;
+    SIMPLE, POINT_AGGREGATE, HOLISTIC_AGGREGATE, RANGE_AGGREGATE, DECIMAL_MAX_SCALE, DECIMAL_SUM_SCALE, DECIMAL_CAST;
   }
 }
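
For context, a function implementation opts into one of the new scopes through its @FunctionTemplate annotation. The declaration below is hypothetical (the function name, class name, and omitted body are invented; only the scope and nulls attributes come from this commit):

    @FunctionTemplate(name = "multiply",   // hypothetical registered name
                      scope = FunctionTemplate.FunctionScope.DECIMAL_SUM_SCALE,
                      nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
    public static class DecimalMultiplySketch implements DrillSimpleFunc {
      // @Param / @Output holders and setup()/eval() omitted for brevity
    }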

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalCastFuncHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalCastFuncHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalCastFuncHolder.java
new file mode 100644
index 0000000..2a0ac0c
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalCastFuncHolder.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.fn;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.ValueExpressions;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+
+public class DrillDecimalCastFuncHolder extends DrillSimpleFuncHolder {
+
+
+    public DrillDecimalCastFuncHolder(FunctionScope scope, NullHandling nullHandling, boolean isBinaryCommutative, boolean isRandom,
+                                          String[] registeredNames, ValueReference[] parameters, ValueReference returnValue, WorkspaceReference[] workspaceVars,
+                                          Map<String, String> methods, List<String> imports) {
+        super(scope, nullHandling, isBinaryCommutative, isRandom, registeredNames, parameters, returnValue, workspaceVars, methods, imports);
+    }
+
+    @Override
+    public MajorType getReturnType(List<LogicalExpression> args) {
+
+        TypeProtos.DataMode mode = returnValue.type.getMode();
+
+        if (nullHandling == NullHandling.NULL_IF_NULL) {
+            // if any one of the input types is nullable, then return nullable return type
+            for (LogicalExpression e : args) {
+                if (e.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+                    mode = TypeProtos.DataMode.OPTIONAL;
+                    break;
+                }
+            }
+        }
+
+        if (args.size() != 3) {
+            StringBuilder err = new StringBuilder();
+            for (int i = 0; i < args.size(); i++) {
+                err.append("arg" + i + ": " + args.get(i).getMajorType().getMinorType());
+            }
+            throw new DrillRuntimeException("Decimal cast function invoked with incorrect arguments: " + err);
+        }
+
+        int scale = (int) ((ValueExpressions.LongExpression)(args.get(args.size() - 1))).getLong();
+        int precision = (int) ((ValueExpressions.LongExpression)(args.get(args.size() - 2))).getLong();
+        return (TypeProtos.MajorType.newBuilder().setMinorType(returnValue.type.getMinorType()).setScale(scale).setPrecision(precision).setMode(mode).build());
+    }
+}
\ No newline at end of file
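
Tying this holder back to the materializer change earlier in the commit: a decimal cast call carries exactly three arguments (input expression, precision, scale), and getReturnType reads the last two to stamp the output type. A small illustration with invented literal values:

    // For a call shaped like castDECIMAL9(input, 9, 2), getReturnType effectively builds:
    MajorType returnType = TypeProtos.MajorType.newBuilder()
        .setMinorType(TypeProtos.MinorType.DECIMAL9)   // the holder's declared return minor type
        .setPrecision(9)                               // args.get(args.size() - 2)
        .setScale(2)                                   // args.get(args.size() - 1)
        .setMode(TypeProtos.DataMode.REQUIRED)         // OPTIONAL if any input is nullable under NULL_IF_NULL
        .build();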

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalMaxScaleFuncHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalMaxScaleFuncHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalMaxScaleFuncHolder.java
new file mode 100644
index 0000000..62fa513
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalMaxScaleFuncHolder.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.fn;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+
+public class DrillDecimalMaxScaleFuncHolder extends DrillSimpleFuncHolder{
+
+
+    public DrillDecimalMaxScaleFuncHolder(FunctionScope scope, NullHandling nullHandling, boolean isBinaryCommutative, boolean isRandom,
+    String[] registeredNames, ValueReference[] parameters, ValueReference returnValue, WorkspaceReference[] workspaceVars,
+    Map<String, String> methods, List<String> imports) {
+        super(scope, nullHandling, isBinaryCommutative, isRandom, registeredNames, parameters, returnValue, workspaceVars, methods, imports);
+    }
+
+    @Override
+    public MajorType getReturnType(List<LogicalExpression> args) {
+
+        TypeProtos.DataMode mode = returnValue.type.getMode();
+        int scale = 0;
+        int precision = 0;
+
+        if (nullHandling == NullHandling.NULL_IF_NULL) {
+            // if any one of the input types is nullable, then return nullable return type
+            for (LogicalExpression e : args) {
+                if (e.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+                    mode = TypeProtos.DataMode.OPTIONAL;
+                }
+                scale = Math.max(scale, e.getMajorType().getScale());
+                precision = Math.max(precision, e.getMajorType().getPrecision());
+            }
+        }
+        return (TypeProtos.MajorType.newBuilder().setMinorType(returnValue.type.getMinorType()).setScale(scale).setPrecision(precision).setMode(mode).build());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalSumScaleFuncHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalSumScaleFuncHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalSumScaleFuncHolder.java
new file mode 100644
index 0000000..2e82966
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillDecimalSumScaleFuncHolder.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.fn;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+
+public class DrillDecimalSumScaleFuncHolder extends DrillSimpleFuncHolder{
+
+
+    public DrillDecimalSumScaleFuncHolder(FunctionScope scope, NullHandling nullHandling, boolean isBinaryCommutative, boolean isRandom,
+                                          String[] registeredNames, ValueReference[] parameters, ValueReference returnValue, WorkspaceReference[] workspaceVars,
+                                          Map<String, String> methods, List<String> imports) {
+        super(scope, nullHandling, isBinaryCommutative, isRandom, registeredNames, parameters, returnValue, workspaceVars, methods, imports);
+    }
+
+    @Override
+    public MajorType getReturnType(List<LogicalExpression> args) {
+
+        TypeProtos.DataMode mode = returnValue.type.getMode();
+        int scale = 0;
+        int precision = 0;
+
+        if (nullHandling == NullHandling.NULL_IF_NULL) {
+            // if any one of the input types is nullable, then return nullable return type
+            for (LogicalExpression e : args) {
+                if (e.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+                    mode = TypeProtos.DataMode.OPTIONAL;
+                }
+                scale += e.getMajorType().getScale();
+                precision = Math.max(precision, e.getMajorType().getPrecision());
+            }
+        }
+        return (TypeProtos.MajorType.newBuilder().setMinorType(returnValue.type.getMinorType()).setScale(scale).setPrecision(precision).setMode(mode).build());
+    }
+}
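
A quick worked example of how the two holders above differ. Only the scale/precision arithmetic comes from the code; pairing the holders with particular operators (max-of-scales for add/subtract style functions, sum-of-scales for multiply style functions) is an assumption here. Note also that, as committed, the arithmetic runs inside the NULL_IF_NULL branch.

    // Combining DECIMAL(10, 2) with DECIMAL(8, 4):
    int scaleA = 2,  precisionA = 10;
    int scaleB = 4,  precisionB = 8;

    int maxScale  = Math.max(scaleA, scaleB);           // 4  -> DrillDecimalMaxScaleFuncHolder
    int sumScale  = scaleA + scaleB;                    // 6  -> DrillDecimalSumScaleFuncHolder
    int precision = Math.max(precisionA, precisionB);   // 10 -> both holders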

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java
index 0f80d11..888829d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java
@@ -209,7 +209,15 @@ public class FunctionConverter {
       case SIMPLE:
         return new DrillSimpleFuncHolder(template.scope(), template.nulls(), template.isBinaryCommutative(),
           template.isRandom(), registeredNames, ps, outputField, works, methods, imports);
-
+      case DECIMAL_MAX_SCALE:
+          return new DrillDecimalMaxScaleFuncHolder(template.scope(), template.nulls(), template.isBinaryCommutative(),
+                  template.isRandom(), registeredNames, ps, outputField, works, methods, imports);
+      case DECIMAL_SUM_SCALE:
+          return new DrillDecimalSumScaleFuncHolder(template.scope(), template.nulls(), template.isBinaryCommutative(),
+                  template.isRandom(), registeredNames, ps, outputField, works, methods, imports);
+      case DECIMAL_CAST:
+          return new DrillDecimalCastFuncHolder(template.scope(), template.nulls(), template.isBinaryCommutative(),
+                  template.isRandom(), registeredNames, ps, outputField, works, methods, imports);
       case HOLISTIC_AGGREGATE:
       case RANGE_AGGREGATE:
       default:

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
index ad6aa3c..bc2178b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
@@ -30,8 +30,10 @@ import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.expression.ValueExpressions;
 import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.expr.EvaluationVisitor;
 import org.apache.drill.exec.record.NullExpression;
 import org.eigenbase.rel.RelNode;
 import org.eigenbase.reltype.RelDataTypeField;
@@ -204,15 +206,37 @@ public class DrillOptiq {
       MajorType castType = null;
       
       switch(call.getType().getSqlTypeName().getName()){
-        case "VARCHAR":
-        case "CHAR":
-          castType = Types.required(MinorType.VARCHAR).toBuilder().setWidth(call.getType().getPrecision()).build();
+      case "VARCHAR":
+      case "CHAR":
+        castType = Types.required(MinorType.VARCHAR).toBuilder().setWidth(call.getType().getPrecision()).build();
+        break;
+      
+      case "INTEGER": castType = Types.required(MinorType.INT); break;
+      case "FLOAT": castType = Types.required(MinorType.FLOAT4); break;
+      case "DOUBLE": castType = Types.required(MinorType.FLOAT8); break;
+      case "DECIMAL":
+
+          int precision = call.getType().getPrecision();
+          int scale = call.getType().getScale();
+
+          if (precision <= 9) {
+              castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL9).setPrecision(precision).setScale(scale).build();
+          } else if (precision <= 18) {
+              castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL18).setPrecision(precision).setScale(scale).build();
+          } else if (precision <= 28) {
+              // Inject a cast to SPARSE before casting to the dense type.
+              castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28SPARSE).setPrecision(precision).setScale(scale).build();
+              arg = FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
+              castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28DENSE).setPrecision(precision).setScale(scale).build();
+          } else if (precision <= 38) {
+              castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38SPARSE).setPrecision(precision).setScale(scale).build();
+              arg = FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
+              castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38DENSE).setPrecision(precision).setScale(scale).build();
+          } else {
+              throw new UnsupportedOperationException("Only Decimal types with precision range 0 - 38 are supported");
+          }
           break;
       
-        case "INTEGER": castType = Types.required(MinorType.INT); break;
-        case "FLOAT": castType = Types.required(MinorType.FLOAT4); break;
-        case "DOUBLE": castType = Types.required(MinorType.FLOAT8); break;
-        case "DECIMAL": throw new UnsupportedOperationException("Need to add decimal.");
         case "INTERVAL_YEAR_MONTH": castType = Types.required(MinorType.INTERVALYEAR); break;
         case "INTERVAL_DAY_TIME": castType = Types.required(MinorType.INTERVALDAY); break;
         default: castType = Types.required(MinorType.valueOf(call.getType().getSqlTypeName().getName()));
@@ -251,6 +275,20 @@ public class DrillOptiq {
         int a = ((BigDecimal) literal.getValue()).intValue();
         return ValueExpressions.getInt(a);
       case DECIMAL:
+        /* TODO: Enable using Decimal literals once we have more functions implemented for Decimal
+         * For now continue using Double instead of decimals
+
+        int precision = ((BigDecimal) literal.getValue()).precision();
+        if (precision <= 9) {
+            return ValueExpressions.getDecimal9((BigDecimal)literal.getValue());
+        } else if (precision <= 18) {
+            return ValueExpressions.getDecimal18((BigDecimal)literal.getValue());
+        } else if (precision <= 28) {
+            return ValueExpressions.getDecimal28((BigDecimal)literal.getValue());
+        } else if (precision <= 38) {
+            return ValueExpressions.getDecimal38((BigDecimal)literal.getValue());
+        } */
+
         double dbl = ((BigDecimal) literal.getValue()).doubleValue();
         logger.warn("Converting exact decimal into approximate decimal.  Should be fixed once decimal is implemented.");
         return ValueExpressions.getFloat8(dbl);
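
To summarize the DECIMAL branch added to the cast handling above: the SQL type's precision selects the Drill decimal representation, and for the wide (dense) variants an intermediate cast to the sparse form is injected first. A self-contained sketch of the mapping, returning type names as strings purely for illustration:

    static String drillDecimalTypeFor(int precision) {
      if (precision <= 9)  return "DECIMAL9";
      if (precision <= 18) return "DECIMAL18";
      if (precision <= 28) return "DECIMAL28DENSE";   // reached via an intermediate DECIMAL28SPARSE cast
      if (precision <= 38) return "DECIMAL38DENSE";   // reached via an intermediate DECIMAL38SPARSE cast
      throw new UnsupportedOperationException("Only Decimal types with precision range 0 - 38 are supported");
    }

Per the TODO in the literal branch, DECIMAL literals themselves are still converted to FLOAT8 for now.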

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
index abe6308..a9d53ac 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
@@ -22,9 +22,12 @@ import java.util.Iterator;
 import java.util.List;
 
 import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.PathSegment.NameSegment;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.logical.data.NamedExpression;
+import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.exec.expr.TypeHelper;
@@ -123,6 +126,12 @@ public class MaterializedField{
     return def.getMajorType();
   }
 
+  public int getScale() {
+      return def.getMajorType().getScale();
+  }
+  public int getPrecision() {
+      return def.getMajorType().getPrecision();
+  }
   public boolean isNullable() {
     return def.getMajorType().getMode() == DataMode.OPTIONAL;
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
index 579a07c..f6d83e2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
@@ -58,12 +58,14 @@ public static final Map<MinorType, Integer> precedenceMap;
   	precedenceMap.put(MinorType.BIGINT, i += 2);
   	precedenceMap.put(MinorType.UINT8, i += 2);
   	precedenceMap.put(MinorType.MONEY, i += 2);
-  	precedenceMap.put(MinorType.DECIMAL4, i += 2);
-  	precedenceMap.put(MinorType.DECIMAL8, i += 2);
-  	precedenceMap.put(MinorType.DECIMAL12, i += 2);
-  	precedenceMap.put(MinorType.DECIMAL16, i += 2);
   	precedenceMap.put(MinorType.FLOAT4, i += 2);
   	precedenceMap.put(MinorType.FLOAT8, i += 2);
+ 	precedenceMap.put(MinorType.DECIMAL9, i += 2);
+  	precedenceMap.put(MinorType.DECIMAL18, i += 2);
+   	precedenceMap.put(MinorType.DECIMAL28DENSE, i += 2);
+  	precedenceMap.put(MinorType.DECIMAL28SPARSE, i += 2);
+    precedenceMap.put(MinorType.DECIMAL38DENSE, i += 2);
+    precedenceMap.put(MinorType.DECIMAL38SPARSE, i += 2);
   	precedenceMap.put(MinorType.TIME, i += 2);
   	precedenceMap.put(MinorType.DATE, i += 2);
   	precedenceMap.put(MinorType.TIMESTAMP, i += 2);
@@ -72,7 +74,6 @@ public static final Map<MinorType, Integer> precedenceMap;
     precedenceMap.put(MinorType.INTERVALDAY, i+= 2);
     precedenceMap.put(MinorType.INTERVALYEAR, i+= 2);
     precedenceMap.put(MinorType.INTERVAL, i+= 2);
-
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
index 67769c9..2d50846 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
@@ -52,10 +52,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -74,10 +76,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -96,10 +100,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -118,10 +124,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -134,13 +142,18 @@ public class TypeCastRules {
     rule.add(MinorType.VARBINARY);
     rules.put(MinorType.BIGINT, rule);
 
-    /** DECIMAL4 cast able from **/
+    /** DECIMAL9 cast able from **/
     rule = new HashSet<MinorType>();
     rule.add(MinorType.TINYINT);
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -151,16 +164,20 @@ public class TypeCastRules {
     rule.add(MinorType.VARCHAR);
     rule.add(MinorType.VAR16CHAR);
     rule.add(MinorType.VARBINARY);
-    rules.put(MinorType.DECIMAL4, rule);
+    rules.put(MinorType.DECIMAL9, rule);
 
-    /** DECIMAL8 cast able from **/
+    /** DECIMAL18 cast able from **/
     rule = new HashSet<MinorType>();
     rule.add(MinorType.TINYINT);
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -171,17 +188,20 @@ public class TypeCastRules {
     rule.add(MinorType.VARCHAR);
     rule.add(MinorType.VAR16CHAR);
     rule.add(MinorType.VARBINARY);
-    rules.put(MinorType.DECIMAL8, rule);
+    rules.put(MinorType.DECIMAL18, rule);
 
-    /** DECIMAL12 cast able from **/
+    /** DECIMAL28Dense cast able from **/
     rule = new HashSet<MinorType>();
     rule.add(MinorType.TINYINT);
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -192,18 +212,20 @@ public class TypeCastRules {
     rule.add(MinorType.VARCHAR);
     rule.add(MinorType.VAR16CHAR);
     rule.add(MinorType.VARBINARY);
-    rules.put(MinorType.DECIMAL12, rule);
+    rules.put(MinorType.DECIMAL28DENSE, rule);
 
-    /** DECIMAL16 cast able from **/
+    /** DECIMAL28Sparse cast able from **/
     rule = new HashSet<MinorType>();
     rule.add(MinorType.TINYINT);
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -214,7 +236,56 @@ public class TypeCastRules {
     rule.add(MinorType.VARCHAR);
     rule.add(MinorType.VAR16CHAR);
     rule.add(MinorType.VARBINARY);
-    rules.put(MinorType.DECIMAL16, rule);
+    rules.put(MinorType.DECIMAL28SPARSE, rule);
+
+    /** DECIMAL38Dense cast able from **/
+    rule = new HashSet<MinorType>();
+    rule.add(MinorType.TINYINT);
+    rule.add(MinorType.SMALLINT);
+    rule.add(MinorType.INT);
+    rule.add(MinorType.BIGINT);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
+    rule.add(MinorType.MONEY);
+    rule.add(MinorType.FLOAT4);
+    rule.add(MinorType.FLOAT8);
+    rule.add(MinorType.BIT);
+    rule.add(MinorType.FIXEDCHAR);
+    rule.add(MinorType.FIXED16CHAR);
+    rule.add(MinorType.FIXEDBINARY);
+    rule.add(MinorType.VARCHAR);
+    rule.add(MinorType.VAR16CHAR);
+    rule.add(MinorType.VARBINARY);
+    rules.put(MinorType.DECIMAL38DENSE, rule);
+
+
+    /** DECIMAL38Sparse cast able from **/
+    rule = new HashSet<MinorType>();
+    rule.add(MinorType.TINYINT);
+    rule.add(MinorType.SMALLINT);
+    rule.add(MinorType.INT);
+    rule.add(MinorType.BIGINT);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
+    rule.add(MinorType.MONEY);
+    rule.add(MinorType.FLOAT4);
+    rule.add(MinorType.FLOAT8);
+    rule.add(MinorType.BIT);
+    rule.add(MinorType.FIXEDCHAR);
+    rule.add(MinorType.FIXED16CHAR);
+    rule.add(MinorType.FIXEDBINARY);
+    rule.add(MinorType.VARCHAR);
+    rule.add(MinorType.VAR16CHAR);
+    rule.add(MinorType.VARBINARY);
+    rules.put(MinorType.DECIMAL38SPARSE, rule);
 
     /** MONEY cast able from **/
     rule = new HashSet<MinorType>();
@@ -222,10 +293,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -271,10 +344,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.DATE);
     rule.add(MinorType.TIME);
     rule.add(MinorType.TIMESTAMPTZ);
@@ -348,10 +423,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.BIT);
@@ -367,10 +444,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
@@ -387,10 +466,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.TIMESTAMPTZ);
     rule.add(MinorType.FLOAT4);
@@ -410,12 +491,15 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.TIMESTAMPTZ);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
+    rule.add(MinorType.TIMESTAMP);
     rule.add(MinorType.FLOAT4);
     rule.add(MinorType.FLOAT8);
     rule.add(MinorType.BIT);
@@ -440,10 +524,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.TIMESTAMPTZ);
     rule.add(MinorType.FLOAT4);
@@ -470,10 +556,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.TIMESTAMPTZ);
     rule.add(MinorType.FLOAT4);
@@ -491,10 +579,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.TIMESTAMPTZ);
     rule.add(MinorType.FLOAT4);
@@ -521,10 +611,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.TIMESTAMPTZ);
     rule.add(MinorType.FLOAT4);
@@ -551,10 +643,12 @@ public class TypeCastRules {
     rule.add(MinorType.SMALLINT);
     rule.add(MinorType.INT);
     rule.add(MinorType.BIGINT);
-    rule.add(MinorType.DECIMAL4);
-    rule.add(MinorType.DECIMAL8);
-    rule.add(MinorType.DECIMAL12);
-    rule.add(MinorType.DECIMAL16);
+    rule.add(MinorType.DECIMAL9);
+    rule.add(MinorType.DECIMAL18);
+    rule.add(MinorType.DECIMAL28SPARSE);
+    rule.add(MinorType.DECIMAL28DENSE);
+    rule.add(MinorType.DECIMAL38SPARSE);
+    rule.add(MinorType.DECIMAL38DENSE);
     rule.add(MinorType.MONEY);
     rule.add(MinorType.TIMESTAMPTZ);
     rule.add(MinorType.FLOAT4);

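The long blocks above all follow the same pattern: for each target minor type, the rules map records the set of source types it can be cast from, with the four legacy DECIMAL4/8/12/16 entries replaced by the six new decimal representations. A hedged sketch of how such a table is typically consulted (the helper below is illustrative and not claimed to be an actual method of TypeCastRules):

    // Illustrative lookup against the castability table built above.
    static boolean castableFrom(MinorType target, MinorType source) {
      Set<MinorType> sources = rules.get(target);
      return sources != null && sources.contains(source);
    }
    // e.g. castableFrom(MinorType.DECIMAL38SPARSE, MinorType.VARCHAR) -> true per the table above
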
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
index c659b1f..d203fa4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
@@ -326,7 +326,7 @@ public class HiveRecordReader implements RecordReader {
       case "tinyint":
         return Types.required(TypeProtos.MinorType.TINYINT);
       case "decimal":
-        return Types.required(TypeProtos.MinorType.DECIMAL16);
+        return Types.required(TypeProtos.MinorType.DECIMAL38SPARSE);
       case "double":
         return Types.required(TypeProtos.MinorType.FLOAT8);
       case "float":
@@ -355,7 +355,7 @@ public class HiveRecordReader implements RecordReader {
       case BYTE:
         return Types.required(TypeProtos.MinorType.TINYINT);
       case DECIMAL:
-        return Types.required(TypeProtos.MinorType.DECIMAL16);
+        return Types.required(TypeProtos.MinorType.DECIMAL38SPARSE);
       case DOUBLE:
         return Types.required(TypeProtos.MinorType.FLOAT8);
       case FLOAT:

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a5ee8f84/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueHolderHelper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueHolderHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueHolderHelper.java
index 55d49d1..e4af851 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueHolderHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueHolderHelper.java
@@ -18,14 +18,23 @@
 package org.apache.drill.exec.vector;
 
 import java.nio.ByteOrder;
-
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.SwappedByteBuf;
+import io.netty.buffer.Unpooled;
 import io.netty.buffer.UnpooledByteBufAllocator;
 
+import org.apache.drill.common.util.DecimalUtility;
 import org.apache.drill.exec.expr.holders.VarCharHolder;
 import org.apache.drill.exec.expr.holders.IntervalDayHolder;
+import org.apache.drill.exec.expr.holders.Decimal9Holder;
+import org.apache.drill.exec.expr.holders.Decimal18Holder;
+import org.apache.drill.exec.expr.holders.Decimal28SparseHolder;
+import org.apache.drill.exec.expr.holders.Decimal38SparseHolder;
 
 import com.google.common.base.Charsets;
 
+import java.math.BigDecimal;
+
 
 public class ValueHolderHelper {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ValueHolderHelper.class);
@@ -46,6 +55,63 @@ public class ValueHolderHelper {
 
       dch.days = days;
       dch.milliSeconds = millis;
+      return dch;
+  }
+
+  public static Decimal9Holder getDecimal9Holder(int decimal, int scale, int precision) {
+    Decimal9Holder dch = new Decimal9Holder();
+
+    dch.scale = scale;
+    dch.precision = precision;
+    dch.value = decimal;
+
+    return dch;
+  }
+
+  public static Decimal18Holder getDecimal18Holder(long decimal, int scale, int precision) {
+    Decimal18Holder dch = new Decimal18Holder();
+
+    dch.scale = scale;
+    dch.precision = precision;
+    dch.value = decimal;
+
+    return dch;
+  }
+
+  public static Decimal28SparseHolder getDecimal28Holder(String decimal) {
+
+    Decimal28SparseHolder dch = new Decimal28SparseHolder();
+
+    BigDecimal bigDecimal = new BigDecimal(decimal);
+
+    dch.scale = bigDecimal.scale();
+    dch.precision = bigDecimal.precision();
+    dch.sign = (bigDecimal.signum() == -1);
+    dch.start = 0;
+
+    dch.buffer = Unpooled.wrappedBuffer(new byte[5 * DecimalUtility.integerSize]);
+    dch.buffer = new SwappedByteBuf(dch.buffer);
+    DecimalUtility.getSparseFromBigDecimal(bigDecimal, dch.buffer, dch.start, dch.scale, dch.precision, dch.nDecimalDigits);
+
+    return dch;
+  }
+
+  public static Decimal38SparseHolder getDecimal38Holder(String decimal) {
+
+
+      Decimal38SparseHolder dch = new Decimal38SparseHolder();
+
+      BigDecimal bigDecimal = new BigDecimal(decimal);
+
+      dch.scale = bigDecimal.scale();
+      dch.precision = bigDecimal.precision();
+      dch.sign = (bigDecimal.signum() == -1);
+      dch.start = 0;
+
+
+      dch.buffer = Unpooled.wrappedBuffer(new byte[dch.maxPrecision * DecimalUtility.integerSize]);
+      dch.buffer = new SwappedByteBuf(dch.buffer);
+      DecimalUtility.getSparseFromBigDecimal(bigDecimal, dch.buffer, dch.start, dch.scale, dch.precision, dch.nDecimalDigits);
 
       return dch;
   }
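
A short usage illustration for the new decimal holder helpers; the scale and precision noted in the comments follow directly from BigDecimal's semantics for the given literals:

    Decimal9Holder d9 = ValueHolderHelper.getDecimal9Holder(12345, 2, 5);
    // d9.value == 12345, d9.scale == 2, d9.precision == 5  (represents 123.45)

    Decimal38SparseHolder d38 = ValueHolderHelper.getDecimal38Holder("123456.789");
    // d38.scale == 3, d38.precision == 9, d38.sign == false
    // d38.buffer holds the sparse representation written by DecimalUtility.getSparseFromBigDecimal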


[06/10] git commit: DRILL-323: Handle multiple inputs to math operators correctly.

Posted by ja...@apache.org.
DRILL-323: Handle multiple inputs to math operators correctly.


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/43615c8d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/43615c8d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/43615c8d

Branch: refs/heads/master
Commit: 43615c8d085afe762bccd8814891779c60861bbb
Parents: a5ee8f8
Author: Mehant Baid <me...@gmail.com>
Authored: Tue Apr 22 00:30:19 2014 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Tue Apr 22 19:18:03 2014 -0700

----------------------------------------------------------------------
 .../common/expression/FunctionCallFactory.java  |  2 +-
 .../drill/exec/fn/impl/TestMultiInputAdd.java   | 86 ++++++++++++++++++++
 .../functions/multi_input_add_test.json         | 30 +++++++
 3 files changed, 117 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/43615c8d/common/src/main/java/org/apache/drill/common/expression/FunctionCallFactory.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/FunctionCallFactory.java b/common/src/main/java/org/apache/drill/common/expression/FunctionCallFactory.java
index 50fceda..8113107 100644
--- a/common/src/main/java/org/apache/drill/common/expression/FunctionCallFactory.java
+++ b/common/src/main/java/org/apache/drill/common/expression/FunctionCallFactory.java
@@ -104,7 +104,7 @@ public class FunctionCallFactory {
       List<LogicalExpression> l2 = new ArrayList<LogicalExpression>();
       l2.add(first);
       l2.add(args.get(i + 1));
-      first = createExpression(opTypes.get(i), ep, args);
+      first = createExpression(opTypes.get(i), ep, l2);
     }
     return first;
   }
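
The one-character fix above matters because the surrounding factory method is meant to left-fold a chain of binary operators; before the fix, the full original argument list was passed back into each createExpression call instead of the two-element partial result. A minimal standalone sketch of the intended folding, using plain strings in place of LogicalExpression (assumes java.util.Arrays and java.util.List):

    // "1 + 2 + 3 + 4"  ->  add(add(add(1, 2), 3), 4)
    List<String> args = Arrays.asList("1", "2", "3", "4");
    List<String> ops  = Arrays.asList("add", "add", "add");
    String first = args.get(0);
    for (int i = 0; i < ops.size(); i++) {
      first = ops.get(i) + "(" + first + ", " + args.get(i + 1) + ")";   // fold with the partial result, not the full list
    }
    // first == "add(add(add(1, 2), 3), 4)"

The new TestMultiInputAdd test below exercises exactly this case and expects the projected value 10.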

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/43615c8d/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java
new file mode 100644
index 0000000..d4714c0
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.fn.impl;
+
+import com.codahale.metrics.MetricRegistry;
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
+import mockit.Injectable;
+import mockit.NonStrictExpectations;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.expression.ExpressionPosition;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.exec.client.DrillClient;
+import org.apache.drill.exec.pop.PopUnitTestBase;
+import org.apache.drill.exec.proto.UserProtos;
+import org.apache.drill.exec.record.RecordBatchLoader;
+import org.apache.drill.exec.record.VectorWrapper;
+import org.apache.drill.exec.rpc.user.QueryResultBatch;
+import org.apache.drill.exec.rpc.user.UserServer;
+import org.apache.drill.exec.server.Drillbit;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.server.RemoteServiceSet;
+import org.apache.drill.exec.vector.IntVector;
+import org.apache.drill.exec.vector.Float8Vector;
+import org.apache.drill.exec.vector.ValueVector;
+import org.junit.Test;
+
+import java.util.List;
+
+import static org.junit.Assert.assertTrue;
+
+public class TestMultiInputAdd extends PopUnitTestBase {
+
+    static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestMultiInputAdd.class);
+    DrillConfig c = DrillConfig.create();
+
+
+    @Test
+    public void testMultiInputAdd(@Injectable final DrillbitContext bitContext, @Injectable UserServer.UserClientConnection connection) throws Throwable
+    {
+        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+             Drillbit bit = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+            // run query.
+            bit.run();
+            client.connect();
+            List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+                    Files.toString(FileUtils.getResourceAsFile("/functions/multi_input_add_test.json"), Charsets.UTF_8));
+
+            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+
+            QueryResultBatch batch = results.get(0);
+            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+
+            for (VectorWrapper<?> v : batchLoader) {
+
+                ValueVector.Accessor accessor = v.getValueVector().getAccessor();
+
+                assertTrue((accessor.getObject(0)).equals(10));
+            }
+
+            batchLoader.clear();
+            for(QueryResultBatch b : results){
+                b.release();
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/43615c8d/exec/java-exec/src/test/resources/functions/multi_input_add_test.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/functions/multi_input_add_test.json b/exec/java-exec/src/test/resources/functions/multi_input_add_test.json
new file mode 100644
index 0000000..16443a4
--- /dev/null
+++ b/exec/java-exec/src/test/resources/functions/multi_input_add_test.json
@@ -0,0 +1,30 @@
+{
+  head : {
+    version : 1,
+    generator : {
+      type : "optiq",
+      info : "na"
+    },
+    type : "APACHE_DRILL_PHYSICAL"
+  },
+         graph:[
+         {
+             @id:1,
+             pop:"mock-scan",
+             url: "http://apache.org",
+             entries:[
+                 {records: 1, types: [
+                   {name: "blue", type: "BIGINT", mode: "REQUIRED"}
+                 ]}
+             ]
+         }, {
+    pop : "project",
+    @id : 2,
+    exprs : [ { ref : "INTADD", expr : " 1 + 2 + 3 + 4" } ],
+    child : 1
+  }, {
+    pop : "screen",
+    @id : 3,
+    child : 2
+  } ]
+}
\ No newline at end of file


[08/10] DRILL-442: Implement text format plugin

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
index 807c67e..8bdb1ee 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
@@ -19,6 +19,8 @@ package org.apache.drill.exec.store.easy.json;
 
 import java.util.List;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.google.common.collect.Lists;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.SchemaPath;
@@ -41,7 +43,7 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
   }
   
   public JSONFormatPlugin(String name, DrillbitContext context, DrillFileSystem fs, StoragePluginConfig config, JSONFormatConfig formatPluginConfig) {
-    super(name, context, fs, config, formatPluginConfig, true, false, false, "json", "json");
+    super(name, context, fs, config, formatPluginConfig, true, false, false, false, Lists.newArrayList("json"), "json");
   }
   
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
new file mode 100644
index 0000000..850f248
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.easy.text;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import com.google.common.base.Preconditions;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.RecordReader;
+import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
+import org.apache.drill.exec.store.dfs.easy.FileWork;
+import org.apache.drill.exec.store.dfs.shim.DrillFileSystem;
+import org.apache.drill.exec.store.text.DrillTextRecordReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.hadoop.mapred.FileSplit;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextFormatConfig> {
+
+  public TextFormatPlugin(String name, DrillbitContext context, DrillFileSystem fs, StoragePluginConfig storageConfig) {
+    super(name, context, fs, storageConfig, new TextFormatConfig(), true, false, true, true, new ArrayList<String>(), "text");
+  }
+
+  public TextFormatPlugin(String name, DrillbitContext context, DrillFileSystem fs, StoragePluginConfig config, TextFormatConfig formatPluginConfig) {
+    super(name, context, fs, config, formatPluginConfig, true, false, true, true, formatPluginConfig.getExtensions(), "text");
+  }
+
+
+  @Override
+  public RecordReader getRecordReader(FragmentContext context, FileWork fileWork,
+      List<SchemaPath> columns) throws ExecutionSetupException {
+    Path path = getFileSystem().getUnderlying().makeQualified(new Path(fileWork.getPath()));
+    FileSplit split = new FileSplit(path, fileWork.getStart(), fileWork.getLength(), new String[]{""});
+    Preconditions.checkArgument(((TextFormatConfig)formatConfig).getDelimiter().length() == 1, "Only single character delimiter supported");
+    return new DrillTextRecordReader(split, context, ((TextFormatConfig) formatConfig).getDelimiter().charAt(0), columns);
+  }
+
+  @JsonTypeName("text")
+  public static class TextFormatConfig implements FormatPluginConfig {
+
+    public List<String> extensions;
+    public String delimiter;
+
+    public List<String> getExtensions() {
+      return extensions;
+    }
+
+    public String getDelimiter() {
+      return delimiter;
+    }
+
+    @Override
+    public int hashCode() {
+      return 33;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj)
+        return true;
+      if (obj == null)
+        return false;
+      if (!(obj instanceof TextFormatConfig))
+        return false;
+      TextFormatConfig that = (TextFormatConfig) obj;
+      if (this.delimiter.equals(that.delimiter))
+        return true;
+      return false;
+    }
+    
+  }
+}

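One detail worth noting in TextFormatConfig above: equals() compares only the delimiter, so two text format configs that differ only in their extension lists are treated as equal. A minimal sketch of that behaviour (the standalone class and the extension values are illustrative, not part of the patch):

    import java.util.Arrays;
    import java.util.Collections;
    import org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig;

    public class TextFormatConfigCheck {
      public static void main(String[] args) {
        TextFormatConfig csv = new TextFormatConfig();
        csv.delimiter = ",";
        csv.extensions = Arrays.asList("csv", "bcp");

        TextFormatConfig alsoCsv = new TextFormatConfig();
        alsoCsv.delimiter = ",";
        alsoCsv.extensions = Collections.singletonList("txt");

        // equals() above inspects only the delimiter, so these compare equal
        // even though their extension lists differ.
        System.out.println(csv.equals(alsoCsv));  // true
      }
    }

That appears deliberate: extensions only drive which files the format claims, while the delimiter is what actually changes how a file is parsed.
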
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java
index a1e8f1d..bf33805 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java
@@ -79,25 +79,25 @@ public class HiveScan extends AbstractGroupScan {
   @JsonCreator
   public HiveScan(@JsonProperty("hive-table") HiveReadEntry hiveReadEntry, @JsonProperty("storage-plugin") String storagePluginName,
                   @JsonProperty("columns") List<FieldReference> columns,
-                  @JacksonInject StoragePluginRegistry engineRegistry) throws ExecutionSetupException {
+                  @JacksonInject StoragePluginRegistry pluginRegistry) throws ExecutionSetupException {
     this.hiveReadEntry = hiveReadEntry;
     this.table = hiveReadEntry.getTable();
     this.storagePluginName = storagePluginName;
-    this.storagePlugin = (HiveStoragePlugin) engineRegistry.getEngine(storagePluginName);
+    this.storagePlugin = (HiveStoragePlugin) pluginRegistry.getPlugin(storagePluginName);
     this.columns = columns;
     this.partitions = hiveReadEntry.getPartitions();
     getSplits();
     endpoints = storagePlugin.getContext().getBits();
   }
 
-  public HiveScan(HiveReadEntry hiveReadEntry, HiveStoragePlugin storageEngine, List<FieldReference> columns) throws ExecutionSetupException {
+  public HiveScan(HiveReadEntry hiveReadEntry, HiveStoragePlugin storagePlugin, List<FieldReference> columns) throws ExecutionSetupException {
     this.table = hiveReadEntry.getTable();
     this.hiveReadEntry = hiveReadEntry;
     this.columns = columns;
     this.partitions = hiveReadEntry.getPartitions();
     getSplits();
-    endpoints = storageEngine.getContext().getBits();
-    this.storagePluginName = storageEngine.getName();
+    endpoints = storagePlugin.getContext().getBits();
+    this.storagePluginName = storagePlugin.getName();
   }
 
   public List<FieldReference> getColumns() {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatConfig.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatConfig.java
index 33eb68a..d3200fe 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatConfig.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.store.parquet;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import org.apache.drill.common.logical.FormatPluginConfig;
 
 import com.fasterxml.jackson.annotation.JsonTypeName;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
index c20c134..bcee2be 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
@@ -29,6 +29,7 @@ import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.exception.DrillbitStartupException;
 import org.apache.drill.exec.metrics.DrillMetrics;
 import org.apache.drill.exec.physical.EndpointAffinity;
 import org.apache.drill.exec.physical.OperatorCost;
@@ -113,7 +114,6 @@ public class ParquetGroupScan extends AbstractGroupScan {
       @JacksonInject StoragePluginRegistry engineRegistry, // 
       @JsonProperty("columns") List<SchemaPath> columns //
       ) throws IOException, ExecutionSetupException {
-    engineRegistry.init(DrillConfig.create());
     this.columns = columns;
     if(formatConfig == null) formatConfig = new ParquetFormatConfig();
     Preconditions.checkNotNull(storageConfig);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java
index eaa4f17..4ae84fc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java
@@ -61,6 +61,7 @@ public class AssignmentCreator<T extends CompleteWork> {
   }
 
   private AssignmentCreator(List<DrillbitEndpoint> incomingEndpoints, List<T> units) {
+    logger.debug("Assigning {} units to {} endpoints", units.size(), incomingEndpoints.size());
     Stopwatch watch = new Stopwatch();
     
     Preconditions.checkArgument(incomingEndpoints.size() <= units.size(), String.format("Incoming endpoints %d "
@@ -116,6 +117,7 @@ public class AssignmentCreator<T extends CompleteWork> {
                 .get(currentEndpoint) >= endpointByteMap.getMaxBytes() * requiredPercentage))) {
 
           mappings.put(minorFragmentId, unit);
+          logger.debug("Assigned unit: {} to minorFragmentId: {}", unit, minorFragmentId);
           // logger.debug("Assigned rowGroup {} to minorFragmentId {} endpoint {}", rowGroupInfo.getRowGroupIndex(),
           // minorFragmentId, endpoints.get(minorFragmentId).getAddress());
           // if (bytesPerEndpoint.get(currentEndpoint) != null) {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
index 432c1d7..1ad134e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
@@ -40,6 +40,7 @@ import com.codahale.metrics.Timer;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.ImmutableRangeMap;
 import com.google.common.collect.Range;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
 
 public class BlockMapBuilder {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BlockMapBuilder.class);
@@ -50,19 +51,24 @@ public class BlockMapBuilder {
   private Collection<DrillbitEndpoint> endpoints;
   private FileSystem fs;
   private HashMap<String,DrillbitEndpoint> endPointMap;
+  private CompressionCodecFactory codecFactory;
 
   public BlockMapBuilder(FileSystem fs, Collection<DrillbitEndpoint> endpoints) {
     this.fs = fs;
     this.endpoints = endpoints;
+    codecFactory = new CompressionCodecFactory(fs.getConf());
     buildEndpointMap();
   }
 
-  
+  private boolean compressed(FileStatus fileStatus) {
+    return codecFactory.getCodec(fileStatus.getPath()) != null;
+  }
+
   public List<CompleteFileWork> generateFileWork(List<FileStatus> files, boolean blockify) throws IOException{
     List<CompleteFileWork> work = Lists.newArrayList();
     for(FileStatus f : files){
       ImmutableRangeMap<Long,BlockLocation> rangeMap = getBlockMap(f);
-      if(!blockify){
+      if(!blockify || compressed(f)){
         work.add(new CompleteFileWork(this.getEndpointByteMap(new FileStatusWork(f)), 0, f.getLen(), f.getPath().toString()));
         continue;
       }

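The new compressed() check relies on Hadoop's CompressionCodecFactory, which resolves a codec purely from the file name suffix; a null codec is what lets generateFileWork() keep splitting a file into blocks, while a non-null codec forces one unsplittable unit of work per file. A small standalone sketch of that lookup (the file paths are made up for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class CompressedCheck {
      public static void main(String[] args) {
        CompressionCodecFactory factory = new CompressionCodecFactory(new Configuration());
        // getCodec() matches on the extension; null means "no codec", i.e. the file
        // is treated as uncompressed and may still be blockified.
        System.out.println(factory.getCodec(new Path("/data/regions.csv.gz")) != null); // true
        System.out.println(factory.getCodec(new Path("/data/regions.csv")) != null);    // false
      }
    }
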
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/CompleteFileWork.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/CompleteFileWork.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/CompleteFileWork.java
index 30b08f6..cfa4fcb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/CompleteFileWork.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/CompleteFileWork.java
@@ -102,4 +102,9 @@ public class CompleteFileWork implements FileWork, CompleteWork{
     }
     
   }
+
+  @Override
+  public String toString() {
+    return String.format("File: %s start: %d length: %d", path, start, length);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java
new file mode 100644
index 0000000..17d2adb
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.text;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.physical.impl.OutputMutator;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.store.RecordReader;
+import org.apache.drill.exec.vector.AllocationHelper;
+import org.apache.drill.exec.vector.RepeatedVarCharVector;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.*;
+
+import java.io.IOException;
+import java.util.List;
+
+public class DrillTextRecordReader implements RecordReader {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillTextRecordReader.class);
+
+  private org.apache.hadoop.mapred.RecordReader<LongWritable, Text> reader;
+  private List<ValueVector> vectors = Lists.newArrayList();
+  private byte delimiter;
+  private int targetRecordCount;
+  private FieldReference ref = new FieldReference("columns");
+  private FragmentContext context;
+  private RepeatedVarCharVector vector;
+  private List<Integer> columnIds = Lists.newArrayList();
+  private LongWritable key;
+  private Text value;
+  private int numCols = 0;
+  private boolean redoRecord = false;
+
+  public DrillTextRecordReader(FileSplit split, FragmentContext context, char delimiter, List<SchemaPath> columns) {
+    this.context = context;
+    this.delimiter = (byte) delimiter;
+    if(columns != null) {
+      for (SchemaPath path : columns) {
+        assert path.getRootSegment().isNamed();
+        Preconditions.checkArgument(path.getRootSegment().getChild().isArray(),"Selected column must be an array index");
+        int index = path.getRootSegment().getChild().getArraySegment().getIndex();
+        columnIds.add(index);
+      }
+    }
+    targetRecordCount = context.getConfig().getInt(ExecConstants.TEXT_LINE_READER_BATCH_SIZE);
+    numCols = columnIds.size();
+    TextInputFormat inputFormat = new TextInputFormat();
+    JobConf job = new JobConf();
+    job.setInt("io.file.buffer.size", context.getConfig().getInt(ExecConstants.TEXT_LINE_READER_BUFFER_SIZE));
+    job.setInputFormat(inputFormat.getClass());
+    try {
+      reader = inputFormat.getRecordReader(split, job, Reporter.NULL);
+      key = reader.createKey();
+      value = reader.createValue();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void setup(OutputMutator output) throws ExecutionSetupException {
+    output.removeAllFields();
+    MaterializedField field = MaterializedField.create(ref, Types.repeated(TypeProtos.MinorType.VARCHAR));
+    vector = new RepeatedVarCharVector(field, context.getAllocator());
+    try {
+      output.addField(vector);
+      output.setNewSchema();
+    } catch (SchemaChangeException e) {
+      throw new ExecutionSetupException(e);
+    }
+  }
+
+  @Override
+  public int next() {
+    AllocationHelper.allocate(vector, targetRecordCount, 50);
+    try {
+      int recordCount = 0;
+      while (redoRecord || (recordCount < targetRecordCount && reader.next(key, value))) {
+        redoRecord = false;
+        int start;
+        int end = -1;
+        int p = 0;
+        int i = 0;
+        vector.getMutator().startNewGroup(recordCount);
+        while (end < value.getLength() - 1) {
+          if(numCols > 0 && p >= numCols) {
+            break;
+          }
+          start = end;
+          end = find(value, delimiter, start + 1);
+          if (end == -1) {
+            end = value.getLength();
+          }
+          if (numCols > 0 && i++ < columnIds.get(p)) {
+            if (!vector.getMutator().addSafe(recordCount, value.getBytes(), start + 1, start + 1)) {
+              redoRecord = true;
+              vector.getMutator().setValueCount(recordCount);
+              return recordCount;
+            }
+            continue;
+          }
+          p++;
+          if (!vector.getMutator().addSafe(recordCount, value.getBytes(), start + 1, end - start - 1)) {
+            redoRecord = true;
+            vector.getMutator().setValueCount(recordCount);
+            return recordCount;
+          }
+        }
+        recordCount++;
+      }
+      for (ValueVector v : vectors) {
+        v.getMutator().setValueCount(recordCount);
+      }
+      vector.getMutator().setValueCount(recordCount);
+      return recordCount;
+    } catch (IOException e) {
+      cleanup();
+      throw new DrillRuntimeException(e);
+    }
+  }
+
+  public int find(Text text, byte what, int start) {
+    int len = text.getLength();
+    int p = start;
+    byte[] bytes = text.getBytes();
+    while (p < len) {
+      if (bytes[p] == what) {
+        return p;
+      }
+      p++;
+    }
+    return -1;
+  }
+
+  @Override
+  public void cleanup() {
+    try {
+      reader.close();
+    } catch (IOException e) {
+      logger.warn("Exception closing reader: {}", e);
+    }
+  }
+}

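The delimiter scan above is the core of the record reader's field splitting and is easy to lift out for a sanity check. A minimal sketch that mirrors find() (the wrapper class and the sample line are illustrative only):

    import org.apache.hadoop.io.Text;

    public class DelimiterScan {
      // Mirrors DrillTextRecordReader.find(): index of the first occurrence of
      // 'what' at or after 'start', or -1 if the byte is not present.
      static int find(Text text, byte what, int start) {
        byte[] bytes = text.getBytes();
        int len = text.getLength();
        for (int p = start; p < len; p++) {
          if (bytes[p] == what) {
            return p;
          }
        }
        return -1;
      }

      public static void main(String[] args) {
        Text line = new Text("0,AFRICA,lar deposits");
        int first = find(line, (byte) ',', 0);
        int second = find(line, (byte) ',', first + 1);
        System.out.println(first + " " + second);  // prints "1 8"
      }
    }
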
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java
index 1942f62..b48d3bd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.util;
 
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
@@ -101,7 +102,9 @@ public class VectorUtil {
         }
         else if (o instanceof byte[]) {
           String value = new String((byte[]) o);
-          System.out.printf(format, value.length() <= columnWidth ? value : value.substring(0, columnWidth - 1));
+          System.out.printf("| %-15s",value.length() <= 15 ? value : value.substring(0, 14));
+        } else if (o instanceof List) {
+          System.out.printf("| %s", o);
         } else {
           String value = o.toString();
           System.out.printf(format, value.length() <= columnWidth ? value : value.substring(0,columnWidth - 1));

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedVariableWidthVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedVariableWidthVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedVariableWidthVector.java
index 556c9a9..bd03038 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedVariableWidthVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedVariableWidthVector.java
@@ -28,7 +28,13 @@ public interface RepeatedVariableWidthVector extends ValueVector{
    * @param childValueCount   Number of supported values in the vector.
    */
   public void allocateNew(int totalBytes, int parentValueCount, int childValueCount);
-  
+
+  /**
+   * Provides the maximum number of variable width bytes that can be stored in this vector.
+   * @return the maximum number of variable width bytes this vector can hold
+   */
+  public int getByteCapacity();
+
   /**
    * Load the records in the provided buffer based on the given number of values.
    * @param dataBytes   The number of bytes associated with the data array.

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/RepeatedVariableEstimatedAllocator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/RepeatedVariableEstimatedAllocator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/RepeatedVariableEstimatedAllocator.java
new file mode 100644
index 0000000..2a0ca65
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/RepeatedVariableEstimatedAllocator.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.allocator;
+
+import org.apache.drill.exec.vector.RepeatedVariableWidthVector;
+import org.apache.drill.exec.vector.VariableWidthVector;
+
+public class RepeatedVariableEstimatedAllocator extends VectorAllocator{
+  RepeatedVariableWidthVector out;
+  int avgWidth;
+
+  public RepeatedVariableEstimatedAllocator(RepeatedVariableWidthVector out, int avgWidth) {
+    super();
+    this.out = out;
+    this.avgWidth = avgWidth;
+  }
+  
+  public void alloc(int recordCount){
+    out.allocateNew(avgWidth * recordCount, recordCount, recordCount);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/RepeatedVectorAllocator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/RepeatedVectorAllocator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/RepeatedVectorAllocator.java
new file mode 100644
index 0000000..d9be306
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/RepeatedVectorAllocator.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.allocator;
+
+import org.apache.drill.exec.vector.RepeatedVariableWidthVector;
+import org.apache.drill.exec.vector.VariableWidthVector;
+
+class RepeatedVectorAllocator extends VectorAllocator{
+  RepeatedVariableWidthVector in;
+  RepeatedVariableWidthVector out;
+
+  public RepeatedVectorAllocator(RepeatedVariableWidthVector in, RepeatedVariableWidthVector out) {
+    super();
+    this.in = in;
+    this.out = out;
+  }
+
+  public void alloc(int recordCount){
+    out.allocateNew(in.getByteCapacity(), in.getAccessor().getValueCount(), in.getAccessor().getValueCount());
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/VectorAllocator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/VectorAllocator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/VectorAllocator.java
index fcd09cd..77b6e1c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/VectorAllocator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/allocator/VectorAllocator.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.vector.allocator;
 
 import org.apache.drill.exec.vector.FixedWidthVector;
+import org.apache.drill.exec.vector.RepeatedVariableWidthVector;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.VariableWidthVector;
 
@@ -29,6 +30,8 @@ public abstract class VectorAllocator{
       return new FixedVectorAllocator((FixedWidthVector) outgoing);
     }else if(outgoing instanceof VariableWidthVector && in instanceof VariableWidthVector){
       return new VariableVectorAllocator( (VariableWidthVector) in, (VariableWidthVector) outgoing);
+    } else if (outgoing instanceof RepeatedVariableWidthVector && in instanceof RepeatedVariableWidthVector) {
+      return new RepeatedVectorAllocator((RepeatedVariableWidthVector) in, (RepeatedVariableWidthVector) outgoing);
     }else{
       throw new UnsupportedOperationException();
     }
@@ -40,7 +43,9 @@ public abstract class VectorAllocator{
       return new FixedVectorAllocator((FixedWidthVector) outgoing);
     }else if(outgoing instanceof VariableWidthVector){
       return new VariableEstimatedVector( (VariableWidthVector) outgoing, averageBytesPerVariable);
-    }else{
+    }else if (outgoing instanceof RepeatedVariableWidthVector) {
+      return new RepeatedVariableEstimatedAllocator((RepeatedVariableWidthVector) outgoing, averageBytesPerVariable);
+    } else {
       throw new UnsupportedOperationException();
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/main/resources/drill-module.conf
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index 81e6135..f3b04b5 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -36,7 +36,13 @@ drill.exec: {
   },
   functions: ["org.apache.drill.expr.fn.impl"],
   storage: {
-    packages += "org.apache.drill.exec.store"  
+    packages += "org.apache.drill.exec.store",
+    file: {
+      text: {
+        buffer.size: 262144,
+        batch.size: 4000
+      }
+    }
   },
   metrics : { 
     context: "drillbit",

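These two keys are the ones DrillTextRecordReader reads through ExecConstants.TEXT_LINE_READER_BUFFER_SIZE and TEXT_LINE_READER_BATCH_SIZE. A quick sketch of how the defaults surface at runtime, assuming those constants map to the keys above and no site configuration overrides them:

    import org.apache.drill.common.config.DrillConfig;
    import org.apache.drill.exec.ExecConstants;

    public class TextReaderDefaults {
      public static void main(String[] args) {
        DrillConfig config = DrillConfig.create();
        // With the drill-module.conf entries above these resolve to 262144 and 4000.
        System.out.println(config.getInt(ExecConstants.TEXT_LINE_READER_BUFFER_SIZE));
        System.out.println(config.getInt(ExecConstants.TEXT_LINE_READER_BATCH_SIZE));
      }
    }
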
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
index a9c8e69..3a492c5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
@@ -27,6 +27,8 @@ import net.hydromatic.optiq.tools.Frameworks;
 
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.util.TestTools;
+import org.apache.drill.exec.cache.DistributedCache;
+import org.apache.drill.exec.cache.LocalCache;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.memory.TopLevelAllocator;
 import org.apache.drill.exec.ops.QueryContext;
@@ -63,6 +65,9 @@ public class PlanningBase {
   protected void testSqlPlan(String sqlCommands) throws Exception{
     String[] sqlStrings = sqlCommands.split(";");
 
+    final DistributedCache cache = new LocalCache();
+    cache.run();
+
     new NonStrictExpectations() {
       {
         dbContext.getMetrics();
@@ -71,10 +76,13 @@ public class PlanningBase {
         result = new TopLevelAllocator();
         dbContext.getConfig();
         result = config;
+        dbContext.getCache();
+        result = cache;
       }
     };
 
     StoragePluginRegistry registry = new StoragePluginRegistry(dbContext);
+    registry.init();
     final FunctionImplementationRegistry functionRegistry = new FunctionImplementationRegistry(config);
     final SchemaPlus root = Frameworks.createRootSchema();
     registry.getSchemaFactory().registerSchemas(null, root);
@@ -96,6 +104,8 @@ public class PlanningBase {
         result = new PlannerSettings();
         context.getConfig();
         result = config;
+        context.getCache();
+        result = cache;
       }
     };
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
index f5c9884..06d8a32 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
@@ -117,6 +117,7 @@ public class TestMergeJoin extends PopUnitTestBase {
   }
 
   @Test
+  @Ignore
   public void orderedEqualityLeftJoin(@Injectable final DrillbitContext bitContext,
                                       @Injectable UserServer.UserClientConnection connection) throws Throwable {
 
@@ -170,6 +171,7 @@ public class TestMergeJoin extends PopUnitTestBase {
   }
 
   @Test
+  @Ignore
   public void orderedEqualityInnerJoin(@Injectable final DrillbitContext bitContext,
                                        @Injectable UserServer.UserClientConnection connection) throws Throwable {
 
@@ -223,6 +225,7 @@ public class TestMergeJoin extends PopUnitTestBase {
   }
 
   @Test
+  @Ignore
   public void orderedEqualityMultiBatchJoin(@Injectable final DrillbitContext bitContext,
                                             @Injectable UserServer.UserClientConnection connection) throws Throwable {
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
index 95bee87..1cc77f9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
@@ -32,6 +32,7 @@ import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.FunctionCall;
 import org.apache.drill.common.expression.IfExpression;
 import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.expression.ValueExpressions;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -82,7 +83,7 @@ public class ExpressionTreeMaterializerTest {
 
     new NonStrictExpectations() {
       {
-        batch.getValueVectorId(new FieldReference("test", ExpressionPosition.UNKNOWN));
+        batch.getValueVectorId(new SchemaPath("test", ExpressionPosition.UNKNOWN));
         result = new TypedFieldId(Types.required(MinorType.BIGINT), -5);
       }
     };
@@ -98,9 +99,9 @@ public class ExpressionTreeMaterializerTest {
   public void testMaterializingLateboundTree(final @Injectable RecordBatch batch) throws SchemaChangeException {
     new NonStrictExpectations() {
       {
-        batch.getValueVectorId(new FieldReference("test", ExpressionPosition.UNKNOWN));
+        batch.getValueVectorId(SchemaPath.getSimplePath("test"));
         result = new TypedFieldId(Types.required(MinorType.BIT), -4);
-        batch.getValueVectorId(new FieldReference("test1", ExpressionPosition.UNKNOWN));
+        batch.getValueVectorId(SchemaPath.getSimplePath("test1"));
         result = new TypedFieldId(Types.required(MinorType.BIGINT), -5);
       }
     };
@@ -196,7 +197,7 @@ public class ExpressionTreeMaterializerTest {
 
     new NonStrictExpectations() {
       {
-        batch.getValueVectorId(new FieldReference("test", ExpressionPosition.UNKNOWN));
+        batch.getValueVectorId(new SchemaPath("test", ExpressionPosition.UNKNOWN));
         result = new TypedFieldId(Types.required(MinorType.BIGINT), -5);
       }
     };

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOrphanSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOrphanSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOrphanSchema.java
index ffe05a4..f61f4ee 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOrphanSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOrphanSchema.java
@@ -22,6 +22,8 @@ import net.hydromatic.optiq.SchemaPlus;
 import net.hydromatic.optiq.tools.Frameworks;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.cache.HazelCache;
+import org.apache.drill.exec.cache.LocalCache;
 import org.apache.drill.exec.memory.TopLevelAllocator;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.junit.Test;
@@ -33,7 +35,7 @@ public class TestOrphanSchema {
 
 
   @Test
-  public void test(final DrillbitContext bitContext){
+  public void test(final DrillbitContext bitContext) throws Exception {
     final DrillConfig c = DrillConfig.create();
 
     new NonStrictExpectations() {
@@ -44,11 +46,16 @@ public class TestOrphanSchema {
         result = new TopLevelAllocator();
         bitContext.getConfig();
         result = c;
+        bitContext.getCache();
+        result = new LocalCache();
       }
     };
 
+    bitContext.getCache().run();
+    
     StoragePluginRegistry r = new StoragePluginRegistry(bitContext);
     SchemaPlus plus = Frameworks.createRootSchema();
+    r.init();
     r.getSchemaFactory().registerSchemas(null, plus);
 
     printSchema(plus, 0);

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/OrphanSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/OrphanSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/OrphanSchema.java
index 22e4a14..1f1b367 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/OrphanSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/OrphanSchema.java
@@ -23,6 +23,7 @@ import net.hydromatic.optiq.SchemaPlus;
 import net.hydromatic.optiq.tools.Frameworks;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.cache.LocalCache;
 import org.apache.drill.exec.memory.TopLevelAllocator;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.StoragePluginRegistry;
@@ -37,8 +38,8 @@ public class OrphanSchema {
    * Create an orphan schema to be used for testing.
    * @return root node of the created schema.
    */
-  public static SchemaPlus create(){
-
+  public static SchemaPlus create() throws Exception {
+    
     final DrillConfig c = DrillConfig.create();
 
     // Mock up a context which will allow us to create a schema.
@@ -46,9 +47,13 @@ public class OrphanSchema {
     when(bitContext.getMetrics()).thenReturn(new MetricRegistry());
     when(bitContext.getAllocator()).thenReturn(new TopLevelAllocator());
     when(bitContext.getConfig()).thenReturn(c);
+    when(bitContext.getCache()).thenReturn(new LocalCache());
 
+    bitContext.getCache().run();
+    
     // Using the mock context, get the orphan schema.
     StoragePluginRegistry r = new StoragePluginRegistry(bitContext);
+    r.init();
     SchemaPlus plus = Frameworks.createRootSchema();
     r.getSchemaFactory().registerSchemas(null, plus);
     return plus;
@@ -61,7 +66,7 @@ public class OrphanSchema {
    */
 
   @Test
-  public void test() {
+  public void test() throws Exception {
     printSchema(create(), 0);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestOrphanSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestOrphanSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestOrphanSchema.java
index e76c609..d4755fa 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestOrphanSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestOrphanSchema.java
@@ -39,6 +39,7 @@ import org.apache.drill.exec.store.ischema.RowProvider;
 import org.apache.drill.exec.store.ischema.RowRecordReader;
 import org.apache.drill.exec.vector.ValueVector;
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 /**
@@ -46,7 +47,12 @@ import org.junit.Test;
  * An "orphan schema" is a stand alone schema which is not (yet?) connected to Optiq.
  */
 public class TestOrphanSchema {
-  SchemaPlus root = OrphanSchema.create();
+  static SchemaPlus root;
+
+  @BeforeClass
+  public static void init() throws Exception {
+    root = OrphanSchema.create();
+  }
 
   @Test
   public void testTables() {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java
new file mode 100644
index 0000000..5fbcc8b
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TextRecordReaderTest.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.text;
+
+import com.codahale.metrics.MetricRegistry;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
+import mockit.Injectable;
+import mockit.NonStrictExpectations;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.exec.client.DrillClient;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
+import org.apache.drill.exec.memory.TopLevelAllocator;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.physical.impl.OperatorCreatorRegistry;
+import org.apache.drill.exec.physical.impl.OutputMutator;
+import org.apache.drill.exec.pop.PopUnitTestBase;
+import org.apache.drill.exec.proto.BitControl;
+import org.apache.drill.exec.proto.UserProtos;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.RecordBatchLoader;
+import org.apache.drill.exec.record.VectorContainer;
+import org.apache.drill.exec.rpc.user.QueryResultBatch;
+import org.apache.drill.exec.rpc.user.UserServer;
+import org.apache.drill.exec.server.Drillbit;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.server.RemoteServiceSet;
+import org.apache.drill.exec.store.RecordReader;
+import org.apache.drill.exec.util.VectorUtil;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileSplit;
+import org.junit.AfterClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class TextRecordReaderTest extends PopUnitTestBase {
+
+  @Test
+  public void testFullExecution() throws Exception {
+    RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+
+    try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+        DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+      bit1.run();
+      client.connect();
+      List<QueryResultBatch> results = client.runQuery(UserProtos.QueryType.PHYSICAL,
+              Files.toString(
+                      FileUtils.getResourceAsFile("/store/text/test.json"), Charsets.UTF_8)
+                      .replace("#{DATA_FILE}", FileUtils.getResourceAsFile("/store/text/regions.csv").toURI().toString()));
+      int count = 0;
+      RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
+      for(QueryResultBatch b : results) {
+        if (b.getHeader().getRowCount() != 0) {
+          count += b.getHeader().getRowCount();
+        }
+        loader.load(b.getHeader().getDef(), b.getData());
+        VectorUtil.showVectorAccessibleContent(loader);
+      }
+      assertEquals(5, count);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/resources/storage-plugins.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/storage-plugins.json b/exec/java-exec/src/test/resources/storage-plugins.json
new file mode 100644
index 0000000..33f4fac
--- /dev/null
+++ b/exec/java-exec/src/test/resources/storage-plugins.json
@@ -0,0 +1,40 @@
+{
+  "storage":{
+    dfs: {
+      type: "file",
+      connection: "file:///",
+    formats: {
+            "psv" : {
+              type: "text",
+              extensions: [ "tbl" ],
+              delimiter: "|"
+            },
+            "csv" : {
+              type: "text",
+              extensions: [ "csv", "bcp" ],
+              delimiter: ","
+            },
+            "tsv" : {
+              type: "text",
+              extensions: [ "tsv" ],
+              delimiter: "\t"
+            },
+            "parquet" : {
+              type: "parquet"
+            }
+          }
+    },
+    cp: {
+      type: "file",
+      connection: "classpath:///",
+      formats: {
+        "json" : {
+          type: "json"
+        },
+        "parquet" : {
+          type: "parquet"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/resources/store/text/regions.csv
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/store/text/regions.csv b/exec/java-exec/src/test/resources/store/text/regions.csv
new file mode 100644
index 0000000..e97d2ed
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/text/regions.csv
@@ -0,0 +1,5 @@
+0,AFRICA,lar deposits. blithely final packages cajole. regular waters are final requests. regular accounts are according to ,
+1,AMERICA,hs use ironic, even requests. s,
+2,ASIA,ges. thinly even pinto beans ca,
+3,EUROPE,ly final courts cajole furiously final excuse,
+4,MIDDLE EAST,uickly special accounts cajole carefully blithely close requests. carefully final asymptotes haggle furiousl,
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/exec/java-exec/src/test/resources/store/text/test.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/store/text/test.json b/exec/java-exec/src/test/resources/store/text/test.json
new file mode 100644
index 0000000..ee33b5d
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/text/test.json
@@ -0,0 +1,40 @@
+{
+    head:{
+        type:"APACHE_DRILL_PHYSICAL",
+        version:"1",
+        generator:{
+            type:"manual"
+        }
+    },
+	graph:[
+        {
+            @id:1,
+            pop:"fs-scan",
+            files:[
+              "#{DATA_FILE}"
+            ],
+            storage : {
+              type : "named",
+              name: "dfs"
+            },
+            format: {
+              type: "named",
+              name: "csv"
+            }
+        },
+        {
+            @id:2,
+            child: 1,
+            pop:"project",
+            exprs: [
+              { ref: "col1", expr:"columns[0]" },
+              { ref: "col2", expr:"columns[1]" }
+            ]
+        },
+        {
+            @id: 3,
+            child: 2,
+            pop: "screen"
+        }
+    ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 2cac418..5d1123d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -127,6 +127,7 @@
             <exclude>**/*.json</exclude>
             <exclude>**/*.sql</exclude>
             <exclude>**/git.properties</exclude>
+            <exclude>**/*.csv</exclude>
             <exclude>**/drill-*.conf</exclude>
             <exclude>**/.buildpath</exclude>
             <exclude>**/*.proto</exclude>

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/sqlparser/src/test/resources/storage-engines.json
----------------------------------------------------------------------
diff --git a/sqlparser/src/test/resources/storage-engines.json b/sqlparser/src/test/resources/storage-engines.json
deleted file mode 100644
index c16a971..0000000
--- a/sqlparser/src/test/resources/storage-engines.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
-  "storage":{
-    dfs: {
-      type: "file",
-      connection: "file:///",
-      workspaces: {
-        default: "/",
-        home: "/"
-      }
-    },
-    cp: {
-      type: "file",
-      connection: "classpath:///"
-    },
-    hive : {
-        type:"hive",
-        config :
-          {
-            "hive.metastore.uris" : "",
-            "javax.jdo.option.ConnectionURL" : "jdbc:derby:;databaseName=/tmp/drill_hive_db;create=true",
-            "hive.metastore.warehouse.dir" : "/tmp/drill_hive_wh",
-            "fs.default.name" : "file:///",
-            "hive.metastore.sasl.enabled" : "false"
-          }
-      }
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/54287d07/sqlparser/src/test/resources/storage-plugins.json
----------------------------------------------------------------------
diff --git a/sqlparser/src/test/resources/storage-plugins.json b/sqlparser/src/test/resources/storage-plugins.json
new file mode 100644
index 0000000..b9a4bd8
--- /dev/null
+++ b/sqlparser/src/test/resources/storage-plugins.json
@@ -0,0 +1,47 @@
+{
+  "storage":{
+    dfs: {
+      type: "file",
+      connection: "file:///",
+      workspaces: {
+        default: "/",
+        home: "/"
+      },
+      formats: {
+        "psv" : {
+          type: "text",
+          extensions: [ "tbl" ],
+          delimiter: "|"
+        },
+        "csv" : {
+          type: "text",
+          extensions: [ "csv" ],
+          delimiter: ","
+        },
+        "tsv" : {
+          type: "text",
+          extensions: [ "tsv" ],
+          delimiter: "\t"
+        },
+        "parquet" : {
+          type: "parquet"
+        }
+      }
+    },
+    cp: {
+      type: "file",
+      connection: "classpath:///"
+    },
+    hive : {
+        type:"hive",
+        config :
+          {
+            "hive.metastore.uris" : "",
+            "javax.jdo.option.ConnectionURL" : "jdbc:derby:;databaseName=/tmp/drill_hive_db;create=true",
+            "hive.metastore.warehouse.dir" : "/tmp/drill_hive_wh",
+            "fs.default.name" : "file:///",
+            "hive.metastore.sasl.enabled" : "false"
+          }
+      }
+  }
+}
\ No newline at end of file


[07/10] git commit: DRILL-553: simple query fails sometimes

Posted by ja...@apache.org.
DRILL-553: simple query fails sometimes

Initialize lastSet variable in NullableVarLength vectors to -1. Also reset when reallocating.


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/7b6c7a12
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/7b6c7a12
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/7b6c7a12

Branch: refs/heads/master
Commit: 7b6c7a12576546266bcf5ea109eb71adb0f30cc5
Parents: 43615c8
Author: Steven Phillips <sp...@maprtech.com>
Authored: Mon Apr 21 20:15:39 2014 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Tue Apr 22 19:29:17 2014 -0700

----------------------------------------------------------------------
 .../main/codegen/templates/NullableValueVectors.java |  3 ++-
 .../java/org/apache/drill/TestExampleQueries.java    | 15 +++++++++++++++
 2 files changed, 17 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/7b6c7a12/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java b/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
index bb692dc..d2209c1 100644
--- a/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
@@ -302,7 +302,7 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
   public final class Mutator implements ValueVector.Mutator, NullableVectorDefinitionSetter{
     
     private int setCount;
-    <#if type.major = "VarLen"> private int lastSet;</#if>
+    <#if type.major = "VarLen"> private int lastSet = -1;</#if>
 
     private Mutator(){
     }
@@ -433,6 +433,7 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
     
     public void reset(){
       setCount = 0;
+      <#if type.major = "VarLen">lastSet = -1;</#if>
     }
     
   }

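The reason -1 matters: lastSet appears to track the last index whose offset has been written, and the var-length mutator back-fills offsets for skipped (null) positions from lastSet + 1 onward. Starting at 0 silently treats index 0 as already written. A deliberately simplified model of that kind of bookkeeping, not the generated vector code:

    import java.util.Arrays;

    public class LastSetModel {
      int[] offsets = new int[8];
      int lastSet = -1;                     // the fix: "nothing written yet"

      void set(int index, int length) {
        // Carry the previous offset forward across skipped (null) positions.
        for (int i = lastSet + 1; i < index; i++) {
          offsets[i + 1] = offsets[i];
        }
        offsets[index + 1] = offsets[index] + length;
        lastSet = index;
      }

      public static void main(String[] args) {
        LastSetModel m = new LastSetModel();
        m.set(2, 5);                        // first non-null value lands at index 2
        System.out.println(Arrays.toString(m.offsets));  // [0, 0, 0, 5, 0, 0, 0, 0]
      }
    }
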
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/7b6c7a12/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
index cbf19ac..191115b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
@@ -22,6 +22,21 @@ import org.junit.Test;
 public class TestExampleQueries extends BaseTestQuery{
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestExampleQueries.class);
 
+  @Test // see DRILL-553
+  public void testQueryWithNullValues() throws Exception {
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+    test("select count(*) from cp.`customer.json` limit 1");
+  }
+
   @Test
   public void testSelectWithLimit() throws Exception{
     test("select employee_id,  first_name, last_name from cp.`employee.json` order by employee_id limit 5 offset 10");