Posted to commits@hive.apache.org by am...@apache.org on 2013/04/17 09:29:46 UTC

svn commit: r1468783 [4/16] - in /hive/branches/HIVE-4115: ./ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/ beeline/src/test/org/apache/ beeline/src/test/org/apache/hive/ beeline/src/test/org/apache/hive/beeline/ beeline/src/test/org/...

Modified: hive/branches/HIVE-4115/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java (original)
+++ hive/branches/HIVE-4115/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java Wed Apr 17 07:29:38 2013
@@ -1852,6 +1852,569 @@ public final class OrcProto {
     // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.ql.io.orc.BucketStatistics)
   }
   
+  public interface DecimalStatisticsOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // optional string minimum = 1;
+    boolean hasMinimum();
+    String getMinimum();
+    
+    // optional string maximum = 2;
+    boolean hasMaximum();
+    String getMaximum();
+    
+    // optional string sum = 3;
+    boolean hasSum();
+    String getSum();
+  }
+  public static final class DecimalStatistics extends
+      com.google.protobuf.GeneratedMessage
+      implements DecimalStatisticsOrBuilder {
+    // Use DecimalStatistics.newBuilder() to construct.
+    private DecimalStatistics(Builder builder) {
+      super(builder);
+    }
+    private DecimalStatistics(boolean noInit) {}
+    
+    private static final DecimalStatistics defaultInstance;
+    public static DecimalStatistics getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public DecimalStatistics getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_fieldAccessorTable;
+    }
+    
+    private int bitField0_;
+    // optional string minimum = 1;
+    public static final int MINIMUM_FIELD_NUMBER = 1;
+    private java.lang.Object minimum_;
+    public boolean hasMinimum() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public String getMinimum() {
+      java.lang.Object ref = minimum_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          minimum_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getMinimumBytes() {
+      java.lang.Object ref = minimum_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        minimum_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+    
+    // optional string maximum = 2;
+    public static final int MAXIMUM_FIELD_NUMBER = 2;
+    private java.lang.Object maximum_;
+    public boolean hasMaximum() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    public String getMaximum() {
+      java.lang.Object ref = maximum_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          maximum_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getMaximumBytes() {
+      java.lang.Object ref = maximum_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        maximum_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+    
+    // optional string sum = 3;
+    public static final int SUM_FIELD_NUMBER = 3;
+    private java.lang.Object sum_;
+    public boolean hasSum() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    public String getSum() {
+      java.lang.Object ref = sum_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          sum_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getSumBytes() {
+      java.lang.Object ref = sum_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        sum_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+    
+    private void initFields() {
+      minimum_ = "";
+      maximum_ = "";
+      sum_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMinimumBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, getMaximumBytes());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, getSumBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMinimumBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getMaximumBytes());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, getSumBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatisticsOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        minimum_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        maximum_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        sum_ = "";
+        bitField0_ = (bitField0_ & ~0x00000004);
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics getDefaultInstanceForType() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics build() {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics buildPartial() {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics result = new org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.minimum_ = minimum_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.maximum_ = maximum_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.sum_ = sum_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics) {
+          return mergeFrom((org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics other) {
+        if (other == org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDefaultInstance()) return this;
+        if (other.hasMinimum()) {
+          setMinimum(other.getMinimum());
+        }
+        if (other.hasMaximum()) {
+          setMaximum(other.getMaximum());
+        }
+        if (other.hasSum()) {
+          setSum(other.getSum());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              minimum_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              maximum_ = input.readBytes();
+              break;
+            }
+            case 26: {
+              bitField0_ |= 0x00000004;
+              sum_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      }
+      
+      private int bitField0_;
+      
+      // optional string minimum = 1;
+      private java.lang.Object minimum_ = "";
+      public boolean hasMinimum() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public String getMinimum() {
+        java.lang.Object ref = minimum_;
+        if (!(ref instanceof String)) {
+          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+          minimum_ = s;
+          return s;
+        } else {
+          return (String) ref;
+        }
+      }
+      public Builder setMinimum(String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        minimum_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearMinimum() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        minimum_ = getDefaultInstance().getMinimum();
+        onChanged();
+        return this;
+      }
+      void setMinimum(com.google.protobuf.ByteString value) {
+        bitField0_ |= 0x00000001;
+        minimum_ = value;
+        onChanged();
+      }
+      
+      // optional string maximum = 2;
+      private java.lang.Object maximum_ = "";
+      public boolean hasMaximum() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public String getMaximum() {
+        java.lang.Object ref = maximum_;
+        if (!(ref instanceof String)) {
+          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+          maximum_ = s;
+          return s;
+        } else {
+          return (String) ref;
+        }
+      }
+      public Builder setMaximum(String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        maximum_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearMaximum() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        maximum_ = getDefaultInstance().getMaximum();
+        onChanged();
+        return this;
+      }
+      void setMaximum(com.google.protobuf.ByteString value) {
+        bitField0_ |= 0x00000002;
+        maximum_ = value;
+        onChanged();
+      }
+      
+      // optional string sum = 3;
+      private java.lang.Object sum_ = "";
+      public boolean hasSum() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      public String getSum() {
+        java.lang.Object ref = sum_;
+        if (!(ref instanceof String)) {
+          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+          sum_ = s;
+          return s;
+        } else {
+          return (String) ref;
+        }
+      }
+      public Builder setSum(String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000004;
+        sum_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearSum() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        sum_ = getDefaultInstance().getSum();
+        onChanged();
+        return this;
+      }
+      void setSum(com.google.protobuf.ByteString value) {
+        bitField0_ |= 0x00000004;
+        sum_ = value;
+        onChanged();
+      }
+      
+      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.DecimalStatistics)
+    }
+    
+    static {
+      defaultInstance = new DecimalStatistics(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.ql.io.orc.DecimalStatistics)
+  }
+  
   public interface ColumnStatisticsOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
@@ -1878,6 +2441,11 @@ public final class OrcProto {
     boolean hasBucketStatistics();
     org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics getBucketStatistics();
     org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatisticsOrBuilder getBucketStatisticsOrBuilder();
+    
+    // optional .org.apache.hadoop.hive.ql.io.orc.DecimalStatistics decimalStatistics = 6;
+    boolean hasDecimalStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics getDecimalStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatisticsOrBuilder getDecimalStatisticsOrBuilder();
   }
   public static final class ColumnStatistics extends
       com.google.protobuf.GeneratedMessage
@@ -1970,12 +2538,26 @@ public final class OrcProto {
       return bucketStatistics_;
     }
     
+    // optional .org.apache.hadoop.hive.ql.io.orc.DecimalStatistics decimalStatistics = 6;
+    public static final int DECIMALSTATISTICS_FIELD_NUMBER = 6;
+    private org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics decimalStatistics_;
+    public boolean hasDecimalStatistics() {
+      return ((bitField0_ & 0x00000020) == 0x00000020);
+    }
+    public org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics getDecimalStatistics() {
+      return decimalStatistics_;
+    }
+    public org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatisticsOrBuilder getDecimalStatisticsOrBuilder() {
+      return decimalStatistics_;
+    }
+    
     private void initFields() {
       numberOfValues_ = 0L;
       intStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.getDefaultInstance();
       doubleStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.getDefaultInstance();
       stringStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.getDefaultInstance();
       bucketStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.getDefaultInstance();
+      decimalStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDefaultInstance();
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -2004,6 +2586,9 @@ public final class OrcProto {
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
         output.writeMessage(5, bucketStatistics_);
       }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        output.writeMessage(6, decimalStatistics_);
+      }
       getUnknownFields().writeTo(output);
     }
     
@@ -2033,6 +2618,10 @@ public final class OrcProto {
         size += com.google.protobuf.CodedOutputStream
           .computeMessageSize(5, bucketStatistics_);
       }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(6, decimalStatistics_);
+      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -2153,6 +2742,7 @@ public final class OrcProto {
           getDoubleStatisticsFieldBuilder();
           getStringStatisticsFieldBuilder();
           getBucketStatisticsFieldBuilder();
+          getDecimalStatisticsFieldBuilder();
         }
       }
       private static Builder create() {
@@ -2187,6 +2777,12 @@ public final class OrcProto {
           bucketStatisticsBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000010);
+        if (decimalStatisticsBuilder_ == null) {
+          decimalStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDefaultInstance();
+        } else {
+          decimalStatisticsBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000020);
         return this;
       }
       
@@ -2261,6 +2857,14 @@ public final class OrcProto {
         } else {
           result.bucketStatistics_ = bucketStatisticsBuilder_.build();
         }
+        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+          to_bitField0_ |= 0x00000020;
+        }
+        if (decimalStatisticsBuilder_ == null) {
+          result.decimalStatistics_ = decimalStatistics_;
+        } else {
+          result.decimalStatistics_ = decimalStatisticsBuilder_.build();
+        }
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -2292,6 +2896,9 @@ public final class OrcProto {
         if (other.hasBucketStatistics()) {
           mergeBucketStatistics(other.getBucketStatistics());
         }
+        if (other.hasDecimalStatistics()) {
+          mergeDecimalStatistics(other.getDecimalStatistics());
+        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -2364,6 +2971,15 @@ public final class OrcProto {
               setBucketStatistics(subBuilder.buildPartial());
               break;
             }
+            case 50: {
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.Builder subBuilder = org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.newBuilder();
+              if (hasDecimalStatistics()) {
+                subBuilder.mergeFrom(getDecimalStatistics());
+              }
+              input.readMessage(subBuilder, extensionRegistry);
+              setDecimalStatistics(subBuilder.buildPartial());
+              break;
+            }
           }
         }
       }
@@ -2751,6 +3367,96 @@ public final class OrcProto {
         return bucketStatisticsBuilder_;
       }
       
+      // optional .org.apache.hadoop.hive.ql.io.orc.DecimalStatistics decimalStatistics = 6;
+      private org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics decimalStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics, org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.Builder, org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatisticsOrBuilder> decimalStatisticsBuilder_;
+      public boolean hasDecimalStatistics() {
+        return ((bitField0_ & 0x00000020) == 0x00000020);
+      }
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics getDecimalStatistics() {
+        if (decimalStatisticsBuilder_ == null) {
+          return decimalStatistics_;
+        } else {
+          return decimalStatisticsBuilder_.getMessage();
+        }
+      }
+      public Builder setDecimalStatistics(org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics value) {
+        if (decimalStatisticsBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          decimalStatistics_ = value;
+          onChanged();
+        } else {
+          decimalStatisticsBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000020;
+        return this;
+      }
+      public Builder setDecimalStatistics(
+          org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.Builder builderForValue) {
+        if (decimalStatisticsBuilder_ == null) {
+          decimalStatistics_ = builderForValue.build();
+          onChanged();
+        } else {
+          decimalStatisticsBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000020;
+        return this;
+      }
+      public Builder mergeDecimalStatistics(org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics value) {
+        if (decimalStatisticsBuilder_ == null) {
+          if (((bitField0_ & 0x00000020) == 0x00000020) &&
+              decimalStatistics_ != org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDefaultInstance()) {
+            decimalStatistics_ =
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.newBuilder(decimalStatistics_).mergeFrom(value).buildPartial();
+          } else {
+            decimalStatistics_ = value;
+          }
+          onChanged();
+        } else {
+          decimalStatisticsBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000020;
+        return this;
+      }
+      public Builder clearDecimalStatistics() {
+        if (decimalStatisticsBuilder_ == null) {
+          decimalStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDefaultInstance();
+          onChanged();
+        } else {
+          decimalStatisticsBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000020);
+        return this;
+      }
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.Builder getDecimalStatisticsBuilder() {
+        bitField0_ |= 0x00000020;
+        onChanged();
+        return getDecimalStatisticsFieldBuilder().getBuilder();
+      }
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatisticsOrBuilder getDecimalStatisticsOrBuilder() {
+        if (decimalStatisticsBuilder_ != null) {
+          return decimalStatisticsBuilder_.getMessageOrBuilder();
+        } else {
+          return decimalStatistics_;
+        }
+      }
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics, org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.Builder, org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatisticsOrBuilder> 
+          getDecimalStatisticsFieldBuilder() {
+        if (decimalStatisticsBuilder_ == null) {
+          decimalStatisticsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics, org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.Builder, org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatisticsOrBuilder>(
+                  decimalStatistics_,
+                  getParentForChildren(),
+                  isClean());
+          decimalStatistics_ = null;
+        }
+        return decimalStatisticsBuilder_;
+      }
+      
       // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.ColumnStatistics)
     }
     
@@ -3905,7 +4611,7 @@ public final class OrcProto {
       LENGTH(2, 2),
       DICTIONARY_DATA(3, 3),
       DICTIONARY_COUNT(4, 4),
-      NANO_DATA(5, 5),
+      SECONDARY(5, 5),
       ROW_INDEX(6, 6),
       ;
       
@@ -3914,7 +4620,7 @@ public final class OrcProto {
       public static final int LENGTH_VALUE = 2;
       public static final int DICTIONARY_DATA_VALUE = 3;
       public static final int DICTIONARY_COUNT_VALUE = 4;
-      public static final int NANO_DATA_VALUE = 5;
+      public static final int SECONDARY_VALUE = 5;
       public static final int ROW_INDEX_VALUE = 6;
       
       
@@ -3927,7 +4633,7 @@ public final class OrcProto {
           case 2: return LENGTH;
           case 3: return DICTIONARY_DATA;
           case 4: return DICTIONARY_COUNT;
-          case 5: return NANO_DATA;
+          case 5: return SECONDARY;
           case 6: return ROW_INDEX;
           default: return null;
         }
@@ -3959,7 +4665,7 @@ public final class OrcProto {
       }
       
       private static final Kind[] VALUES = {
-        PRESENT, DATA, LENGTH, DICTIONARY_DATA, DICTIONARY_COUNT, NANO_DATA, ROW_INDEX, 
+        PRESENT, DATA, LENGTH, DICTIONARY_DATA, DICTIONARY_COUNT, SECONDARY, ROW_INDEX, 
       };
       
       public static Kind valueOf(
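
The rename above changes only the Java identifier for stream kind 5: NANO_DATA becomes SECONDARY while the wire value stays 5, so existing ORC files remain readable. A minimal sketch of resolving the numeric tag through the generated enum, using only the valueOf(int) and getNumber() accessors visible in this diff:

import org.apache.hadoop.hive.ql.io.orc.OrcProto;

public class StreamKindCheck {
  public static void main(String[] args) {
    // Wire value 5 now resolves to the renamed constant.
    OrcProto.Stream.Kind kind = OrcProto.Stream.Kind.valueOf(5);
    System.out.println(kind);             // SECONDARY
    System.out.println(kind.getNumber()); // 5
  }
}
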
@@ -5798,6 +6504,7 @@ public final class OrcProto {
       MAP(11, 11),
       STRUCT(12, 12),
       UNION(13, 13),
+      DECIMAL(14, 14),
       ;
       
       public static final int BOOLEAN_VALUE = 0;
@@ -5814,6 +6521,7 @@ public final class OrcProto {
       public static final int MAP_VALUE = 11;
       public static final int STRUCT_VALUE = 12;
       public static final int UNION_VALUE = 13;
+      public static final int DECIMAL_VALUE = 14;
       
       
       public final int getNumber() { return value; }
@@ -5834,6 +6542,7 @@ public final class OrcProto {
           case 11: return MAP;
           case 12: return STRUCT;
           case 13: return UNION;
+          case 14: return DECIMAL;
           default: return null;
         }
       }
@@ -5864,7 +6573,7 @@ public final class OrcProto {
       }
       
       private static final Kind[] VALUES = {
-        BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP, LIST, MAP, STRUCT, UNION, 
+        BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP, LIST, MAP, STRUCT, UNION, DECIMAL, 
       };
       
       public static Kind valueOf(
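
DECIMAL is appended with the next free tag (14), so every existing Type.Kind value keeps its number. A short sketch of describing a decimal column with the new constant; the newBuilder()/setKind()/getKind() calls are the standard protobuf-generated builder API and are assumed here rather than shown in this diff:

import org.apache.hadoop.hive.ql.io.orc.OrcProto;

public class DecimalTypeCheck {
  public static void main(String[] args) {
    // The new constant and its wire value.
    System.out.println(OrcProto.Type.Kind.valueOf(14));   // DECIMAL
    System.out.println(OrcProto.Type.Kind.DECIMAL_VALUE); // 14

    // A footer type entry for a decimal column.
    OrcProto.Type type = OrcProto.Type.newBuilder()
        .setKind(OrcProto.Type.Kind.DECIMAL)
        .build();
    System.out.println(type.getKind());                   // DECIMAL
  }
}
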
@@ -9550,6 +10259,11 @@ public final class OrcProto {
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_org_apache_hadoop_hive_ql_io_orc_BucketStatistics_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
     internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -9620,58 +10334,62 @@ public final class OrcProto {
       "maximum\030\002 \001(\001\022\013\n\003sum\030\003 \001(\001\"4\n\020StringStat" +
       "istics\022\017\n\007minimum\030\001 \001(\t\022\017\n\007maximum\030\002 \001(\t" +
       "\"%\n\020BucketStatistics\022\021\n\005count\030\001 \003(\004B\002\020\001\"" +
-      "\340\002\n\020ColumnStatistics\022\026\n\016numberOfValues\030\001" +
-      " \001(\004\022J\n\rintStatistics\030\002 \001(\01323.org.apache" +
-      ".hadoop.hive.ql.io.orc.IntegerStatistics",
-      "\022L\n\020doubleStatistics\030\003 \001(\01322.org.apache." +
-      "hadoop.hive.ql.io.orc.DoubleStatistics\022L" +
-      "\n\020stringStatistics\030\004 \001(\01322.org.apache.ha" +
-      "doop.hive.ql.io.orc.StringStatistics\022L\n\020" +
-      "bucketStatistics\030\005 \001(\01322.org.apache.hado" +
-      "op.hive.ql.io.orc.BucketStatistics\"n\n\rRo" +
-      "wIndexEntry\022\025\n\tpositions\030\001 \003(\004B\002\020\001\022F\n\nst" +
-      "atistics\030\002 \001(\01322.org.apache.hadoop.hive." +
-      "ql.io.orc.ColumnStatistics\"J\n\010RowIndex\022>" +
-      "\n\005entry\030\001 \003(\0132/.org.apache.hadoop.hive.q",
-      "l.io.orc.RowIndexEntry\"\331\001\n\006Stream\022;\n\004kin" +
-      "d\030\001 \002(\0162-.org.apache.hadoop.hive.ql.io.o" +
-      "rc.Stream.Kind\022\016\n\006column\030\002 \001(\r\022\016\n\006length" +
-      "\030\003 \001(\004\"r\n\004Kind\022\013\n\007PRESENT\020\000\022\010\n\004DATA\020\001\022\n\n" +
-      "\006LENGTH\020\002\022\023\n\017DICTIONARY_DATA\020\003\022\024\n\020DICTIO" +
-      "NARY_COUNT\020\004\022\r\n\tNANO_DATA\020\005\022\r\n\tROW_INDEX" +
-      "\020\006\"\221\001\n\016ColumnEncoding\022C\n\004kind\030\001 \002(\01625.or" +
-      "g.apache.hadoop.hive.ql.io.orc.ColumnEnc" +
-      "oding.Kind\022\026\n\016dictionarySize\030\002 \001(\r\"\"\n\004Ki" +
-      "nd\022\n\n\006DIRECT\020\000\022\016\n\nDICTIONARY\020\001\"\214\001\n\014Strip",
-      "eFooter\0229\n\007streams\030\001 \003(\0132(.org.apache.ha" +
-      "doop.hive.ql.io.orc.Stream\022A\n\007columns\030\002 " +
-      "\003(\01320.org.apache.hadoop.hive.ql.io.orc.C" +
-      "olumnEncoding\"\221\002\n\004Type\0229\n\004kind\030\001 \002(\0162+.o" +
-      "rg.apache.hadoop.hive.ql.io.orc.Type.Kin" +
-      "d\022\024\n\010subtypes\030\002 \003(\rB\002\020\001\022\022\n\nfieldNames\030\003 " +
-      "\003(\t\"\243\001\n\004Kind\022\013\n\007BOOLEAN\020\000\022\010\n\004BYTE\020\001\022\t\n\005S" +
-      "HORT\020\002\022\007\n\003INT\020\003\022\010\n\004LONG\020\004\022\t\n\005FLOAT\020\005\022\n\n\006" +
-      "DOUBLE\020\006\022\n\n\006STRING\020\007\022\n\n\006BINARY\020\010\022\r\n\tTIME" +
-      "STAMP\020\t\022\010\n\004LIST\020\n\022\007\n\003MAP\020\013\022\n\n\006STRUCT\020\014\022\t",
-      "\n\005UNION\020\r\"x\n\021StripeInformation\022\016\n\006offset" +
-      "\030\001 \001(\004\022\023\n\013indexLength\030\002 \001(\004\022\022\n\ndataLengt" +
-      "h\030\003 \001(\004\022\024\n\014footerLength\030\004 \001(\004\022\024\n\014numberO" +
-      "fRows\030\005 \001(\004\"/\n\020UserMetadataItem\022\014\n\004name\030" +
-      "\001 \002(\t\022\r\n\005value\030\002 \002(\014\"\356\002\n\006Footer\022\024\n\014heade" +
-      "rLength\030\001 \001(\004\022\025\n\rcontentLength\030\002 \001(\004\022D\n\007" +
-      "stripes\030\003 \003(\01323.org.apache.hadoop.hive.q" +
-      "l.io.orc.StripeInformation\0225\n\005types\030\004 \003(" +
-      "\0132&.org.apache.hadoop.hive.ql.io.orc.Typ" +
-      "e\022D\n\010metadata\030\005 \003(\01322.org.apache.hadoop.",
-      "hive.ql.io.orc.UserMetadataItem\022\024\n\014numbe" +
-      "rOfRows\030\006 \001(\004\022F\n\nstatistics\030\007 \003(\01322.org." +
-      "apache.hadoop.hive.ql.io.orc.ColumnStati" +
-      "stics\022\026\n\016rowIndexStride\030\010 \001(\r\"\210\001\n\nPostSc" +
-      "ript\022\024\n\014footerLength\030\001 \001(\004\022F\n\013compressio" +
-      "n\030\002 \001(\01621.org.apache.hadoop.hive.ql.io.o" +
-      "rc.CompressionKind\022\034\n\024compressionBlockSi" +
-      "ze\030\003 \001(\004*:\n\017CompressionKind\022\010\n\004NONE\020\000\022\010\n" +
-      "\004ZLIB\020\001\022\n\n\006SNAPPY\020\002\022\007\n\003LZO\020\003"
+      "B\n\021DecimalStatistics\022\017\n\007minimum\030\001 \001(\t\022\017\n" +
+      "\007maximum\030\002 \001(\t\022\013\n\003sum\030\003 \001(\t\"\260\003\n\020ColumnSt" +
+      "atistics\022\026\n\016numberOfValues\030\001 \001(\004\022J\n\rintS",
+      "tatistics\030\002 \001(\01323.org.apache.hadoop.hive" +
+      ".ql.io.orc.IntegerStatistics\022L\n\020doubleSt" +
+      "atistics\030\003 \001(\01322.org.apache.hadoop.hive." +
+      "ql.io.orc.DoubleStatistics\022L\n\020stringStat" +
+      "istics\030\004 \001(\01322.org.apache.hadoop.hive.ql" +
+      ".io.orc.StringStatistics\022L\n\020bucketStatis" +
+      "tics\030\005 \001(\01322.org.apache.hadoop.hive.ql.i" +
+      "o.orc.BucketStatistics\022N\n\021decimalStatist" +
+      "ics\030\006 \001(\01323.org.apache.hadoop.hive.ql.io" +
+      ".orc.DecimalStatistics\"n\n\rRowIndexEntry\022",
+      "\025\n\tpositions\030\001 \003(\004B\002\020\001\022F\n\nstatistics\030\002 \001" +
+      "(\01322.org.apache.hadoop.hive.ql.io.orc.Co" +
+      "lumnStatistics\"J\n\010RowIndex\022>\n\005entry\030\001 \003(" +
+      "\0132/.org.apache.hadoop.hive.ql.io.orc.Row" +
+      "IndexEntry\"\331\001\n\006Stream\022;\n\004kind\030\001 \002(\0162-.or" +
+      "g.apache.hadoop.hive.ql.io.orc.Stream.Ki" +
+      "nd\022\016\n\006column\030\002 \001(\r\022\016\n\006length\030\003 \001(\004\"r\n\004Ki" +
+      "nd\022\013\n\007PRESENT\020\000\022\010\n\004DATA\020\001\022\n\n\006LENGTH\020\002\022\023\n" +
+      "\017DICTIONARY_DATA\020\003\022\024\n\020DICTIONARY_COUNT\020\004" +
+      "\022\r\n\tSECONDARY\020\005\022\r\n\tROW_INDEX\020\006\"\221\001\n\016Colum",
+      "nEncoding\022C\n\004kind\030\001 \002(\01625.org.apache.had" +
+      "oop.hive.ql.io.orc.ColumnEncoding.Kind\022\026" +
+      "\n\016dictionarySize\030\002 \001(\r\"\"\n\004Kind\022\n\n\006DIRECT" +
+      "\020\000\022\016\n\nDICTIONARY\020\001\"\214\001\n\014StripeFooter\0229\n\007s" +
+      "treams\030\001 \003(\0132(.org.apache.hadoop.hive.ql" +
+      ".io.orc.Stream\022A\n\007columns\030\002 \003(\01320.org.ap" +
+      "ache.hadoop.hive.ql.io.orc.ColumnEncodin" +
+      "g\"\236\002\n\004Type\0229\n\004kind\030\001 \002(\0162+.org.apache.ha" +
+      "doop.hive.ql.io.orc.Type.Kind\022\024\n\010subtype" +
+      "s\030\002 \003(\rB\002\020\001\022\022\n\nfieldNames\030\003 \003(\t\"\260\001\n\004Kind",
+      "\022\013\n\007BOOLEAN\020\000\022\010\n\004BYTE\020\001\022\t\n\005SHORT\020\002\022\007\n\003IN" +
+      "T\020\003\022\010\n\004LONG\020\004\022\t\n\005FLOAT\020\005\022\n\n\006DOUBLE\020\006\022\n\n\006" +
+      "STRING\020\007\022\n\n\006BINARY\020\010\022\r\n\tTIMESTAMP\020\t\022\010\n\004L" +
+      "IST\020\n\022\007\n\003MAP\020\013\022\n\n\006STRUCT\020\014\022\t\n\005UNION\020\r\022\013\n" +
+      "\007DECIMAL\020\016\"x\n\021StripeInformation\022\016\n\006offse" +
+      "t\030\001 \001(\004\022\023\n\013indexLength\030\002 \001(\004\022\022\n\ndataLeng" +
+      "th\030\003 \001(\004\022\024\n\014footerLength\030\004 \001(\004\022\024\n\014number" +
+      "OfRows\030\005 \001(\004\"/\n\020UserMetadataItem\022\014\n\004name" +
+      "\030\001 \002(\t\022\r\n\005value\030\002 \002(\014\"\356\002\n\006Footer\022\024\n\014head" +
+      "erLength\030\001 \001(\004\022\025\n\rcontentLength\030\002 \001(\004\022D\n",
+      "\007stripes\030\003 \003(\01323.org.apache.hadoop.hive." +
+      "ql.io.orc.StripeInformation\0225\n\005types\030\004 \003" +
+      "(\0132&.org.apache.hadoop.hive.ql.io.orc.Ty" +
+      "pe\022D\n\010metadata\030\005 \003(\01322.org.apache.hadoop" +
+      ".hive.ql.io.orc.UserMetadataItem\022\024\n\014numb" +
+      "erOfRows\030\006 \001(\004\022F\n\nstatistics\030\007 \003(\01322.org" +
+      ".apache.hadoop.hive.ql.io.orc.ColumnStat" +
+      "istics\022\026\n\016rowIndexStride\030\010 \001(\r\"\210\001\n\nPostS" +
+      "cript\022\024\n\014footerLength\030\001 \001(\004\022F\n\013compressi" +
+      "on\030\002 \001(\01621.org.apache.hadoop.hive.ql.io.",
+      "orc.CompressionKind\022\034\n\024compressionBlockS" +
+      "ize\030\003 \001(\004*:\n\017CompressionKind\022\010\n\004NONE\020\000\022\010" +
+      "\n\004ZLIB\020\001\022\n\n\006SNAPPY\020\002\022\007\n\003LZO\020\003"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -9710,16 +10428,24 @@ public final class OrcProto {
               new java.lang.String[] { "Count", },
               org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.Builder.class);
-          internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor =
+          internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_descriptor =
             getDescriptor().getMessageTypes().get(4);
+          internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_descriptor,
+              new java.lang.String[] { "Minimum", "Maximum", "Sum", },
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.class,
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.Builder.class);
+          internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor =
+            getDescriptor().getMessageTypes().get(5);
           internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor,
-              new java.lang.String[] { "NumberOfValues", "IntStatistics", "DoubleStatistics", "StringStatistics", "BucketStatistics", },
+              new java.lang.String[] { "NumberOfValues", "IntStatistics", "DoubleStatistics", "StringStatistics", "BucketStatistics", "DecimalStatistics", },
               org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndexEntry_descriptor =
-            getDescriptor().getMessageTypes().get(5);
+            getDescriptor().getMessageTypes().get(6);
           internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndexEntry_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndexEntry_descriptor,
@@ -9727,7 +10453,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndexEntry.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndexEntry.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndex_descriptor =
-            getDescriptor().getMessageTypes().get(6);
+            getDescriptor().getMessageTypes().get(7);
           internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndex_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndex_descriptor,
@@ -9735,7 +10461,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndex.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndex.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Stream_descriptor =
-            getDescriptor().getMessageTypes().get(7);
+            getDescriptor().getMessageTypes().get(8);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Stream_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_Stream_descriptor,
@@ -9743,7 +10469,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Stream.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Stream.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnEncoding_descriptor =
-            getDescriptor().getMessageTypes().get(8);
+            getDescriptor().getMessageTypes().get(9);
           internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnEncoding_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnEncoding_descriptor,
@@ -9751,7 +10477,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncoding.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncoding.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeFooter_descriptor =
-            getDescriptor().getMessageTypes().get(9);
+            getDescriptor().getMessageTypes().get(10);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeFooter_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_StripeFooter_descriptor,
@@ -9759,7 +10485,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Type_descriptor =
-            getDescriptor().getMessageTypes().get(10);
+            getDescriptor().getMessageTypes().get(11);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Type_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_Type_descriptor,
@@ -9767,7 +10493,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Type.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Type.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeInformation_descriptor =
-            getDescriptor().getMessageTypes().get(11);
+            getDescriptor().getMessageTypes().get(12);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeInformation_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_StripeInformation_descriptor,
@@ -9775,7 +10501,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeInformation.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeInformation.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_UserMetadataItem_descriptor =
-            getDescriptor().getMessageTypes().get(12);
+            getDescriptor().getMessageTypes().get(13);
           internal_static_org_apache_hadoop_hive_ql_io_orc_UserMetadataItem_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_UserMetadataItem_descriptor,
@@ -9783,7 +10509,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.UserMetadataItem.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.UserMetadataItem.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Footer_descriptor =
-            getDescriptor().getMessageTypes().get(13);
+            getDescriptor().getMessageTypes().get(14);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Footer_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_Footer_descriptor,
@@ -9791,7 +10517,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Footer.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Footer.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_PostScript_descriptor =
-            getDescriptor().getMessageTypes().get(14);
+            getDescriptor().getMessageTypes().get(15);
           internal_static_org_apache_hadoop_hive_ql_io_orc_PostScript_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_PostScript_descriptor,

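Taken together, the OrcProto.java hunks above add a DecimalStatistics message (string minimum/maximum/sum, since decimal values need not fit in a long or double) and hang it off ColumnStatistics as optional field 6. A minimal end-to-end sketch of the new generated API; setNumberOfValues() is the standard generated setter for the existing field 1 and is assumed here, everything else appears in the diff:

import org.apache.hadoop.hive.ql.io.orc.OrcProto;

public class DecimalStatsExample {
  public static void main(String[] args) throws Exception {
    // Min/max/sum travel as strings to preserve arbitrary precision.
    OrcProto.DecimalStatistics decStats =
        OrcProto.DecimalStatistics.newBuilder()
            .setMinimum("-12.34")
            .setMaximum("9876.50")
            .setSum("9864.16")
            .build();

    // Attach the new optional field 6 to a column's statistics.
    OrcProto.ColumnStatistics colStats =
        OrcProto.ColumnStatistics.newBuilder()
            .setNumberOfValues(2L)
            .setDecimalStatistics(decStats)
            .build();

    // Round-trip through the wire format with the generated parser.
    OrcProto.ColumnStatistics parsed =
        OrcProto.ColumnStatistics.parseFrom(colStats.toByteString());
    System.out.println(parsed.hasDecimalStatistics());          // true
    System.out.println(parsed.getDecimalStatistics().getSum()); // 9864.16
  }
}
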
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java Wed Apr 17 07:29:38 2013
@@ -385,7 +385,7 @@ public abstract class CommonJoinOperator
   // all evaluation should be processed here for valid aliasFilterTags
   //
   // for MapJoin, filter tag is pre-calculated in MapredLocalTask and stored with value.
-  // when reading the hashtable, MapJoinObjectValue calcuates alias filter and provide it to join
+  // when reading the hashtable, MapJoinObjectValue calculates alias filter and provide it to join
   protected ArrayList<Object> getFilteredValue(byte alias, Object row) throws HiveException {
     boolean hasFilter = hasFilter(alias);
     ArrayList<Object> nr = JoinUtil.computeValues(row, joinValues[alias],

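The comment above describes alias filter tags: for map joins the tag is computed once in MapredLocalTask and stored with the value, so the join side only reads it back. As a rough illustration of the general idea (a bit mask with one bit per join alias; the method names here are hypothetical and this is not Hive's exact encoding):

public final class FilterTagSketch {
  // Set the bit for an alias whose filter rejected the row.
  static short markFiltered(short tag, int alias) {
    return (short) (tag | (1 << alias));
  }

  // Test whether the row was filtered out for a given alias.
  static boolean isFiltered(short tag, int alias) {
    return (tag & (1 << alias)) != 0;
  }

  public static void main(String[] args) {
    short tag = 0;
    tag = markFiltered(tag, 1);             // row fails alias 1's filter
    System.out.println(isFiltered(tag, 0)); // false
    System.out.println(isFiltered(tag, 1)); // true
  }
}
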
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Wed Apr 17 07:29:38 2013
@@ -109,10 +109,10 @@ import org.apache.hadoop.hive.ql.parse.B
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
@@ -2323,7 +2323,8 @@ public class DDLTask extends Task<DDLWor
 
       List<FieldSchema> cols = table.getCols();
       cols.addAll(table.getPartCols());
-      outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols));
+      outStream.writeBytes(
+          MetaDataFormatUtils.getAllColumnsInformation(cols, false));
       ((FSDataOutputStream) outStream).close();
       outStream = null;
     } catch (IOException e) {

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Wed Apr 17 07:29:38 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.metastore.api.SkewedValueList;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveKey;
@@ -708,7 +709,7 @@ public class FileSinkOperator extends Te
     List<String> skewedCols = lbCtx.getSkewedColNames();
     List<List<String>> allSkewedVals = lbCtx.getSkewedColValues();
     List<String> skewedValsCandidate = null;
-    Map<List<String>, String> locationMap = lbCtx.getLbLocationMap();
+    Map<SkewedValueList, String> locationMap = lbCtx.getLbLocationMap();
 
     /* Convert input row to standard objects. */
     ObjectInspectorUtils.copyToStandardObject(standObjs, row,
@@ -726,14 +727,14 @@ public class FileSinkOperator extends Te
     if (allSkewedVals.contains(skewedValsCandidate)) {
       /* matches skewed values. */
       lbDirName = FileUtils.makeListBucketingDirName(skewedCols, skewedValsCandidate);
-      locationMap.put(skewedValsCandidate, lbDirName);
+      locationMap.put(new SkewedValueList(skewedValsCandidate), lbDirName);
     } else {
       /* create default directory. */
       lbDirName = FileUtils.makeDefaultListBucketingDirName(skewedCols,
           lbCtx.getDefaultDirName());
       List<String> defaultKey = Arrays.asList(lbCtx.getDefaultKey());
       if (!locationMap.containsKey(defaultKey)) {
-        locationMap.put(defaultKey, lbDirName);
+        locationMap.put(new SkewedValueList(defaultKey), lbDirName);
       }
     }
     return lbDirName;

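The hunk above switches the list-bucketing location map to key on SkewedValueList instead of the raw List<String>; note that the containsKey probe has to wrap its key the same way, or the default-directory entry is re-put on every row. Below is a minimal sketch of why the key type matters, assuming only what the diff shows (SkewedValueList is the Thrift-generated metastore struct imported above, so it provides equals()/hashCode()); the values and directory name are made up:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.SkewedValueList;

    public class SkewedKeyDemo {
      public static void main(String[] args) {
        Map<SkewedValueList, String> locationMap =
            new HashMap<SkewedValueList, String>();
        List<String> skewedValue = Arrays.asList("484", "val_484");
        locationMap.put(new SkewedValueList(skewedValue), "key=484/val_484");
        // A lookup must wrap the list the same way; probing with the bare
        // List<String> never hits, because the map's keys are structs.
        System.out.println(locationMap.get(new SkewedValueList(skewedValue)));
      }
    }
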
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Wed Apr 17 07:29:38 2013
@@ -126,107 +126,9 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFUpper;
 import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFBridge;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCollectSet;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFComputeStats;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFContextNGrams;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCorrelation;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovariance;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovarianceSample;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCumeDist;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFDenseRank;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEWAHBitmap;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFFirstValue;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFHistogramNumeric;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFLastValue;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMin;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFNTile;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFParameterInfo;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentRank;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileApprox;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFRank;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFLead;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFLag;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver2;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFRowNumber;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStd;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStdSample;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFVariance;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFVarianceSample;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFnGrams;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArray;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayContains;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAssertTrue;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCoalesce;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcatWS;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapAnd;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapEmpty;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapOr;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFElt;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFField;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFormatNumber;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIf;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInFile;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIndex;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInstr;
+import org.apache.hadoop.hive.ql.udf.generic.*;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag.GenericUDFLag;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag.GenericUDFLead;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLocate;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMap;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMapKeys;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMapValues;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNamedStruct;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNvl;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualNS;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFReflect;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFReflect2;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSentences;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSize;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSortArray;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSplit;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStringToMap;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUtcTimestamp;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTranslate;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUnion;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUnixTimeStamp;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFJSONTuple;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFParseUrlTuple;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFStack;
-import org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo;
 import org.apache.hadoop.hive.ql.udf.ptf.NPath.NPathResolver;
 import org.apache.hadoop.hive.ql.udf.ptf.Noop.NoopResolver;
 import org.apache.hadoop.hive.ql.udf.ptf.NoopWithMap.NoopWithMapResolver;
@@ -273,6 +175,8 @@ public final class FunctionRegistry {
 
   public static final String LEAD_FUNC_NAME = "lead";
   public static final String LAG_FUNC_NAME = "lag";
+  public static final String LAST_VALUE_FUNC_NAME = "last_value";
+
 
   public static final String WINDOWING_TABLE_FUNCTION = "windowingtablefunction";
   public static final String NOOP_TABLE_FUNCTION = "noop";

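Two things happen above: the long run of explicit generic-UDF imports collapses to a wildcard, and a LAST_VALUE_FUNC_NAME constant joins LEAD_FUNC_NAME/LAG_FUNC_NAME for the windowing work on this branch. A hedged sketch of how such a constant is typically consumed; getFunctionInfo() is FunctionRegistry's existing public lookup, everything else here is illustrative:

    import org.apache.hadoop.hive.ql.exec.FunctionInfo;
    import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

    public class LastValueLookup {
      public static void main(String[] args) {
        // Resolve the window function by its canonical registry name.
        FunctionInfo info = FunctionRegistry.getFunctionInfo(
            FunctionRegistry.LAST_VALUE_FUNC_NAME);
        if (info == null) {
          throw new IllegalStateException("last_value is not registered");
        }
        System.out.println("resolved: " + info.getDisplayName());
      }
    }
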
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java Wed Apr 17 07:29:38 2013
@@ -230,11 +230,8 @@ public class MapJoinOperator extends Abs
       // compute keys and values as StandardObjects
       AbstractMapJoinKey key = JoinUtil.computeMapJoinKeys(row, joinKeys[alias],
           joinKeysObjectInspectors[alias]);
-      ArrayList<Object> value = getFilteredValue(alias, row);
-
-      // Add the value to the ArrayList
-      storage[alias].add(value);
 
+      boolean joinNeeded = false;
       for (byte pos = 0; pos < order.length; pos++) {
         if (pos != alias) {
 
@@ -243,12 +240,14 @@ public class MapJoinOperator extends Abs
 
           // there is no join-value or join-key has all null elements
           if (o == null || key.hasAnyNulls(nullsafes)) {
-            if (noOuterJoin) {
-              storage[pos] = emptyList;
-            } else {
+            if (!noOuterJoin) {
+              joinNeeded = true;
               storage[pos] = dummyObjVectors[pos];
+            } else {
+              storage[pos] = emptyList;
             }
           } else {
+            joinNeeded = true;
             rowContainer.reset(o.getObj());
             storage[pos] = rowContainer;
             aliasFilterTags[pos] = o.getAliasFilter();
@@ -256,8 +255,15 @@ public class MapJoinOperator extends Abs
         }
       }
 
-      // generate the output records
-      checkAndGenObject();
+      if (joinNeeded) {
+        ArrayList<Object> value = getFilteredValue(alias, row);
+
+        // Add the value to the ArrayList
+        storage[alias].add(value);
+
+        // generate the output records
+        checkAndGenObject();
+      }
 
       // done with the row
       storage[tag].clear();

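The restructuring above makes both the big-table value projection (getFilteredValue) and output generation (checkAndGenObject) conditional on joinNeeded, so an inner-join row with no hash-table match is dropped without paying either cost. A stripped-down sketch of the pattern, with plain Java collections standing in for Hive's row containers:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class DeferredJoinDemo {
      public static void main(String[] args) {
        Map<String, String> hashTable = new HashMap<String, String>();
        hashTable.put("k2", "small-table row");
        for (String key : Arrays.asList("k1", "k2")) {
          boolean joinNeeded = hashTable.containsKey(key); // probe first
          if (joinNeeded) {
            // Only now pay for projecting the big-table row and emitting it.
            String value = "projected big-table row for " + key;
            System.out.println(value + " JOIN " + hashTable.get(key));
          } // inner join with no match: skip materialization entirely
        }
      }
    }
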
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Wed Apr 17 07:29:38 2013
@@ -138,7 +138,6 @@ public class MapOperator extends Operato
     public void setOp(Operator<? extends OperatorDesc> op) {
       this.op = op;
     }
-
   }
 
   private static class MapOpCtx {
@@ -468,8 +467,8 @@ public class MapOperator extends Operato
 
   public void setChildren(Configuration hconf) throws HiveException {
 
-    Path fpath = new Path((new Path(HiveConf.getVar(hconf,
-        HiveConf.ConfVars.HADOOPMAPFILENAME))).toUri().getPath());
+    Path fpath = new Path(HiveConf.getVar(hconf,
+        HiveConf.ConfVars.HADOOPMAPFILENAME));
 
     ArrayList<Operator<? extends OperatorDesc>> children =
       new ArrayList<Operator<? extends OperatorDesc>>();
@@ -481,7 +480,7 @@ public class MapOperator extends Operato
     try {
       for (String onefile : conf.getPathToAliases().keySet()) {
         MapOpCtx opCtx = initObjectInspector(conf, hconf, onefile, convertedOI);
-        Path onepath = new Path(new Path(onefile).toUri().getPath());
+        Path onepath = new Path(onefile);
         List<String> aliases = conf.getPathToAliases().get(onefile);
 
         for (String onealias : aliases) {
@@ -513,7 +512,7 @@ public class MapOperator extends Operato
         // didn't find match for input file path in configuration!
         // serious problem ..
         LOG.error("Configuration does not have any alias for path: "
-            + fpath.toUri().getPath());
+            + fpath.toUri());
         throw new HiveException("Configuration and input path are inconsistent");
       }
 

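The path changes above stop reducing file names to their bare path component, so alias lookup and the error message see the full URI. A small demonstration using Hadoop's Path directly (the file name is made up):

    import org.apache.hadoop.fs.Path;

    public class PathSchemeDemo {
      public static void main(String[] args) {
        Path p = new Path("hdfs://nn1:8020/user/hive/warehouse/t/000000_0");
        // Old behavior: scheme and authority were discarded, so files on
        // different filesystems with identical paths could collide.
        System.out.println(p.toUri().getPath()); // /user/hive/warehouse/t/000000_0
        // New behavior: the full URI is preserved.
        System.out.println(p.toUri()); // hdfs://nn1:8020/user/hive/warehouse/t/000000_0
      }
    }
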
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java Wed Apr 17 07:29:38 2013
@@ -58,6 +58,8 @@ import org.apache.hadoop.hive.ql.session
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -195,6 +197,14 @@ public class MapredLocalTask extends Tas
 
       // }
 
+      // Set the HADOOP_USER_NAME env variable for the child process, so that
+      // it also runs with the Hadoop permissions of the user the job is running as.
+      // Hadoop consults this variable only in insecure (non-Kerberos) mode.
+      HadoopShims shim = ShimLoader.getHadoopShims();
+      String endUserName = shim.getShortUserName(shim.getUGIForConf(job));
+      console.printInfo("setting HADOOP_USER_NAME\t" + endUserName);
+      variables.put("HADOOP_USER_NAME", endUserName);
+
       if (variables.containsKey(HADOOP_OPTS_KEY)) {
         variables.put(HADOOP_OPTS_KEY, variables.get(HADOOP_OPTS_KEY) + hadoopOpts);
       } else {

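The child JVM spawned for the local task inherits HADOOP_USER_NAME from the variables map; Hadoop honors that variable only in insecure (non-Kerberos) deployments. A minimal sketch of the child-side resolution, not Hive code:

    public class WhoAmI {
      public static void main(String[] args) {
        // In insecure mode Hadoop takes the user from this variable first,
        // falling back to the OS account the JVM runs as.
        String user = System.getenv("HADOOP_USER_NAME");
        if (user == null) {
          user = System.getProperty("user.name");
        }
        System.out.println("effective hadoop user: " + user);
      }
    }
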
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Wed Apr 17 07:29:38 2013
@@ -525,7 +525,7 @@ public abstract class Operator<T extends
     if (fatalError) {
       return;
     }
-    OperatorHookContext opHookContext = new OperatorHookContext(this, row);
+    OperatorHookContext opHookContext = new OperatorHookContext(this, row, tag);
     preProcessCounter();
     enterOperatorHooks(opHookContext);
     processOp(row, tag);
@@ -612,7 +612,7 @@ public abstract class Operator<T extends
 
     LOG.info(id + " forwarded " + cntr + " rows");
 
-    closeOperatorHooks(new OperatorHookContext(this, null));
+    closeOperatorHooks(new OperatorHookContext(this));
     // call the operator specific close routine
     closeOp(abort);
 
@@ -1549,6 +1549,7 @@ public abstract class Operator<T extends
       start++;
     }
     builder.append(name);
+    start += name.length();
     if (added) {
       if (op.getNumChild() > 0) {
         List<Operator<?>> children = op.getChildOperators();
@@ -1559,7 +1560,7 @@ public abstract class Operator<T extends
               builder.append(' ');
             }
           }
-          toString(builder, visited, children.get(i), start += name.length());
+          toString(builder, visited, children.get(i), start);
         }
       }
       return true;

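The indentation fix above hoists start += name.length() out of the recursive call, where it had executed once per child and pushed each later sibling further to the right. A worked example of the difference (label and numbers are illustrative):

    public class IndentDemo {
      public static void main(String[] args) {
        String name = "SEL[3]"; // hypothetical operator label
        int start = 2;
        start += name.length(); // fixed code: advance once, before the loop
        for (int i = 0; i < 2; i++) {
          System.out.println("child " + i + " indented to column " + start);
        }
        // With `start += name.length()` inside the loop, child 0 landed at
        // column 8 but child 1 drifted to column 14.
      }
    }
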
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorHookContext.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorHookContext.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorHookContext.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorHookContext.java Wed Apr 17 07:29:38 2013
@@ -18,26 +18,43 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import java.util.List;
+
 public class OperatorHookContext {
-  private String operatorName;
-  private String operatorId;
-  private Object currentRow;
+  private final String operatorName;
+  private final String operatorId;
+  private final Object currentRow;
+  private final int parentTag;
   private Operator operator;
-  public OperatorHookContext(Operator op, Object row) {
-    this(op.getName(), op.getIdentifier(), row);
+
+  public OperatorHookContext(Operator op) {
+    this(op, null, -1);
+  }
+
+  public OperatorHookContext(Operator op, Object row, int tag) {
+    this(op.getName(), op.getIdentifier(), row, tag);
     this.operator = op;
   }
 
-  private OperatorHookContext(String opName, String opId, Object row) {
+  private OperatorHookContext(String opName, String opId, Object row, int tag) {
     operatorName = opName;
     operatorId = opId;
     currentRow = row;
+    parentTag = tag;
   }
 
   public Operator getOperator() {
     return operator;
   }
 
+  public Operator getParentOperator() {
+    List<Operator> parents = this.operator.getParentOperators();
+    if (parents == null || parents.isEmpty() || parentTag < 0) {
+      return null;
+    }
+    return parents.get(parentTag);
+  }
+
   public String getOperatorName() {
     return operatorName;
   }

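One subtlety above: the single-argument constructor, used from the close path, leaves parentTag at -1, so getParentOperator() must treat that as "no specific parent" rather than indexing the parent list. A hook-side sketch; the hook class and its entry point are hypothetical, the context methods are the ones defined above:

    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.exec.OperatorHookContext;

    public class ParentLoggingHook {
      // Imagined hook entry point, invoked with the context built above.
      public void enter(OperatorHookContext ctx) {
        Operator parent = ctx.getParentOperator();
        if (parent != null) { // null on the close path or with no parents
          System.out.println(ctx.getOperatorName()
              + " fed by " + parent.getName());
        }
      }
    }
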
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java Wed Apr 17 07:29:38 2013
@@ -98,10 +98,11 @@ public class TableScanOperator extends O
     // in the execution context. This is needed for the following scenario:
     // insert overwrite table T1 select * from T2;
     // where T1 and T2 are sorted/bucketed by the same keys into the same number of buckets
-    // Although one mapper per file is used (bucketizedinputhiveinput), it is possible that
+    // Although one mapper per file is used (BucketizedHiveInputFormat), it is possible that
     // any mapper can pick up any file (depending on the size of the files). The bucket number
     // corresponding to the input file is stored to name the output bucket file appropriately.
-    Map<String, Integer> bucketNameMapping = conf != null ? conf.getBucketFileNameMapping() : null;
+    Map<String, Integer> bucketNameMapping =
+        (conf != null) ? conf.getBucketFileNameMapping() : null;
     if ((bucketNameMapping != null) && (!bucketNameMapping.isEmpty())) {
       String currentInputFile = getExecContext().getCurrentInputFile();
       getExecContext().setFileId(Integer.toString(bucketNameMapping.get(

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Wed Apr 17 07:29:38 2013
@@ -690,11 +690,6 @@ public final class Utilities {
     return new PartitionDesc(part, tblDesc);
   }
 
-  public static void addMapWork(MapredWork mr, Table tbl, String alias, Operator<?> work) {
-    mr.addMapWork(tbl.getDataLocation().getPath(), alias, work, new PartitionDesc(
-        getTableDesc(tbl), (LinkedHashMap<String, String>) null));
-  }
-
   private static String getOpTreeSkel_helper(Operator<?> op, String indent) {
     if (op == null) {
       return "";

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java Wed Apr 17 07:29:38 2013
@@ -36,7 +36,7 @@ public class ReadEntity extends Entity i
   // The inputs will contain V and T (parent: V)
 
   // For views, the entities can be nested - by default, entities are at the top level
-  private Set<ReadEntity> parents = null;
+  private final Set<ReadEntity> parents = new HashSet<ReadEntity>();
 
   /**
    * For serialization only.
@@ -57,7 +57,6 @@ public class ReadEntity extends Entity i
 
   private void initParent(ReadEntity parent) {
     if (parent != null) {
-      this.parents = new HashSet<ReadEntity>();
       this.parents.add(parent);
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java Wed Apr 17 07:29:38 2013
@@ -19,6 +19,10 @@
 package org.apache.hadoop.hive.ql.io;
 
 import java.io.IOException;
+import java.util.Map;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.WeakHashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -31,6 +35,8 @@ import org.apache.hadoop.mapred.FileSpli
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import org.apache.hadoop.hive.conf.HiveConf;
+
 /**
  * RCFileRecordReader.
  *
@@ -45,20 +51,72 @@ public class RCFileRecordReader<K extend
   private final long end;
   private boolean more = true;
   protected Configuration conf;
+  private final FileSplit split;
+  private final boolean useCache;
+
+  private static final RCFileSyncCache syncCache = new RCFileSyncCache();
+
+  private static final class RCFileSyncEntry {
+    long end;
+    long endSync;
+  }
+
+  private static final class RCFileSyncCache {
+
+    private final Map<String, RCFileSyncEntry> cache;
+
+    public RCFileSyncCache() {
+      cache = Collections.synchronizedMap(new WeakHashMap<String, RCFileSyncEntry>());
+    }
+
+    public void put(FileSplit split, long endSync) {
+      Path path = split.getPath();
+      long end = split.getStart() + split.getLength();
+      String key = path.toString() + "+" + end;
+
+      RCFileSyncEntry entry = new RCFileSyncEntry();
+      entry.end = end;
+      entry.endSync = endSync;
+      if (entry.endSync >= entry.end) {
+        cache.put(key, entry);
+      }
+    }
+
+    public long get(FileSplit split) {
+      Path path = split.getPath();
+      long start = split.getStart();
+      String key = path.toString() + "+" + start;
+      RCFileSyncEntry entry = cache.get(key);
+      if (entry != null) {
+        return entry.endSync;
+      }
+      return -1;
+    }
+  }
 
   public RCFileRecordReader(Configuration conf, FileSplit split)
       throws IOException {
+
     Path path = split.getPath();
     FileSystem fs = path.getFileSystem(conf);
     this.in = new RCFile.Reader(fs, path, conf);
     this.end = split.getStart() + split.getLength();
     this.conf = conf;
+    this.split = split;
+
+    useCache = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEUSERCFILESYNCCACHE);
 
     if (split.getStart() > in.getPosition()) {
-      in.sync(split.getStart()); // sync to start
+      long oldSync = useCache ? syncCache.get(split) : -1;
+      if (oldSync == -1) {
+        in.sync(split.getStart()); // sync to start
+      } else {
+        in.seek(oldSync);
+      }
     }
 
     this.start = in.getPosition();
+
     more = start < end;
   }
 
@@ -101,12 +159,13 @@ public class RCFileRecordReader<K extend
     }
 
     more = in.next(key);
-    if (!more) {
-      return false;
-    }
 
     long lastSeenSyncPos = in.lastSeenSyncPos();
+
     if (lastSeenSyncPos >= end) {
+      if (useCache) {
+        syncCache.put(split, lastSeenSyncPos);
+      }
       more = false;
       return more;
     }

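How the cache pays off: put() records, under "path+endOffset", the sync point a reader discovered while overrunning its split's end, and because the next split's start offset equals this split's end offset, get() (keyed by "path+startOffset") lets the follow-on reader seek() straight to that boundary instead of re-scanning for a sync marker. This only helps when splits of the same file are read in the same JVM, e.g. under task JVM reuse. A hedged sketch of turning it on; HIVEUSERCFILESYNCCACHE is the ConfVars entry referenced above, and the property string is an assumption:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;

    public class EnableSyncCache {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Assumed to be the property behind ConfVars.HIVEUSERCFILESYNCCACHE.
        conf.setBoolean("hive.exec.rcfile.use.sync.cache", true);
        System.out.println(HiveConf.getBoolVar(conf,
            HiveConf.ConfVars.HIVEUSERCFILESYNCCACHE));
      }
    }
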
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ColumnStatisticsImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ColumnStatisticsImpl.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ColumnStatisticsImpl.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ColumnStatisticsImpl.java Wed Apr 17 07:29:38 2013
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.io.orc;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 
@@ -420,6 +421,129 @@ class ColumnStatisticsImpl implements Co
     }
   }
 
+  private static final class DecimalStatisticsImpl extends ColumnStatisticsImpl
+      implements DecimalColumnStatistics {
+    private HiveDecimal minimum = null;
+    private HiveDecimal maximum = null;
+    private HiveDecimal sum = HiveDecimal.ZERO;
+
+    DecimalStatisticsImpl() {
+    }
+
+    DecimalStatisticsImpl(OrcProto.ColumnStatistics stats) {
+      super(stats);
+      OrcProto.DecimalStatistics dec = stats.getDecimalStatistics();
+      if (dec.hasMaximum()) {
+        maximum = new HiveDecimal(dec.getMaximum());
+      }
+      if (dec.hasMinimum()) {
+        minimum = new HiveDecimal(dec.getMinimum());
+      }
+      if (dec.hasSum()) {
+        sum = new HiveDecimal(dec.getSum());
+      } else {
+        sum = null;
+      }
+    }
+
+    @Override
+    void reset() {
+      super.reset();
+      minimum = null;
+      maximum = null;
+      sum = HiveDecimal.ZERO;
+    }
+
+    @Override
+    void updateDecimal(HiveDecimal value) {
+      if (minimum == null) {
+        minimum = value;
+        maximum = value;
+      } else if (minimum.compareTo(value) > 0) {
+        minimum = value;
+      } else if (maximum.compareTo(value) < 0) {
+        maximum = value;
+      }
+      if (sum != null) {
+        try {
+          sum = sum.add(value);
+        } catch (NumberFormatException nfe) {
+          sum = null;
+        }
+      }
+    }
+
+    @Override
+    void merge(ColumnStatisticsImpl other) {
+      super.merge(other);
+      DecimalStatisticsImpl dec = (DecimalStatisticsImpl) other;
+      if (minimum == null) {
+        minimum = dec.minimum;
+        maximum = dec.maximum;
+        sum = dec.sum;
+      } else if (dec.minimum != null) {
+        if (minimum.compareTo(dec.minimum) > 0) {
+          minimum = dec.minimum;
+        }
+        if (maximum.compareTo(dec.maximum) < 0) {
+          maximum = dec.maximum;
+        }
+        if (sum == null || dec.sum == null) {
+          sum = null;
+        } else {
+          sum = sum.add(dec.sum);
+        }
+      }
+    }
+
+    @Override
+    OrcProto.ColumnStatistics.Builder serialize() {
+      OrcProto.ColumnStatistics.Builder result = super.serialize();
+      OrcProto.DecimalStatistics.Builder dec =
+          OrcProto.DecimalStatistics.newBuilder();
+      if (getNumberOfValues() != 0) {
+        dec.setMinimum(minimum.toString());
+        dec.setMaximum(maximum.toString());
+      }
+      if (sum != null) {
+        dec.setSum(sum.toString());
+      }
+      result.setDecimalStatistics(dec);
+      return result;
+    }
+
+    @Override
+    public HiveDecimal getMinimum() {
+      return minimum;
+    }
+
+    @Override
+    public HiveDecimal getMaximum() {
+      return maximum;
+    }
+
+    @Override
+    public HiveDecimal getSum() {
+      return sum;
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder buf = new StringBuilder(super.toString());
+      if (getNumberOfValues() != 0) {
+        buf.append(" min: ");
+        buf.append(minimum);
+        buf.append(" max: ");
+        buf.append(maximum);
+        if (sum != null) {
+          buf.append(" sum: ");
+          buf.append(sum);
+        }
+      }
+      return buf.toString();
+    }
+  }
+
   private long count = 0;
 
   ColumnStatisticsImpl(OrcProto.ColumnStatistics stats) {
@@ -451,6 +574,10 @@ class ColumnStatisticsImpl implements Co
     throw new UnsupportedOperationException("Can't update string");
   }
 
+  void updateDecimal(HiveDecimal value) {
+    throw new UnsupportedOperationException("Can't update decimal");
+  }
+
   void merge(ColumnStatisticsImpl stats) {
     count += stats.count;
   }
@@ -492,6 +619,8 @@ class ColumnStatisticsImpl implements Co
             return new DoubleStatisticsImpl();
           case STRING:
             return new StringStatisticsImpl();
+          case DECIMAL:
+            return new DecimalStatisticsImpl();
           default:
             return new ColumnStatisticsImpl();
         }
@@ -509,6 +638,8 @@ class ColumnStatisticsImpl implements Co
       return new DoubleStatisticsImpl(stats);
     } else if (stats.hasStringStatistics()) {
       return new StringStatisticsImpl(stats);
+    } else if (stats.hasDecimalStatistics()) {
+      return new DecimalStatisticsImpl(stats);
     } else {
       return new ColumnStatisticsImpl(stats);
     }

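The decimal statistics keep min/max/sum, with the sum degrading to null when HiveDecimal.add() overflows Hive's precision bounds (the NumberFormatException branch above); serialize() then simply omits the sum field. An illustrative call sequence, noting that DecimalStatisticsImpl is private to ColumnStatisticsImpl, so this fragment only mirrors what the ORC writer drives internally:

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    // Illustrative only: the sequence the writer performs per column.
    DecimalStatisticsImpl stats = new DecimalStatisticsImpl();
    stats.updateDecimal(new HiveDecimal("1.5"));
    stats.updateDecimal(new HiveDecimal("-2.25"));
    // getMinimum() == -2.25, getMaximum() == 1.5, getSum() == -0.75; had any
    // add() overflowed, getSum() would return null and the serialized
    // DecimalStatistics would carry only the minimum and maximum.
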
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java Wed Apr 17 07:29:38 2013
@@ -337,6 +337,8 @@ final class OrcStruct implements Writabl
             return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
           case TIMESTAMP:
             return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
+          case DECIMAL:
+            return PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector;
           default:
             throw new IllegalArgumentException("Unknown primitive type " +
               ((PrimitiveTypeInfo) info).getPrimitiveCategory());
@@ -379,6 +381,8 @@ final class OrcStruct implements Writabl
         return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
       case TIMESTAMP:
         return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
+      case DECIMAL:
+        return PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector;
       case STRUCT:
         return new OrcStructInspector(columnId, types);
       case UNION: