Posted to commits@hive.apache.org by gu...@apache.org on 2013/11/05 08:19:23 UTC

svn commit: r1538885 - in /hive/branches/tez: ./ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/ packaging/ packaging/src/main/assembly/ ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/ ql/src/java/org/apache/hadoop/hive/ql/exec...

Author: gunther
Date: Tue Nov  5 07:19:22 2013
New Revision: 1538885

URL: http://svn.apache.org/r1538885
Log:
Merge latest trunk into branch. (Gunther Hagleitner)

Added:
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
      - copied unchanged from r1538880, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRound.java
      - copied unchanged from r1538880, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRound.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/RoundUtils.java
      - copied unchanged from r1538880, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/RoundUtils.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRound.java
      - copied unchanged from r1538880, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRound.java
Removed:
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFRound.java
Modified:
    hive/branches/tez/   (props changed)
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
    hive/branches/tez/packaging/pom.xml
    hive/branches/tez/packaging/src/main/assembly/bin.xml
    hive/branches/tez/packaging/src/main/assembly/src.xml
    hive/branches/tez/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/RoundWithNumDigitsDoubleToDouble.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
    hive/branches/tez/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
    hive/branches/tez/ql/src/test/queries/clientpositive/udf_round.q
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_udf.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/udf_round.q.out
    hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1538725-1538880

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java Tue Nov  5 07:19:22 2013
@@ -76,8 +76,6 @@ public class HCatCli {
 
     HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
 
-    SessionState.start(ss);
-
     Options options = new Options();
 
     // -e 'quoted-query-string'
@@ -126,19 +124,30 @@ public class HCatCli {
       cmdLine = parser.parse(options, args);
 
     } catch (ParseException e) {
-      printUsage(options, ss.err);
+      printUsage(options, System.err);
+      // Note, we print to System.err instead of ss.err, because if we can't parse our
+      // commandline, we haven't even begun, and therefore cannot be expected to have
+      // reasonably constructed or started the SessionState.
       System.exit(1);
     }
-    // -e
-    String execString = (String) cmdLine.getOptionValue('e');
-    // -f
-    String fileName = (String) cmdLine.getOptionValue('f');
+
+    // -D : process these first, so that we can instantiate SessionState appropriately.
+    setConfProperties(conf, cmdLine.getOptionProperties("D"));
+
+    // Now that the properties are in, we can instantiate SessionState.
+    SessionState.start(ss);
+
     // -h
     if (cmdLine.hasOption('h')) {
       printUsage(options, ss.out);
       System.exit(0);
     }
 
+    // -e
+    String execString = (String) cmdLine.getOptionValue('e');
+
+    // -f
+    String fileName = (String) cmdLine.getOptionValue('f');
     if (execString != null && fileName != null) {
       ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
       printUsage(options, ss.err);
@@ -157,8 +166,7 @@ public class HCatCli {
       conf.set(HCatConstants.HCAT_GROUP, grp);
     }
 
-    // -D
-    setConfProperties(conf, cmdLine.getOptionProperties("D"));
+    // all done parsing, let's run stuff!
 
     if (execString != null) {
       System.exit(processLine(execString));
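
The reordering above is the substance of this hunk: the -D properties are now
applied to the HiveConf before SessionState.start(ss), so the session is
created with those overrides already in effect, and a command line that fails
to parse reports to System.err because no session exists yet. A minimal,
self-contained sketch of the ordering principle (names here are illustrative,
not the real HCatCli code):

    import java.util.Properties;

    public class StartOrderSketch {
      // Stand-in for HiveConf; the real code mutates a HiveConf instance.
      private static final Properties CONF = new Properties();
      private static String sessionValue; // snapshot taken at "session start"

      private static void startSession() {
        // Like SessionState.start(ss): captures config as it stands now.
        sessionValue = CONF.getProperty("hive.example.setting", "default");
      }

      public static void main(String[] args) {
        CONF.setProperty("hive.example.setting", "fromDashD"); // -D first
        startSession();                                        // then start
        System.out.println(sessionValue); // prints fromDashD, not default
      }
    }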

Modified: hive/branches/tez/packaging/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/packaging/pom.xml?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/packaging/pom.xml (original)
+++ hive/branches/tez/packaging/pom.xml Tue Nov  5 07:19:22 2013
@@ -47,7 +47,7 @@
                   <goal>single</goal>
                 </goals>
                 <configuration>
-                  <finalName>hive-${project.version}</finalName>
+                  <finalName>apache-hive-${project.version}</finalName>
                   <descriptors>
                     <descriptor>src/main/assembly/bin.xml</descriptor>
                     <descriptor>src/main/assembly/src.xml</descriptor>

Modified: hive/branches/tez/packaging/src/main/assembly/bin.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/packaging/src/main/assembly/bin.xml?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/packaging/src/main/assembly/bin.xml (original)
+++ hive/branches/tez/packaging/src/main/assembly/bin.xml Tue Nov  5 07:19:22 2013
@@ -29,6 +29,8 @@
     <format>tar.gz</format>
   </formats>
 
+  <baseDirectory>apache-hive-${project.version}-bin</baseDirectory>
+
   <dependencySets>
     <dependencySet>
     <outputDirectory>lib</outputDirectory>

Modified: hive/branches/tez/packaging/src/main/assembly/src.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/packaging/src/main/assembly/src.xml?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/packaging/src/main/assembly/src.xml (original)
+++ hive/branches/tez/packaging/src/main/assembly/src.xml Tue Nov  5 07:19:22 2013
@@ -28,6 +28,8 @@
     <format>tar.gz</format>
   </formats>
 
+  <baseDirectory>apache-hive-${project.version}-src</baseDirectory>
+
   <fileSets>
     <fileSet>
       <directory>${project.parent.basedir}</directory>
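
Taken together with the finalName change in packaging/pom.xml above, the two
<baseDirectory> additions mean both distribution tarballs now unpack into a
versioned top-level directory instead of extracting into the current working
directory. For example (the version is only illustrative, and the exact
archive name depends on the assembly classifier):

    apache-hive-0.13.0-bin.tar.gz  ->  apache-hive-0.13.0-bin/
    apache-hive-0.13.0-src.tar.gz  ->  apache-hive-0.13.0-src/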

Modified: hive/branches/tez/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java (original)
+++ hive/branches/tez/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java Tue Nov  5 07:19:22 2013
@@ -9668,6 +9668,26 @@ public final class OrcProto {
      * <code>optional uint32 maximumLength = 4;</code>
      */
     int getMaximumLength();
+
+    // optional uint32 precision = 5;
+    /**
+     * <code>optional uint32 precision = 5;</code>
+     */
+    boolean hasPrecision();
+    /**
+     * <code>optional uint32 precision = 5;</code>
+     */
+    int getPrecision();
+
+    // optional uint32 scale = 6;
+    /**
+     * <code>optional uint32 scale = 6;</code>
+     */
+    boolean hasScale();
+    /**
+     * <code>optional uint32 scale = 6;</code>
+     */
+    int getScale();
   }
   /**
    * Protobuf type {@code org.apache.hadoop.hive.ql.io.orc.Type}
@@ -9765,6 +9785,16 @@ public final class OrcProto {
               maximumLength_ = input.readUInt32();
               break;
             }
+            case 40: {
+              bitField0_ |= 0x00000004;
+              precision_ = input.readUInt32();
+              break;
+            }
+            case 48: {
+              bitField0_ |= 0x00000008;
+              scale_ = input.readUInt32();
+              break;
+            }
           }
         }
       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -10114,11 +10144,45 @@ public final class OrcProto {
       return maximumLength_;
     }
 
+    // optional uint32 precision = 5;
+    public static final int PRECISION_FIELD_NUMBER = 5;
+    private int precision_;
+    /**
+     * <code>optional uint32 precision = 5;</code>
+     */
+    public boolean hasPrecision() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>optional uint32 precision = 5;</code>
+     */
+    public int getPrecision() {
+      return precision_;
+    }
+
+    // optional uint32 scale = 6;
+    public static final int SCALE_FIELD_NUMBER = 6;
+    private int scale_;
+    /**
+     * <code>optional uint32 scale = 6;</code>
+     */
+    public boolean hasScale() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional uint32 scale = 6;</code>
+     */
+    public int getScale() {
+      return scale_;
+    }
+
     private void initFields() {
       kind_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.Type.Kind.BOOLEAN;
       subtypes_ = java.util.Collections.emptyList();
       fieldNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
       maximumLength_ = 0;
+      precision_ = 0;
+      scale_ = 0;
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -10152,6 +10216,12 @@ public final class OrcProto {
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
         output.writeUInt32(4, maximumLength_);
       }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeUInt32(5, precision_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeUInt32(6, scale_);
+      }
       getUnknownFields().writeTo(output);
     }
 
@@ -10192,6 +10262,14 @@ public final class OrcProto {
         size += com.google.protobuf.CodedOutputStream
           .computeUInt32Size(4, maximumLength_);
       }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(5, precision_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(6, scale_);
+      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -10316,6 +10394,10 @@ public final class OrcProto {
         bitField0_ = (bitField0_ & ~0x00000004);
         maximumLength_ = 0;
         bitField0_ = (bitField0_ & ~0x00000008);
+        precision_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000010);
+        scale_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000020);
         return this;
       }
 
@@ -10363,6 +10445,14 @@ public final class OrcProto {
           to_bitField0_ |= 0x00000002;
         }
         result.maximumLength_ = maximumLength_;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.precision_ = precision_;
+        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.scale_ = scale_;
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -10405,6 +10495,12 @@ public final class OrcProto {
         if (other.hasMaximumLength()) {
           setMaximumLength(other.getMaximumLength());
         }
+        if (other.hasPrecision()) {
+          setPrecision(other.getPrecision());
+        }
+        if (other.hasScale()) {
+          setScale(other.getScale());
+        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -10664,6 +10760,72 @@ public final class OrcProto {
         return this;
       }
 
+      // optional uint32 precision = 5;
+      private int precision_ ;
+      /**
+       * <code>optional uint32 precision = 5;</code>
+       */
+      public boolean hasPrecision() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
+      }
+      /**
+       * <code>optional uint32 precision = 5;</code>
+       */
+      public int getPrecision() {
+        return precision_;
+      }
+      /**
+       * <code>optional uint32 precision = 5;</code>
+       */
+      public Builder setPrecision(int value) {
+        bitField0_ |= 0x00000010;
+        precision_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional uint32 precision = 5;</code>
+       */
+      public Builder clearPrecision() {
+        bitField0_ = (bitField0_ & ~0x00000010);
+        precision_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // optional uint32 scale = 6;
+      private int scale_ ;
+      /**
+       * <code>optional uint32 scale = 6;</code>
+       */
+      public boolean hasScale() {
+        return ((bitField0_ & 0x00000020) == 0x00000020);
+      }
+      /**
+       * <code>optional uint32 scale = 6;</code>
+       */
+      public int getScale() {
+        return scale_;
+      }
+      /**
+       * <code>optional uint32 scale = 6;</code>
+       */
+      public Builder setScale(int value) {
+        bitField0_ |= 0x00000020;
+        scale_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional uint32 scale = 6;</code>
+       */
+      public Builder clearScale() {
+        bitField0_ = (bitField0_ & ~0x00000020);
+        scale_ = 0;
+        onChanged();
+        return this;
+      }
+
       // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.Type)
     }
 
@@ -15166,34 +15328,35 @@ public final class OrcProto {
       "9\n\007streams\030\001 \003(\0132(.org.apache.hadoop.hiv" +
       "e.ql.io.orc.Stream\022A\n\007columns\030\002 \003(\01320.or" +
       "g.apache.hadoop.hive.ql.io.orc.ColumnEnc" +
-      "oding\"\314\002\n\004Type\0229\n\004kind\030\001 \002(\0162+.org.apach" +
+      "oding\"\356\002\n\004Type\0229\n\004kind\030\001 \002(\0162+.org.apach" +
       "e.hadoop.hive.ql.io.orc.Type.Kind\022\024\n\010sub" +
       "types\030\002 \003(\rB\002\020\001\022\022\n\nfieldNames\030\003 \003(\t\022\025\n\rm" +
-      "aximumLength\030\004 \001(\r\"\307\001\n\004Kind\022\013\n\007BOOLEAN\020\000" +
-      "\022\010\n\004BYTE\020\001\022\t\n\005SHORT\020\002\022\007\n\003INT\020\003\022\010\n\004LONG\020\004" +
-      "\022\t\n\005FLOAT\020\005\022\n\n\006DOUBLE\020\006\022\n\n\006STRING\020\007\022\n\n\006B",
-      "INARY\020\010\022\r\n\tTIMESTAMP\020\t\022\010\n\004LIST\020\n\022\007\n\003MAP\020" +
-      "\013\022\n\n\006STRUCT\020\014\022\t\n\005UNION\020\r\022\013\n\007DECIMAL\020\016\022\010\n" +
-      "\004DATE\020\017\022\013\n\007VARCHAR\020\020\"x\n\021StripeInformatio" +
-      "n\022\016\n\006offset\030\001 \001(\004\022\023\n\013indexLength\030\002 \001(\004\022\022" +
-      "\n\ndataLength\030\003 \001(\004\022\024\n\014footerLength\030\004 \001(\004" +
-      "\022\024\n\014numberOfRows\030\005 \001(\004\"/\n\020UserMetadataIt" +
-      "em\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\014\"\356\002\n\006Foot" +
-      "er\022\024\n\014headerLength\030\001 \001(\004\022\025\n\rcontentLengt" +
-      "h\030\002 \001(\004\022D\n\007stripes\030\003 \003(\01323.org.apache.ha" +
-      "doop.hive.ql.io.orc.StripeInformation\0225\n",
-      "\005types\030\004 \003(\0132&.org.apache.hadoop.hive.ql" +
-      ".io.orc.Type\022D\n\010metadata\030\005 \003(\01322.org.apa" +
-      "che.hadoop.hive.ql.io.orc.UserMetadataIt" +
-      "em\022\024\n\014numberOfRows\030\006 \001(\004\022F\n\nstatistics\030\007" +
-      " \003(\01322.org.apache.hadoop.hive.ql.io.orc." +
-      "ColumnStatistics\022\026\n\016rowIndexStride\030\010 \001(\r" +
-      "\"\255\001\n\nPostScript\022\024\n\014footerLength\030\001 \001(\004\022F\n" +
-      "\013compression\030\002 \001(\01621.org.apache.hadoop.h" +
-      "ive.ql.io.orc.CompressionKind\022\034\n\024compres" +
-      "sionBlockSize\030\003 \001(\004\022\023\n\007version\030\004 \003(\rB\002\020\001",
-      "\022\016\n\005magic\030\300> \001(\t*:\n\017CompressionKind\022\010\n\004N" +
-      "ONE\020\000\022\010\n\004ZLIB\020\001\022\n\n\006SNAPPY\020\002\022\007\n\003LZO\020\003"
+      "aximumLength\030\004 \001(\r\022\021\n\tprecision\030\005 \001(\r\022\r\n" +
+      "\005scale\030\006 \001(\r\"\307\001\n\004Kind\022\013\n\007BOOLEAN\020\000\022\010\n\004BY" +
+      "TE\020\001\022\t\n\005SHORT\020\002\022\007\n\003INT\020\003\022\010\n\004LONG\020\004\022\t\n\005FL",
+      "OAT\020\005\022\n\n\006DOUBLE\020\006\022\n\n\006STRING\020\007\022\n\n\006BINARY\020" +
+      "\010\022\r\n\tTIMESTAMP\020\t\022\010\n\004LIST\020\n\022\007\n\003MAP\020\013\022\n\n\006S" +
+      "TRUCT\020\014\022\t\n\005UNION\020\r\022\013\n\007DECIMAL\020\016\022\010\n\004DATE\020" +
+      "\017\022\013\n\007VARCHAR\020\020\"x\n\021StripeInformation\022\016\n\006o" +
+      "ffset\030\001 \001(\004\022\023\n\013indexLength\030\002 \001(\004\022\022\n\ndata" +
+      "Length\030\003 \001(\004\022\024\n\014footerLength\030\004 \001(\004\022\024\n\014nu" +
+      "mberOfRows\030\005 \001(\004\"/\n\020UserMetadataItem\022\014\n\004" +
+      "name\030\001 \002(\t\022\r\n\005value\030\002 \002(\014\"\356\002\n\006Footer\022\024\n\014" +
+      "headerLength\030\001 \001(\004\022\025\n\rcontentLength\030\002 \001(" +
+      "\004\022D\n\007stripes\030\003 \003(\01323.org.apache.hadoop.h",
+      "ive.ql.io.orc.StripeInformation\0225\n\005types" +
+      "\030\004 \003(\0132&.org.apache.hadoop.hive.ql.io.or" +
+      "c.Type\022D\n\010metadata\030\005 \003(\01322.org.apache.ha" +
+      "doop.hive.ql.io.orc.UserMetadataItem\022\024\n\014" +
+      "numberOfRows\030\006 \001(\004\022F\n\nstatistics\030\007 \003(\01322" +
+      ".org.apache.hadoop.hive.ql.io.orc.Column" +
+      "Statistics\022\026\n\016rowIndexStride\030\010 \001(\r\"\255\001\n\nP" +
+      "ostScript\022\024\n\014footerLength\030\001 \001(\004\022F\n\013compr" +
+      "ession\030\002 \001(\01621.org.apache.hadoop.hive.ql" +
+      ".io.orc.CompressionKind\022\034\n\024compressionBl",
+      "ockSize\030\003 \001(\004\022\023\n\007version\030\004 \003(\rB\002\020\001\022\016\n\005ma" +
+      "gic\030\300> \001(\t*:\n\017CompressionKind\022\010\n\004NONE\020\000\022" +
+      "\010\n\004ZLIB\020\001\022\n\n\006SNAPPY\020\002\022\007\n\003LZO\020\003"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -15283,7 +15446,7 @@ public final class OrcProto {
           internal_static_org_apache_hadoop_hive_ql_io_orc_Type_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_Type_descriptor,
-              new java.lang.String[] { "Kind", "Subtypes", "FieldNames", "MaximumLength", });
+              new java.lang.String[] { "Kind", "Subtypes", "FieldNames", "MaximumLength", "Precision", "Scale", });
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeInformation_descriptor =
             getDescriptor().getMessageTypes().get(14);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeInformation_fieldAccessorTable = new
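
With the regenerated code above, decimal precision and scale round-trip
through the ORC Type message. A minimal sketch using only the accessors
visible in this diff (it assumes the generated classes are on the classpath):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.io.orc.OrcProto;

    public class OrcDecimalTypeSketch {
      public static void main(String[] args) {
        // Writer side: record precision/scale on the DECIMAL type entry.
        OrcProto.Type t = OrcProto.Type.newBuilder()
            .setKind(OrcProto.Type.Kind.DECIMAL)
            .setPrecision(10)
            .setScale(2)
            .build();

        // Reader side: files written before this change lack the fields,
        // so fall back to the maxima, as OrcStruct/RecordReaderImpl do below.
        int precision = t.hasPrecision() ? t.getPrecision() : HiveDecimal.MAX_PRECISION;
        int scale = t.hasScale() ? t.getScale() : HiveDecimal.MAX_SCALE;
        System.out.println("decimal(" + precision + "," + scale + ")");
      }
    }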

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Tue Nov  5 07:19:22 2013
@@ -105,7 +105,6 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFRegExpReplace;
 import org.apache.hadoop.hive.ql.udf.UDFRepeat;
 import org.apache.hadoop.hive.ql.udf.UDFReverse;
-import org.apache.hadoop.hive.ql.udf.UDFRound;
 import org.apache.hadoop.hive.ql.udf.UDFRpad;
 import org.apache.hadoop.hive.ql.udf.UDFSecond;
 import org.apache.hadoop.hive.ql.udf.UDFSign;
@@ -203,7 +202,7 @@ public final class FunctionRegistry {
 
     registerGenericUDF("size", GenericUDFSize.class);
 
-    registerUDF("round", UDFRound.class, false);
+    registerGenericUDF("round", GenericUDFRound.class);
     registerUDF("floor", UDFFloor.class, false);
     registerUDF("sqrt", UDFSqrt.class, false);
     registerUDF("ceil", UDFCeil.class, false);
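
Switching round() from a bridged UDF to a GenericUDF is what allows its
return type to depend on its argument types (see the decimal(38,2) plan
change in decimal_udf.q.out below). A hypothetical skeleton showing the
mechanism; this is not the real GenericUDFRound, and the fixed decimal(38,2)
return type is only an example:

    import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class TypeAwareRoundSketch extends GenericUDF {
      @Override
      public ObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
        // initialize() sees the argument types, so it can compute a
        // precision/scale-aware decimal return type; a bridged UDF cannot.
        return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            TypeInfoFactory.getDecimalTypeInfo(38, 2));
      }

      @Override
      public Object evaluate(DeferredObject[] args) throws HiveException {
        return null; // rounding logic omitted in this sketch
      }

      @Override
      public String getDisplayString(String[] children) {
        return "round(" + children[0] + ")";
      }
    }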

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/RoundWithNumDigitsDoubleToDouble.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/RoundWithNumDigitsDoubleToDouble.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/RoundWithNumDigitsDoubleToDouble.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/RoundWithNumDigitsDoubleToDouble.java Tue Nov  5 07:19:22 2013
@@ -19,8 +19,7 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.udf.UDFRound;
-import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.ql.udf.generic.RoundUtils;
 import org.apache.hadoop.io.IntWritable;
 
 // Vectorized implementation of ROUND(Col, N) function
@@ -29,28 +28,21 @@ public class RoundWithNumDigitsDoubleToD
   private static final long serialVersionUID = 1L;
 
   private IntWritable decimalPlaces;
-  private transient UDFRound roundFunc;
-  private transient DoubleWritable dw;
 
   public RoundWithNumDigitsDoubleToDouble(int colNum, long scalarVal, int outputColumn) {
     super(colNum, outputColumn);
     this.decimalPlaces = new IntWritable();
-    roundFunc = new UDFRound();
-    dw = new DoubleWritable();
     decimalPlaces.set((int) scalarVal);
   }
 
   public RoundWithNumDigitsDoubleToDouble() {
     super();
-    dw = new DoubleWritable();
-    roundFunc = new UDFRound();
   }
 
   // Round to the specified number of decimal places using the standard Hive round function.
   @Override
   public double func(double d) {
-    dw.set(d);
-    return roundFunc.evaluate(dw, decimalPlaces).get();
+    return RoundUtils.round(d, decimalPlaces.get());
   }
 
   void setDecimalPlaces(IntWritable decimalPlaces) {
@@ -61,14 +53,6 @@ public class RoundWithNumDigitsDoubleToD
     return this.decimalPlaces;
   }
 
-  void setRoundFunc(UDFRound roundFunc) {
-    this.roundFunc = roundFunc;
-  }
-
-  UDFRound getRoundFunc() {
-    return this.roundFunc;
-  }
-
   @Override
   public void setArg(long l) {
     this.decimalPlaces.set((int) l);
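
The vectorized expression now delegates to the same static helper as the new
row-mode GenericUDFRound, dropping the per-object UDFRound and DoubleWritable
plumbing. A minimal sketch of the call it reduces to, using the
RoundUtils.round(double, int) signature seen above (the expected output
assumes Hive's usual half-up rounding):

    import org.apache.hadoop.hive.ql.udf.generic.RoundUtils;

    public class RoundUtilsSketch {
      public static void main(String[] args) {
        // Same call func(double) now makes for ROUND(col, 2):
        System.out.println(RoundUtils.round(3.14159, 2)); // expected: 3.14
      }
    }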

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java Tue Nov  5 07:19:22 2013
@@ -25,6 +25,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -544,9 +545,10 @@ final class OrcStruct implements Writabl
       case DATE:
         return PrimitiveObjectInspectorFactory.javaDateObjectInspector;
       case DECIMAL:
-        // TODO: get precision/scale from TYPE
+        int precision = type.hasPrecision() ? type.getPrecision() : HiveDecimal.MAX_PRECISION;
+        int scale =  type.hasScale()? type.getScale() : HiveDecimal.MAX_SCALE;
         return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
-            TypeInfoFactory.decimalTypeInfo);
+            TypeInfoFactory.getDecimalTypeInfo(precision, scale));
       case STRUCT:
         return new OrcStructInspector(columnId, types);
       case UNION:

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java Tue Nov  5 07:19:22 2013
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.serde2.io.
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -1022,8 +1023,13 @@ class RecordReaderImpl implements Record
     private InStream valueStream;
     private IntegerReader scaleStream = null;
 
-    DecimalTreeReader(Path path, int columnId) {
+    private final int precision;
+    private final int scale;
+
+    DecimalTreeReader(Path path, int columnId, int precision, int scale) {
       super(path, columnId);
+      this.precision = precision;
+      this.scale = scale;
     }
 
     @Override
@@ -1057,8 +1063,9 @@ class RecordReaderImpl implements Record
     Object next(Object previous) throws IOException {
       super.next(previous);
       if (valuePresent) {
-        return HiveDecimal.create(SerializationUtils.readBigInteger(valueStream),
+        HiveDecimal dec = HiveDecimal.create(SerializationUtils.readBigInteger(valueStream),
             (int) scaleStream.next());
+        return HiveDecimalUtils.enforcePrecisionScale(dec, precision, scale);
       }
       return null;
     }
@@ -1892,7 +1899,9 @@ class RecordReaderImpl implements Record
       case DATE:
         return new DateTreeReader(path, columnId);
       case DECIMAL:
-        return new DecimalTreeReader(path, columnId);
+        int precision = type.hasPrecision() ? type.getPrecision() : HiveDecimal.MAX_PRECISION;
+        int scale =  type.hasScale()? type.getScale() : HiveDecimal.MAX_SCALE;
+        return new DecimalTreeReader(path, columnId, precision, scale);
       case STRUCT:
         return new StructTreeReader(path, columnId, types, included);
       case LIST:
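
Besides plumbing precision/scale into the reader, the DECIMAL path now runs
each decoded value through HiveDecimalUtils.enforcePrecisionScale(), so data
read from older files is coerced to the declared type. A minimal sketch of
that call (we assume, without verifying here, that it returns null when the
value cannot be represented at the requested precision/scale):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;

    public class EnforcePrecisionScaleSketch {
      public static void main(String[] args) {
        HiveDecimal dec = HiveDecimal.create("1809242.3151111344");
        // Same call the reader now makes after decoding a value:
        HiveDecimal adjusted = HiveDecimalUtils.enforcePrecisionScale(dec, 17, 9);
        System.out.println(adjusted); // expected: 1809242.315111134
      }
    }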

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java Tue Nov  5 07:19:22 2013
@@ -59,6 +59,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
@@ -1625,8 +1626,10 @@ class WriterImpl implements Writer, Memo
             type.setKind(OrcProto.Type.Kind.DATE);
             break;
           case DECIMAL:
-            // TODO: save precision/scale
+            DecimalTypeInfo decTypeInfo = (DecimalTypeInfo)((PrimitiveObjectInspector)treeWriter.inspector).getTypeInfo();
             type.setKind(OrcProto.Type.Kind.DECIMAL);
+            type.setPrecision(decTypeInfo.precision());
+            type.setScale(decTypeInfo.scale());
             break;
           default:
             throw new IllegalArgumentException("Unknown primitive category: " +

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Tue Nov  5 07:19:22 2013
@@ -75,6 +75,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper;
@@ -160,7 +161,7 @@ public class Vectorizer implements Physi
     supportedGenericUDFs.add(UDFLog.class);
     supportedGenericUDFs.add(UDFPower.class);
     supportedGenericUDFs.add(UDFPosMod.class);
-    supportedGenericUDFs.add(UDFRound.class);
+    supportedGenericUDFs.add(GenericUDFRound.class);
     supportedGenericUDFs.add(UDFSqrt.class);
     supportedGenericUDFs.add(UDFSign.class);
     supportedGenericUDFs.add(UDFRand.class);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java Tue Nov  5 07:19:22 2013
@@ -28,10 +28,10 @@ import org.apache.hadoop.security.UserGr
 
 public class HadoopDefaultAuthenticator implements HiveAuthenticationProvider {
 
-  private String userName;
-  private List<String> groupNames;
+  protected String userName;
+  protected List<String> groupNames;
   
-  private Configuration conf;
+  protected Configuration conf;
 
   @Override
   public List<String> getGroupNames() {
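
Widening these fields from private to protected is what enables the new
ProxyUserAuthenticator subclass added in this merge. A hypothetical sketch of
the kind of subclass this permits (not the real ProxyUserAuthenticator
source, which this mail only lists as copied from trunk):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;

    // Hypothetical: overrides the resolved user after the default lookup.
    public class FixedUserAuthenticatorSketch extends HadoopDefaultAuthenticator {
      @Override
      public void setConf(Configuration conf) {
        super.setConf(conf);         // resolves userName/groupNames as usual
        this.userName = "proxyuser"; // protected field is now reachable
      }
    }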

Modified: hive/branches/tez/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto (original)
+++ hive/branches/tez/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto Tue Nov  5 07:19:22 2013
@@ -135,6 +135,8 @@ message Type {
   repeated uint32 subtypes = 2 [packed=true];
   repeated string fieldNames = 3;
   optional uint32 maximumLength = 4;
+  optional uint32 precision = 5;
+  optional uint32 scale = 6;
 }
 
 message StripeInformation {

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Tue Nov  5 07:19:22 2013
@@ -28,7 +28,27 @@ import java.util.Map;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.ColAndCol;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.ColOrCol;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterExprAndExpr;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterExprOrExpr;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseLongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNotNull;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNull;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NotCol;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.RoundWithNumDigitsDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsFalse;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsNotNull;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsNull;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsTrue;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringLTrim;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringLower;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringUpper;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColLessDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColumnBetween;
@@ -59,7 +79,6 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.ql.udf.UDFLTrim;
 import org.apache.hadoop.hive.ql.udf.UDFLog;
 import org.apache.hadoop.hive.ql.udf.UDFOPMinus;
@@ -68,7 +87,6 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
 import org.apache.hadoop.hive.ql.udf.UDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.UDFPower;
-import org.apache.hadoop.hive.ql.udf.UDFRound;
 import org.apache.hadoop.hive.ql.udf.UDFSin;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
@@ -83,6 +101,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -766,15 +785,14 @@ public class TestVectorizationContext {
     Assert.assertEquals(FuncSinDoubleToDouble.class, ve.getClass());
 
     // Round without digits
-    gudfBridge = new GenericUDFBridge("round", false, UDFRound.class.getName());
-    mathFuncExpr.setGenericUDF(gudfBridge);
+    GenericUDFRound udfRound = new GenericUDFRound();
+    mathFuncExpr.setGenericUDF(udfRound);
     mathFuncExpr.setChildren(children2);
     ve = vc.getVectorExpression(mathFuncExpr);
     Assert.assertEquals(FuncRoundDoubleToDouble.class, ve.getClass());
 
     // Round with digits
-    gudfBridge = new GenericUDFBridge("round", false, UDFRound.class.getName());
-    mathFuncExpr.setGenericUDF(gudfBridge);
+    mathFuncExpr.setGenericUDF(udfRound);
     children2.add(new ExprNodeConstantDesc(4));
     mathFuncExpr.setChildren(children2);
     ve = vc.getVectorExpression(mathFuncExpr);
@@ -829,8 +847,7 @@ public class TestVectorizationContext {
     Assert.assertTrue(4.5 == ((FuncPowerDoubleToDouble) ve).getPower());
 
     //Round with default decimal places
-    gudfBridge = new GenericUDFBridge("round", false, UDFRound.class.getName());
-    mathFuncExpr.setGenericUDF(gudfBridge);
+    mathFuncExpr.setGenericUDF(udfRound);
     children2.clear();
     children2.add(colDesc2);
     mathFuncExpr.setChildren(children2);

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/udf_round.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/udf_round.q?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/udf_round.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/udf_round.q Tue Nov  5 07:19:22 2013
@@ -42,5 +42,5 @@ SELECT
   round(3.141592653589793, 15), round(3.141592653589793, 16)
 FROM src tablesample (1 rows);
 
-SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9)
+SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9), round(1809242.3151111344BD, 9), round(-1809242.3151111344BD, 9)
 FROM src tablesample (1 rows);

Modified: hive/branches/tez/ql/src/test/results/clientpositive/decimal_udf.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/decimal_udf.q.out?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/decimal_udf.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/decimal_udf.q.out Tue Nov  5 07:19:22 2013
@@ -1742,7 +1742,7 @@ STAGE PLANS:
           Select Operator
             expressions:
                   expr: round(key, 2)
-                  type: decimal(65,30)
+                  type: decimal(38,2)
             outputColumnNames: _col0
             ListSink
 

Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_round.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_round.q.out?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_round.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_round.q.out Tue Nov  5 07:19:22 2013
@@ -40,7 +40,7 @@ FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-55555	55555.0	55555.0	55555.0	55555.0	55560.0	55600.0	56000.0	60000.0	100000.0	0.0	0.0	0.0
+55555	55555	55555	55555	55555	55560	55600	56000	60000	100000	0	0	0
 PREHOOK: query: SELECT
   round(125.315), round(125.315, 0),
   round(125.315, 1), round(125.315, 2), round(125.315, 3), round(125.315, 4),
@@ -109,14 +109,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	3.0	3.1	3.14	3.142	3.1416	3.14159	3.141593	3.1415927	3.14159265	3.141592654	3.1415926536	3.14159265359	3.14159265359	3.1415926535898	3.1415926535898	3.14159265358979	3.141592653589793	3.141592653589793
-PREHOOK: query: SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9)
+PREHOOK: query: SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9), round(1809242.3151111344BD, 9), round(-1809242.3151111344BD, 9)
 FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9)
+POSTHOOK: query: SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9), round(1809242.3151111344BD, 9), round(-1809242.3151111344BD, 9)
 FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-1809242.315111134	-1809242.315111134
+1809242.315111134	-1809242.315111134	1809242.315111134	-1809242.315111134

Modified: hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml?rev=1538885&r1=1538884&r2=1538885&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml (original)
+++ hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml Tue Nov  5 07:19:22 2013
@@ -835,14 +835,7 @@
                  </object> 
                 </void> 
                 <void property="genericUDF"> 
-                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
-                  <void property="udfClassName"> 
-                   <string>org.apache.hadoop.hive.ql.udf.UDFRound</string> 
-                  </void> 
-                  <void property="udfName"> 
-                   <string>round</string> 
-                  </void> 
-                 </object> 
+                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound"/> 
                 </void> 
                 <void property="typeInfo"> 
                  <object idref="PrimitiveTypeInfo0"/> 
@@ -867,14 +860,7 @@
                  </object> 
                 </void> 
                 <void property="genericUDF"> 
-                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
-                  <void property="udfClassName"> 
-                   <string>org.apache.hadoop.hive.ql.udf.UDFRound</string> 
-                  </void> 
-                  <void property="udfName"> 
-                   <string>round</string> 
-                  </void> 
-                 </object> 
+                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound"/> 
                 </void> 
                 <void property="typeInfo"> 
                  <object idref="PrimitiveTypeInfo0"/> 
@@ -1243,14 +1229,7 @@
                  </object> 
                 </void> 
                 <void property="genericUDF"> 
-                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
-                  <void property="udfClassName"> 
-                   <string>org.apache.hadoop.hive.ql.udf.UDFRound</string> 
-                  </void> 
-                  <void property="udfName"> 
-                   <string>round</string> 
-                  </void> 
-                 </object> 
+                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound"/> 
                 </void> 
                 <void property="typeInfo"> 
                  <object idref="PrimitiveTypeInfo0"/>