Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/04 04:49:50 UTC

svn commit: r1622396 [4/8] - in /hive/branches/cbo: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ checkstyle/ common/src/java/or...

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java Thu Sep  4 02:49:46 2014
@@ -82,7 +82,7 @@ public final class VectorExpressionWrite
       this.objectInspector = objectInspector;
       return this;
     }
-    
+
     /**
      * The base implementation must be overridden by the Long specialization
      */
@@ -90,7 +90,7 @@ public final class VectorExpressionWrite
     public Object writeValue(long value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
-    
+
     /**
      * The base implementation must be overridden by the Long specialization
      */
@@ -112,7 +112,7 @@ public final class VectorExpressionWrite
     public Object setValue(Object field, double value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
-    
+
     /**
      * The base implementation must be overridden by the Bytes specialization
      */
@@ -120,7 +120,7 @@ public final class VectorExpressionWrite
     public Object writeValue(byte[] value, int start, int length) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
-    
+
     /**
      * The base implementation must be overridden by the Bytes specialization
      */
@@ -171,7 +171,7 @@ public final class VectorExpressionWrite
           "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
           row, lcv.noNulls, lcv.isRepeating, lcv.isNull[row], lcv.isNull[0]));
     }
-    
+
     @Override
     public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
       LongColumnVector lcv = (LongColumnVector) column;
@@ -192,7 +192,7 @@ public final class VectorExpressionWrite
         String.format(
           "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
           row, lcv.noNulls, lcv.isRepeating, lcv.isNull[row], lcv.isNull[0]));
-    }    
+    }
   }
 
   /**
@@ -221,7 +221,7 @@ public final class VectorExpressionWrite
           "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
           row, dcv.noNulls, dcv.isRepeating, dcv.isNull[row], dcv.isNull[0]));
     }
-    
+
     @Override
     public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
       DoubleColumnVector dcv = (DoubleColumnVector) column;
@@ -242,7 +242,7 @@ public final class VectorExpressionWrite
         String.format(
           "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
           row, dcv.noNulls, dcv.isRepeating, dcv.isNull[row], dcv.isNull[0]));
-    }    
+    }
    }
 
   /**
@@ -292,7 +292,7 @@ public final class VectorExpressionWrite
         String.format(
           "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
           row, bcv.noNulls, bcv.isRepeating, bcv.isNull[row], bcv.isNull[0]));
-    }    
+    }
   }
 
 
@@ -396,7 +396,7 @@ public final class VectorExpressionWrite
                 (SettableLongObjectInspector) fieldObjInspector);
           case VOID:
               return genVectorExpressionWritableVoid(
-                  (VoidObjectInspector) fieldObjInspector);        	  
+                  (VoidObjectInspector) fieldObjInspector);
           case BINARY:
             return genVectorExpressionWritableBinary(
                 (SettableBinaryObjectInspector) fieldObjInspector);
@@ -419,7 +419,7 @@ public final class VectorExpressionWrite
             throw new IllegalArgumentException("Unknown primitive type: " +
               ((PrimitiveObjectInspector) fieldObjInspector).getPrimitiveCategory());
         }
-        
+
       case STRUCT:
       case UNION:
       case MAP:
@@ -428,7 +428,7 @@ public final class VectorExpressionWrite
             fieldObjInspector.getCategory());
       default:
         throw new IllegalArgumentException("Unknown type " +
-            fieldObjInspector.getCategory());      
+            fieldObjInspector.getCategory());
       }
   }
 
@@ -526,7 +526,7 @@ public final class VectorExpressionWrite
       private Object obj;
       private Timestamp ts;
 
-      public VectorExpressionWriter init(SettableTimestampObjectInspector objInspector) 
+      public VectorExpressionWriter init(SettableTimestampObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         ts = new Timestamp(0);
@@ -550,7 +550,7 @@ public final class VectorExpressionWrite
         ((SettableTimestampObjectInspector) this.objectInspector).set(field, ts);
         return field;
       }
-      
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableTimestampObjectInspector) this.objectInspector).create(new Timestamp(0));
@@ -563,15 +563,15 @@ public final class VectorExpressionWrite
     return new VectorExpressionWriterBytes() {
       private Object obj;
       private Text text;
-      
-      public VectorExpressionWriter init(SettableHiveVarcharObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableHiveVarcharObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.text = new Text();
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(byte[] value, int start, int length) throws HiveException {
         text.set(value, start, length);
@@ -580,7 +580,7 @@ public final class VectorExpressionWrite
       }
 
       @Override
-      public Object setValue(Object field, byte[] value, int start, int length) 
+      public Object setValue(Object field, byte[] value, int start, int length)
           throws HiveException {
         if (null == field) {
           field = initValue(null);
@@ -589,7 +589,7 @@ public final class VectorExpressionWrite
         ((SettableHiveVarcharObjectInspector) this.objectInspector).set(field, text.toString());
         return field;
       }
-      
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableHiveVarcharObjectInspector) this.objectInspector)
@@ -603,24 +603,24 @@ public final class VectorExpressionWrite
     return new VectorExpressionWriterBytes() {
       private Object obj;
       private Text text;
-      
-      public VectorExpressionWriter init(SettableStringObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableStringObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.text = new Text();
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(byte[] value, int start, int length) throws HiveException {
         this.text.set(value, start, length);
         ((SettableStringObjectInspector) this.objectInspector).set(this.obj, this.text.toString());
         return this.obj;
       }
-      
+
       @Override
-      public Object setValue(Object field, byte[] value, int start, int length) 
+      public Object setValue(Object field, byte[] value, int start, int length)
           throws HiveException {
         if (null == field) {
           field = initValue(null);
@@ -628,12 +628,12 @@ public final class VectorExpressionWrite
         this.text.set(value, start, length);
         ((SettableStringObjectInspector) this.objectInspector).set(field, this.text.toString());
         return field;
-      }      
-      
+      }
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableStringObjectInspector) this.objectInspector).create(StringUtils.EMPTY);
-      }      
+      }
     }.init(fieldObjInspector);
   }
 
@@ -642,22 +642,22 @@ public final class VectorExpressionWrite
     return new VectorExpressionWriterBytes() {
       private Object obj;
       private byte[] bytes;
-      
-      public VectorExpressionWriter init(SettableBinaryObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableBinaryObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.bytes = ArrayUtils.EMPTY_BYTE_ARRAY;
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(byte[] value, int start, int length) throws HiveException {
         bytes = Arrays.copyOfRange(value, start, start + length);
         ((SettableBinaryObjectInspector) this.objectInspector).set(this.obj, bytes);
         return this.obj;
       }
-      
+
       @Override
       public Object setValue(Object field, byte[] value, int start, int length) throws HiveException {
         if (null == field) {
@@ -666,7 +666,7 @@ public final class VectorExpressionWrite
         bytes = Arrays.copyOfRange(value, start, start + length);
         ((SettableBinaryObjectInspector) this.objectInspector).set(field, bytes);
         return field;
-      }      
+      }
 
       @Override
       public Object initValue(Object ignored) {
@@ -680,20 +680,20 @@ public final class VectorExpressionWrite
       SettableLongObjectInspector fieldObjInspector) throws HiveException {
     return new VectorExpressionWriterLong() {
       private Object obj;
-      
-      public VectorExpressionWriter init(SettableLongObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableLongObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(long value) throws HiveException {
         ((SettableLongObjectInspector) this.objectInspector).set(this.obj, value);
         return this.obj;
       }
-      
+
       @Override
       public Object setValue(Object field, long value) throws HiveException {
         if (null == field) {
@@ -712,56 +712,55 @@ public final class VectorExpressionWrite
   }
 
   private static VectorExpressionWriter genVectorExpressionWritableVoid(
-	      VoidObjectInspector fieldObjInspector) throws HiveException {
-	    return new VectorExpressionWriterLong() {
-	      private Object obj;
-	      
-	      public VectorExpressionWriter init(VoidObjectInspector objInspector) 
-	          throws HiveException {
-	        super.init(objInspector);
-	        this.obj = initValue(null);
-	        return this;
-	      }
-	      
-	      @Override
-	      public Object writeValue(long value) throws HiveException {
-	        return this.obj;
-	      }
-	      
-	      @Override
-	      public Object setValue(Object field, long value) throws HiveException {
-	        if (null == field) {
-	          field = initValue(null);
-	        }
-	        return field;
-	      }
-
-	      @Override
-	      public Object initValue(Object ignored) {
-	        return ((VoidObjectInspector) this.objectInspector).copyObject(null);
-	      }
-	    }.init(fieldObjInspector);
-	  }
-  
-  
+    VoidObjectInspector fieldObjInspector) throws HiveException {
+    return new VectorExpressionWriterLong() {
+      private Object obj;
+
+      public VectorExpressionWriter init(VoidObjectInspector objInspector) throws HiveException {
+        super.init(objInspector);
+        this.obj = initValue(null);
+        return this;
+      }
+
+      @Override
+      public Object writeValue(long value) throws HiveException {
+        return this.obj;
+      }
+
+      @Override
+      public Object setValue(Object field, long value) throws HiveException {
+        if (null == field) {
+          field = initValue(null);
+        }
+        return field;
+      }
+
+      @Override
+      public Object initValue(Object ignored) {
+        return ((VoidObjectInspector) this.objectInspector).copyObject(null);
+      }
+    }.init(fieldObjInspector);
+  }
+
+
   private static VectorExpressionWriter genVectorExpressionWritableInt(
       SettableIntObjectInspector fieldObjInspector) throws HiveException {
     return new VectorExpressionWriterLong() {
       private Object obj;
-      
-      public VectorExpressionWriter init(SettableIntObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableIntObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(long value) throws HiveException {
         ((SettableIntObjectInspector) this.objectInspector).set(this.obj, (int) value);
         return this.obj;
       }
-      
+
       @Override
       public Object setValue(Object field, long value) throws HiveException {
         if (null == field) {
@@ -770,7 +769,7 @@ public final class VectorExpressionWrite
         ((SettableIntObjectInspector) this.objectInspector).set(field, (int) value);
         return field;
       }
-      
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableIntObjectInspector) this.objectInspector)
@@ -783,20 +782,20 @@ public final class VectorExpressionWrite
       SettableShortObjectInspector fieldObjInspector) throws HiveException {
     return new VectorExpressionWriterLong() {
       private Object obj;
-      
-      public VectorExpressionWriter init(SettableShortObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableShortObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(long value) throws HiveException {
         ((SettableShortObjectInspector) this.objectInspector).set(this.obj, (short) value);
         return this.obj;
       }
-      
+
       @Override
       public Object setValue(Object field, long value) throws HiveException {
         if (null == field) {
@@ -805,7 +804,7 @@ public final class VectorExpressionWrite
         ((SettableShortObjectInspector) this.objectInspector).set(field, (short) value);
         return field;
       }
-      
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableShortObjectInspector) this.objectInspector)
@@ -818,20 +817,20 @@ public final class VectorExpressionWrite
       SettableByteObjectInspector fieldObjInspector) throws HiveException {
     return new VectorExpressionWriterLong() {
       private Object obj;
-      
-      public VectorExpressionWriter init(SettableByteObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableByteObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(long value) throws HiveException {
         ((SettableByteObjectInspector) this.objectInspector).set(this.obj, (byte) value);
         return this.obj;
       }
-      
+
       @Override
       public Object setValue(Object field, long value) throws HiveException {
         if (null == field) {
@@ -840,7 +839,7 @@ public final class VectorExpressionWrite
         ((SettableByteObjectInspector) this.objectInspector).set(field, (byte) value);
         return field;
       }
-      
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableByteObjectInspector) this.objectInspector)
@@ -853,31 +852,31 @@ public final class VectorExpressionWrite
       SettableBooleanObjectInspector fieldObjInspector) throws HiveException {
     return new VectorExpressionWriterLong() {
       private Object obj;
-      
-      public VectorExpressionWriter init(SettableBooleanObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableBooleanObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(long value) throws HiveException {
-        ((SettableBooleanObjectInspector) this.objectInspector).set(this.obj, 
+        ((SettableBooleanObjectInspector) this.objectInspector).set(this.obj,
             value == 0 ? false : true);
         return this.obj;
       }
-      
+
       @Override
       public Object setValue(Object field, long value) throws HiveException {
         if (null == field) {
           field = initValue(null);
         }
-        ((SettableBooleanObjectInspector) this.objectInspector).set(field, 
+        ((SettableBooleanObjectInspector) this.objectInspector).set(field,
             value == 0 ? false : true);
         return field;
       }
-      
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableBooleanObjectInspector) this.objectInspector)
@@ -890,20 +889,20 @@ public final class VectorExpressionWrite
       SettableDoubleObjectInspector fieldObjInspector) throws HiveException {
     return new VectorExpressionWriterDouble() {
       private Object obj;
-      
-      public VectorExpressionWriter init(SettableDoubleObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableDoubleObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(double value) throws HiveException {
         ((SettableDoubleObjectInspector) this.objectInspector).set(this.obj, value);
         return this.obj;
       }
-      
+
       @Override
       public Object setValue(Object field, double value) throws HiveException {
         if (null == field) {
@@ -911,8 +910,8 @@ public final class VectorExpressionWrite
         }
         ((SettableDoubleObjectInspector) this.objectInspector).set(field, value);
         return field;
-      }      
-      
+      }
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableDoubleObjectInspector) this.objectInspector)
@@ -925,20 +924,20 @@ public final class VectorExpressionWrite
       SettableFloatObjectInspector fieldObjInspector) throws HiveException {
     return new VectorExpressionWriterDouble() {
       private Object obj;
-      
-      public VectorExpressionWriter init(SettableFloatObjectInspector objInspector) 
+
+      public VectorExpressionWriter init(SettableFloatObjectInspector objInspector)
           throws HiveException {
         super.init(objInspector);
         this.obj = initValue(null);
         return this;
       }
-      
+
       @Override
       public Object writeValue(double value) throws HiveException {
         ((SettableFloatObjectInspector) this.objectInspector).set(this.obj, (float) value);
         return this.obj;
       }
-      
+
       @Override
       public Object setValue(Object field, double value) throws HiveException {
         if (null == field) {
@@ -947,7 +946,7 @@ public final class VectorExpressionWrite
         ((SettableFloatObjectInspector) this.objectInspector).set(field, (float) value);
         return field;
       }
-      
+
       @Override
       public Object initValue(Object ignored) {
         return ((SettableFloatObjectInspector) this.objectInspector)
@@ -1027,25 +1026,25 @@ public final class VectorExpressionWrite
    */
   public static VectorExpressionWriter[] getExpressionWriters(StructObjectInspector objInspector)
       throws HiveException {
-    
+
     if (objInspector.isSettable()) {
       return getSettableExpressionWriters((SettableStructObjectInspector) objInspector);
     }
-    
+
     List<? extends StructField> allFieldRefs = objInspector.getAllStructFieldRefs();
-    
+
     VectorExpressionWriter[] expressionWriters = new VectorExpressionWriter[allFieldRefs.size()];
-    
+
     for(int i=0; i<expressionWriters.length; ++i) {
       expressionWriters[i] = genVectorExpressionWritable(allFieldRefs.get(i).getFieldObjectInspector());
     }
-    
+
     return expressionWriters;
   }
 
   public static VectorExpressionWriter[] getSettableExpressionWriters(
       SettableStructObjectInspector objInspector) throws HiveException {
-    List<? extends StructField> fieldsRef = objInspector.getAllStructFieldRefs(); 
+    List<? extends StructField> fieldsRef = objInspector.getAllStructFieldRefs();
     VectorExpressionWriter[] writers = new VectorExpressionWriter[fieldsRef.size()];
     for(int i=0; i<writers.length; ++i) {
       StructField fieldRef = fieldsRef.get(i);
@@ -1054,19 +1053,19 @@ public final class VectorExpressionWrite
       writers[i] = genVectorExpressionWritable(objInspector, fieldRef, baseWriter);
     }
     return writers;
-    
+
   }
-  
+
   /**
-   * VectorExpressionWriterSetter helper for vector expression writers that use 
+   * VectorExpressionWriterSetter helper for vector expression writers that use
    * settable ObjectInspector fields to assign the values.
-   * This is used by the OrcStruct serialization (eg. CREATE TABLE ... AS ...) 
+   * This is used by the OrcStruct serialization (eg. CREATE TABLE ... AS ...)
    */
   private static class VectorExpressionWriterSetter extends VectorExpressionWriterBase {
     private SettableStructObjectInspector settableObjInspector;
     private StructField fieldRef;
     private VectorExpressionWriter baseWriter;
-    
+
     public VectorExpressionWriterSetter init(
         SettableStructObjectInspector objInspector,
         StructField fieldRef,
@@ -1087,15 +1086,15 @@ public final class VectorExpressionWrite
     @Override
     public Object setValue(Object row, ColumnVector column, int columnRow)
         throws HiveException {
-      
+
       // NULLs are handled by each individual base writer setter
       // We could handle NULLs centrally here but that would result in spurious allocs
-      
+
       Object fieldValue = this.settableObjInspector.getStructFieldData(row, fieldRef);
       fieldValue = baseWriter.setValue(fieldValue, column, columnRow);
       return this.settableObjInspector.setStructFieldData(row, fieldRef, fieldValue);
     }
-    
+
     @Override
     public Object initValue(Object struct) throws HiveException {
       Object initValue = this.baseWriter.initValue(null);
@@ -1103,7 +1102,7 @@ public final class VectorExpressionWrite
       return struct;
     }
   }
-  
+
   private static VectorExpressionWriter genVectorExpressionWritable(
       SettableStructObjectInspector objInspector,
       StructField fieldRef,

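The hunks above are almost entirely whitespace cleanup, but the factory methods they touch all share one shape worth noting: each genVectorExpressionWritable* method builds an anonymous subclass of a type-specialized writer base and immediately calls a fluent init() on it, pre-allocating a single reusable writable so the hot writeValue() path allocates nothing. A minimal sketch of that pattern, with simplified stand-in types rather than the actual Hive interfaces:

    // Sketch of the "anonymous subclass + fluent init" pattern used by the
    // genVectorExpressionWritable* factories above. All types are simplified
    // stand-ins so the example compiles on its own.
    public class WriterFactorySketch {

      interface SettableLongInspector {
        Object create(long initial);          // allocate a new writable
        void set(Object field, long value);   // mutate an existing writable
      }

      // Mirrors VectorExpressionWriterLong: holds the inspector and exposes
      // the hooks that specializations override.
      static abstract class LongWriter {
        protected SettableLongInspector inspector;

        LongWriter init(SettableLongInspector inspector) {
          this.inspector = inspector;   // fluent init, returns this
          return this;
        }

        abstract Object writeValue(long value);             // reuses one object
        abstract Object setValue(Object field, long value); // writes into caller's object
      }

      // Factory in the style of genVectorExpressionWritableLong: the anonymous
      // subclass caches one writable so writeValue never allocates.
      static LongWriter makeLongWriter(SettableLongInspector oi) {
        return new LongWriter() {
          private Object obj;

          @Override
          LongWriter init(SettableLongInspector oi) {
            super.init(oi);
            this.obj = oi.create(0L);   // pre-allocate the reused writable
            return this;
          }

          @Override
          Object writeValue(long value) {
            this.inspector.set(this.obj, value);  // overwrite in place
            return this.obj;
          }

          @Override
          Object setValue(Object field, long value) {
            if (field == null) {
              field = this.inspector.create(0L);  // lazily allocate for the caller
            }
            this.inspector.set(field, value);
            return field;
          }
        }.init(oi);
      }

      public static void main(String[] args) {
        SettableLongInspector oi = new SettableLongInspector() {
          public Object create(long initial) { return new long[] { initial }; }
          public void set(Object field, long value) { ((long[]) field)[0] = value; }
        };
        LongWriter w = makeLongWriter(oi);
        System.out.println(w.writeValue(7) == w.writeValue(9)); // true: one object, reused
      }
    }
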
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java Thu Sep  4 02:49:46 2014
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hive.ql.hooks;
 
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.fs.Path;
@@ -49,7 +51,8 @@ public class ReadEntity extends Entity i
   // For views, the entities can be nested - by default, entities are at the top level
   private final Set<ReadEntity> parents = new HashSet<ReadEntity>();
 
-
+  // The accessed columns of the query
+  private final List<String> accessedColumns = new ArrayList<String>();
 
   /**
    * For serialization only.
@@ -159,4 +162,8 @@ public class ReadEntity extends Entity i
   public void noLockNeeded() {
     needsLock = false;
   }
+
+  public List<String> getAccessedColumns() {
+    return accessedColumns;
+  }
 }

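The ReadEntity change above adds a per-entity record of which columns a query touched, exposed through a getter that returns the live list (there is no setter; callers append in place). A minimal sketch of that accessor style, with ReadEntity reduced to a stand-in:

    import java.util.ArrayList;
    import java.util.List;

    // Stand-in for the ReadEntity change: the getter hands back the live,
    // mutable list, so producers populate it in place.
    class ReadEntitySketch {
      private final List<String> accessedColumns = new ArrayList<String>();

      public List<String> getAccessedColumns() {
        return accessedColumns;
      }

      public static void main(String[] args) {
        ReadEntitySketch entity = new ReadEntitySketch();
        // A column-pruning or authorization hook would append what it reads:
        entity.getAccessedColumns().add("id");
        entity.getAccessedColumns().add("name");
        System.out.println(entity.getAccessedColumns()); // [id, name]
      }
    }
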
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java Thu Sep  4 02:49:46 2014
@@ -78,7 +78,7 @@ public class HiveIgnoreKeyTextOutputForm
     final int finalRowSeparator = rowSeparator;
     FileSystem fs = outPath.getFileSystem(jc);
     final OutputStream outStream = Utilities.createCompressedStream(jc,
-	fs.create(outPath, progress), isCompressed);
+    fs.create(outPath, progress), isCompressed);
     return new RecordWriter() {
       @Override
       public void write(Writable r) throws IOException {

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java Thu Sep  4 02:49:46 2014
@@ -54,7 +54,7 @@ public class HiveNullValueSequenceFileOu
 
     FileSystem fs = finalOutPath.getFileSystem(jc);
     final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc, fs, finalOutPath,
-	HiveKey.class, NullWritable.class, isCompressed, progress);
+    HiveKey.class, NullWritable.class, isCompressed, progress);
 
     keyWritable = new HiveKey();
     keyIsText = valueClass.equals(Text.class);

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java Thu Sep  4 02:49:46 2014
@@ -62,7 +62,7 @@ public class HiveSequenceFileOutputForma
 
     FileSystem fs = finalOutPath.getFileSystem(jc);
     final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc, fs, finalOutPath,
-	BytesWritable.class, valueClass, isCompressed, progress);
+    BytesWritable.class, valueClass, isCompressed, progress);
 
     return new RecordWriter() {
       @Override

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java Thu Sep  4 02:49:46 2014
@@ -133,7 +133,7 @@ public class RCFileOutputFormat extends
 
     RCFileOutputFormat.setColumnNumber(jc, cols.length);
     final RCFile.Writer outWriter = Utilities.createRCFileWriter(jc,
-	finalOutPath.getFileSystem(jc), finalOutPath, isCompressed, progress);
+    finalOutPath.getFileSystem(jc), finalOutPath, isCompressed, progress);
 
     return new org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter() {
       @Override

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java Thu Sep  4 02:49:46 2014
@@ -64,7 +64,7 @@ public class RCFileRecordReader<K extend
     private final Map<String, RCFileSyncEntry> cache;
 
     public RCFileSyncCache() {
-	cache = Collections.synchronizedMap(new WeakHashMap<String, RCFileSyncEntry>());
+      cache = Collections.synchronizedMap(new WeakHashMap<String, RCFileSyncEntry>());
     }
 
     public void put(FileSplit split, long endSync) {

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeTask.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeTask.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeTask.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeTask.java Thu Sep  4 02:49:46 2014
@@ -373,6 +373,10 @@ public class MergeTask extends Task<Merg
       }
     }
 
+    if (format == null || format.trim().equals("")) {
+      printUsage();
+    }
+    
     MergeWork mergeWork = null;
     if (format.equals("rcfile")) {
       mergeWork = new MergeWork(inputPaths, new Path(outputDir), RCFileInputFormat.class);

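A note on the MergeTask hunk: without the new guard, an unset or blank format would reach format.equals("rcfile") and throw a NullPointerException; the guard routes that case to printUsage() first. The sketch below assumes, as is typical for CLI usage handlers, that printUsage() terminates the process; the "orcfile" branch is illustrative, not taken from this diff:

    // Sketch of the validate-before-dispatch guard added in MergeTask.
    class FormatGuardSketch {
      static void printUsage() {
        System.err.println("usage: merge <input> <output> <format>");
        System.exit(1); // assumption: the usage handler exits, as most CLIs do
      }

      static void dispatch(String format) {
        if (format == null || format.trim().equals("")) {
          printUsage(); // never falls through to the equals() calls below
        }
        if (format.equals("rcfile")) {
          System.out.println("merging RC files");
        } else if (format.equals("orcfile")) {   // illustrative branch
          System.out.println("merging ORC files");
        } else {
          printUsage();
        }
      }

      public static void main(String[] args) {
        dispatch(args.length > 0 ? args[0] : null);
      }
    }
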
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java Thu Sep  4 02:49:46 2014
@@ -910,31 +910,31 @@ public class OrcInputFormat  implements 
 
   static List<OrcSplit> generateSplitsInfo(Configuration conf)
       throws IOException {
-	  // use threads to resolve directories into splits
-	  Context context = new Context(conf);
-	  for(Path dir: getInputPaths(conf)) {
-	    FileSystem fs = dir.getFileSystem(conf);
-	    context.schedule(new FileGenerator(context, fs, dir));
-	  }
-	  context.waitForTasks();
-	  // deal with exceptions
-	  if (!context.errors.isEmpty()) {
-	    List<IOException> errors =
-	        new ArrayList<IOException>(context.errors.size());
-	    for(Throwable th: context.errors) {
-	      if (th instanceof IOException) {
-	        errors.add((IOException) th);
-	      } else {
-	        throw new RuntimeException("serious problem", th);
-	      }
-	    }
-	    throw new InvalidInputException(errors);
-	  }
+    // use threads to resolve directories into splits
+    Context context = new Context(conf);
+    for(Path dir: getInputPaths(conf)) {
+      FileSystem fs = dir.getFileSystem(conf);
+      context.schedule(new FileGenerator(context, fs, dir));
+    }
+    context.waitForTasks();
+    // deal with exceptions
+    if (!context.errors.isEmpty()) {
+      List<IOException> errors =
+          new ArrayList<IOException>(context.errors.size());
+      for(Throwable th: context.errors) {
+        if (th instanceof IOException) {
+          errors.add((IOException) th);
+        } else {
+          throw new RuntimeException("serious problem", th);
+        }
+      }
+      throw new InvalidInputException(errors);
+    }
     if (context.cacheStripeDetails) {
       LOG.info("FooterCacheHitRatio: " + context.cacheHitCounter.get() + "/"
           + context.numFilesCounter.get());
     }
-	  return context.splits;
+    return context.splits;
   }
 
   @Override
@@ -998,14 +998,14 @@ public class OrcInputFormat  implements 
           ((FileSplit) inputSplit).getPath(),
           OrcFile.readerOptions(conf)), conf, (FileSplit) inputSplit);
     }
-    
+
     OrcSplit split = (OrcSplit) inputSplit;
     reporter.setStatus(inputSplit.toString());
 
     Options options = new Options(conf).reporter(reporter);
     final RowReader<OrcStruct> inner = getReader(inputSplit, options);
-    
-    
+
+
     /*Even though there are no delta files, we still need to produce row ids so that an
     * UPDATE or DELETE statement would work on a table which didn't have any previous updates*/
     if (split.isOriginal() && split.getDeltas().isEmpty()) {

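The generateSplitsInfo rewrite above is an indentation fix, but the method's error-handling shape is worth spelling out: worker tasks that resolve directories record their failures centrally; after waiting, the caller bundles IOExceptions into a single InvalidInputException and treats any other Throwable as a bug ("serious problem"). A minimal sketch of that funnel, with a plain IOException standing in for InvalidInputException and an executor standing in for the Context scheduler:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    // Sketch of the error funnel in generateSplitsInfo: workers record
    // Throwables; the caller rethrows expected IO failures together and
    // escalates anything else as fatal.
    class SplitErrorFunnelSketch {
      final Queue<Throwable> errors = new ConcurrentLinkedQueue<Throwable>();

      void run(List<Runnable> tasks) throws IOException, InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (final Runnable task : tasks) {
          pool.submit(new Runnable() {
            public void run() {
              try {
                task.run();
              } catch (Throwable th) {
                errors.add(th); // never let a worker die silently
              }
            }
          });
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES); // stand-in for waitForTasks()
        if (!errors.isEmpty()) {
          List<IOException> ioErrors = new ArrayList<IOException>(errors.size());
          for (Throwable th : errors) {
            if (th instanceof IOException) {
              ioErrors.add((IOException) th); // expected: report all together
            } else {
              throw new RuntimeException("serious problem", th); // a bug
            }
          }
          throw new IOException(ioErrors.size() + " input error(s); first: "
              + ioErrors.get(0).getMessage(), ioErrors.get(0));
        }
      }

      public static void main(String[] args) throws Exception {
        SplitErrorFunnelSketch s = new SplitErrorFunnelSketch();
        List<Runnable> tasks = new ArrayList<Runnable>();
        tasks.add(new Runnable() { public void run() { /* resolves a dir */ } });
        s.run(tasks); // completes without error
      }
    }
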
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java Thu Sep  4 02:49:46 2014
@@ -96,9 +96,6 @@ class WriterImpl implements Writer, Memo
   private static final int HDFS_BUFFER_SIZE = 256 * 1024;
   private static final int MIN_ROW_INDEX_STRIDE = 1000;
 
-  // HDFS requires blocks < 2GB and multiples of 512, so pick 1.5GB
-  private static final long MAX_BLOCK_SIZE = 1536 * 1024 * 1024;
-
   // threshold above which buffer size will be automatically resized
   private static final int COLUMN_COUNT_THRESHOLD = 1000;
 
@@ -135,8 +132,6 @@ class WriterImpl implements Writer, Memo
     new TreeMap<String, ByteString>();
   private final StreamFactory streamFactory = new StreamFactory();
   private final TreeWriter treeWriter;
-  private final OrcProto.RowIndex.Builder rowIndex =
-      OrcProto.RowIndex.newBuilder();
   private final boolean buildIndex;
   private final MemoryManager memoryManager;
   private final OrcFile.Version version;
@@ -678,7 +673,7 @@ class WriterImpl implements Writer, Memo
       if (rowIndexStream != null) {
         if (rowIndex.getEntryCount() != requiredIndexEntries) {
           throw new IllegalArgumentException("Column has wrong number of " +
-               "index entries found: " + rowIndexEntry + " expected: " +
+               "index entries found: " + rowIndex.getEntryCount() + " expected: " +
                requiredIndexEntries);
         }
         rowIndex.build().writeTo(rowIndexStream);
@@ -1005,6 +1000,8 @@ class WriterImpl implements Writer, Memo
     private final float dictionaryKeySizeThreshold;
     private boolean useDictionaryEncoding = true;
     private boolean isDirectV2 = true;
+    private boolean doneDictionaryCheck;
+    private final boolean strideDictionaryCheck;
 
     StringTreeWriter(int columnId,
                      ObjectInspector inspector,
@@ -1025,9 +1022,14 @@ class WriterImpl implements Writer, Memo
       directLengthOutput = createIntegerWriter(writer.createStream(id,
           OrcProto.Stream.Kind.LENGTH), false, isDirectV2, writer);
       dictionaryKeySizeThreshold = writer.getConfiguration().getFloat(
-        HiveConf.ConfVars.HIVE_ORC_DICTIONARY_KEY_SIZE_THRESHOLD.varname,
-        HiveConf.ConfVars.HIVE_ORC_DICTIONARY_KEY_SIZE_THRESHOLD.
-          defaultFloatVal);
+          HiveConf.ConfVars.HIVE_ORC_DICTIONARY_KEY_SIZE_THRESHOLD.varname,
+          HiveConf.ConfVars.HIVE_ORC_DICTIONARY_KEY_SIZE_THRESHOLD.
+              defaultFloatVal);
+      strideDictionaryCheck = writer.getConfiguration().getBoolean(
+          HiveConf.ConfVars.HIVE_ORC_ROW_INDEX_STRIDE_DICTIONARY_CHECK.varname,
+          HiveConf.ConfVars.HIVE_ORC_ROW_INDEX_STRIDE_DICTIONARY_CHECK.
+            defaultBoolVal);
+      doneDictionaryCheck = false;
     }
 
     /**
@@ -1045,21 +1047,71 @@ class WriterImpl implements Writer, Memo
       super.write(obj);
       if (obj != null) {
         Text val = getTextValue(obj);
-        rows.add(dictionary.add(val));
+        if (useDictionaryEncoding || !strideDictionaryCheck) {
+          rows.add(dictionary.add(val));
+        } else {
+          // write data and length
+          directStreamOutput.write(val.getBytes(), 0, val.getLength());
+          directLengthOutput.write(val.getLength());
+        }
         indexStatistics.updateString(val);
       }
     }
 
+    private boolean checkDictionaryEncoding() {
+      if (!doneDictionaryCheck) {
+        // Set the flag indicating whether or not to use dictionary encoding
+        // based on whether or not the fraction of distinct keys over number of
+        // non-null rows is less than the configured threshold
+        float ratio = rows.size() > 0 ? (float) (dictionary.size()) / rows.size() : 0.0f;
+        useDictionaryEncoding = !isDirectV2 || ratio <= dictionaryKeySizeThreshold;
+        doneDictionaryCheck = true;
+      }
+      return useDictionaryEncoding;
+    }
+
     @Override
     void writeStripe(OrcProto.StripeFooter.Builder builder,
                      int requiredIndexEntries) throws IOException {
-      // Set the flag indicating whether or not to use dictionary encoding
-      // based on whether or not the fraction of distinct keys over number of
-      // non-null rows is less than the configured threshold
-      useDictionaryEncoding =
-        (!isDirectV2) || (rows.size() > 0 &&
-                          (float)(dictionary.size()) / rows.size() <=
-                            dictionaryKeySizeThreshold);
+      // if the number of rows in the stripe is less than dictionaryCheckAfterRows,
+      // the dictionary check will not have happened yet, so do it here.
+      checkDictionaryEncoding();
+
+      if (useDictionaryEncoding) {
+        flushDictionary();
+      } else {
+        // flush out any leftover entries from the dictionary
+        if (rows.size() > 0) {
+          flushDictionary();
+        }
+
+        // suppress the stream for every stripe if dictionary is disabled
+        stringOutput.suppress();
+      }
+
+      // we need to build the rowindex before calling super, since it
+      // writes it out.
+      super.writeStripe(builder, requiredIndexEntries);
+      stringOutput.flush();
+      lengthOutput.flush();
+      rowOutput.flush();
+      directStreamOutput.flush();
+      directLengthOutput.flush();
+      // reset all of the fields to be ready for the next stripe.
+      dictionary.clear();
+      savedRowIndex.clear();
+      rowIndexValueCount.clear();
+      recordPosition(rowIndexPosition);
+      rowIndexValueCount.add(0L);
+
+      if (!useDictionaryEncoding) {
+        // record the start positions of first index stride of next stripe i.e
+        // beginning of the direct streams when dictionary is disabled
+        recordDirectStreamPosition();
+      }
+    }
+
+    private void flushDictionary() throws IOException {
       final int[] dumpOrder = new int[dictionary.size()];
 
       if (useDictionaryEncoding) {
@@ -1113,21 +1165,7 @@ class WriterImpl implements Writer, Memo
           }
         }
       }
-      // we need to build the rowindex before calling super, since it
-      // writes it out.
-      super.writeStripe(builder, requiredIndexEntries);
-      stringOutput.flush();
-      lengthOutput.flush();
-      rowOutput.flush();
-      directStreamOutput.flush();
-      directLengthOutput.flush();
-      // reset all of the fields to be ready for the next stripe.
-      dictionary.clear();
       rows.clear();
-      savedRowIndex.clear();
-      rowIndexValueCount.clear();
-      recordPosition(rowIndexPosition);
-      rowIndexValueCount.add(0L);
     }
 
     @Override
@@ -1165,10 +1203,30 @@ class WriterImpl implements Writer, Memo
       OrcProto.RowIndexEntry.Builder rowIndexEntry = getRowIndexEntry();
       rowIndexEntry.setStatistics(indexStatistics.serialize());
       indexStatistics.reset();
-      savedRowIndex.add(rowIndexEntry.build());
+      OrcProto.RowIndexEntry base = rowIndexEntry.build();
+      savedRowIndex.add(base);
       rowIndexEntry.clear();
       recordPosition(rowIndexPosition);
       rowIndexValueCount.add(Long.valueOf(rows.size()));
+      if (strideDictionaryCheck) {
+        checkDictionaryEncoding();
+      }
+      if (!useDictionaryEncoding) {
+        if (rows.size() > 0) {
+          flushDictionary();
+          // just record the start positions of next index stride
+          recordDirectStreamPosition();
+        } else {
+          // record the start positions of next index stride
+          recordDirectStreamPosition();
+          getRowIndex().addEntry(base);
+        }
+      }
+    }
+
+    private void recordDirectStreamPosition() throws IOException {
+      directStreamOutput.getPosition(rowIndexPosition);
+      directLengthOutput.getPosition(rowIndexPosition);
     }
 
     @Override

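The substance of the WriterImpl change is checkDictionaryEncoding(): instead of deciding dictionary-vs-direct encoding only at stripe flush, the writer can now (when the new strideDictionaryCheck flag is on) decide at the first row-index stride, latch the answer, and write subsequent values straight to the direct streams. The decision itself is a ratio test; in the real code it is also forced on when the writer is not direct-v2 (the !isDirectV2 clause). A minimal sketch of just the latched ratio check:

    import java.util.HashSet;
    import java.util.Set;

    // Sketch of the one-shot dictionary decision above: compare distinct
    // keys to rows seen, once, then latch the answer for the stripe.
    class DictionaryCheckSketch {
      private final float threshold;   // the dictionary key-size threshold setting
      private final Set<String> dictionary = new HashSet<String>();
      private int rows = 0;
      private boolean useDictionary = true;
      private boolean doneCheck = false;

      DictionaryCheckSketch(float threshold) { this.threshold = threshold; }

      void write(String value) {
        rows++;
        dictionary.add(value); // track distinct keys while still undecided
      }

      // Mirrors checkDictionaryEncoding(): fraction of distinct keys over rows.
      boolean checkDictionaryEncoding() {
        if (!doneCheck) {
          float ratio = rows > 0 ? (float) dictionary.size() / rows : 0.0f;
          useDictionary = ratio <= threshold;
          doneCheck = true; // latch: later calls return the cached answer
        }
        return useDictionary;
      }

      public static void main(String[] args) {
        DictionaryCheckSketch lowCard = new DictionaryCheckSketch(0.8f);
        for (int i = 0; i < 10000; i++) lowCard.write("state-" + (i % 50));
        System.out.println(lowCard.checkDictionaryEncoding()); // true: 50/10000 <= 0.8

        DictionaryCheckSketch highCard = new DictionaryCheckSketch(0.8f);
        for (int i = 0; i < 10000; i++) highCard.write("uuid-" + i);
        System.out.println(highCard.checkDictionaryEncoding()); // false: 1.0 > 0.8
      }
    }
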
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java Thu Sep  4 02:49:46 2014
@@ -95,7 +95,7 @@ public class HiveSchemaConverter {
         int scale = decimalTypeInfo.scale();
         int bytes = ParquetHiveSerDe.PRECISION_TO_BYTE_COUNT[prec - 1];
         return Types.optional(PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY).length(bytes).as(OriginalType.DECIMAL).
-        		scale(scale).precision(prec).named(name);
+            scale(scale).precision(prec).named(name);
       } else if (typeInfo.equals(TypeInfoFactory.unknownTypeInfo)) {
         throw new UnsupportedOperationException("Unknown type not implemented");
       } else {

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java Thu Sep  4 02:49:46 2014
@@ -140,7 +140,7 @@ public class DataWritableReadSupport ext
               throw new IllegalStateException(msg);
             }
           }
-	}
+        }
       }
       requestedSchemaByUser = resolveSchemaAccess(new MessageType(fileSchema.getName(),
               typeListWanted), fileSchema, configuration);

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java Thu Sep  4 02:49:46 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.lockmgr
 import org.apache.hadoop.hive.ql.metadata.*;
 
 import java.util.*;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReentrantLock;
 
 /**
@@ -38,7 +39,7 @@ public class EmbeddedLockManager impleme
 
   private HiveLockManagerCtx ctx;
 
-  private int sleepTime = 1000;
+  private long sleepTime = 1000;
   private int numRetriesForLock = 0;
   private int numRetriesForUnLock = 0;
 
@@ -82,12 +83,13 @@ public class EmbeddedLockManager impleme
 
   public void refresh() {
     HiveConf conf = ctx.getConf();
-    sleepTime = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000;
+    sleepTime = conf.getTimeVar(
+        HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES, TimeUnit.MILLISECONDS);
     numRetriesForLock = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
     numRetriesForUnLock = conf.getIntVar(HiveConf.ConfVars.HIVE_UNLOCK_NUMRETRIES);
   }
 
-  public HiveLock lock(HiveLockObject key, HiveLockMode mode, int numRetriesForLock, int sleepTime)
+  public HiveLock lock(HiveLockObject key, HiveLockMode mode, int numRetriesForLock, long sleepTime)
       throws LockException {
     for (int i = 0; i <= numRetriesForLock; i++) {
       if (i > 0) {
@@ -101,7 +103,7 @@ public class EmbeddedLockManager impleme
     return null;
   }
 
-  private void sleep(int sleepTime) {
+  private void sleep(long sleepTime) {
     try {
       Thread.sleep(sleepTime);
     } catch (InterruptedException e) {
@@ -109,7 +111,7 @@ public class EmbeddedLockManager impleme
     }
   }
 
-  public List<HiveLock> lock(List<HiveLockObj> objs, int numRetriesForLock, int sleepTime)
+  public List<HiveLock> lock(List<HiveLockObj> objs, int numRetriesForLock, long sleepTime)
       throws LockException {
     sortLocks(objs);
     for (int i = 0; i <= numRetriesForLock; i++) {
@@ -132,7 +134,7 @@ public class EmbeddedLockManager impleme
   }
 
   private List<HiveLock> lockPrimitive(List<HiveLockObj> objs, int numRetriesForLock,
-      int sleepTime) throws LockException {
+      long sleepTime) throws LockException {
     List<HiveLock> locks = new ArrayList<HiveLock>();
     for (HiveLockObj obj : objs) {
       HiveLock lock = lockPrimitive(obj.getObj(), obj.getMode());
@@ -164,7 +166,7 @@ public class EmbeddedLockManager impleme
     });
   }
 
-  public void unlock(HiveLock hiveLock, int numRetriesForUnLock, int sleepTime)
+  public void unlock(HiveLock hiveLock, int numRetriesForUnLock, long sleepTime)
       throws LockException {
     String[] paths = hiveLock.getHiveLockObject().getPaths();
     HiveLockObjectData data = hiveLock.getHiveLockObject().getData();
@@ -179,7 +181,7 @@ public class EmbeddedLockManager impleme
     throw new LockException("Failed to release lock " + hiveLock);
   }
 
-  public void releaseLocks(List<HiveLock> hiveLocks, int numRetriesForUnLock, int sleepTime) {
+  public void releaseLocks(List<HiveLock> hiveLocks, int numRetriesForUnLock, long sleepTime) {
     for (HiveLock locked : hiveLocks) {
       try {
         unlock(locked, numRetriesForUnLock, sleepTime);

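The EmbeddedLockManager hunks widen sleepTime from int to long and read it with getTimeVar(..., TimeUnit.MILLISECONDS) instead of getIntVar(...) * 1000, so the retry interval becomes a unit-suffixed time setting rather than a bare seconds count multiplied by hand. The retry shape itself is unchanged: sleep before every attempt except the first, give up after numRetries extra tries. A minimal sketch of that loop, with a hand-rolled stand-in for the conf lookup:

    import java.util.concurrent.TimeUnit;

    // Sketch of the retry-with-sleep loop used by the lock managers above,
    // after the int-seconds -> long-millis change. parseTimeVar is a
    // stand-in for HiveConf.getTimeVar, not its real implementation.
    class RetrySleepSketch {
      // Stand-in: accept "100ms" or "2s" and normalize to the target unit.
      static long parseTimeVar(String value, TimeUnit out) {
        if (value.endsWith("ms")) {   // must be checked before the "s" suffix
          return out.convert(Long.parseLong(value.substring(0, value.length() - 2)),
              TimeUnit.MILLISECONDS);
        }
        if (value.endsWith("s")) {
          return out.convert(Long.parseLong(value.substring(0, value.length() - 1)),
              TimeUnit.SECONDS);
        }
        return Long.parseLong(value); // bare number: assume target unit
      }

      interface Action { boolean attempt(); }

      static boolean lockWithRetries(Action action, int numRetries, long sleepTimeMs)
          throws InterruptedException {
        for (int i = 0; i <= numRetries; i++) {
          if (i > 0) {
            Thread.sleep(sleepTimeMs); // back off before every retry, not the first try
          }
          if (action.attempt()) {
            return true;
          }
        }
        return false; // mirrors the managers giving up after exhausting retries
      }

      public static void main(String[] args) throws InterruptedException {
        long sleepTime = parseTimeVar("100ms", TimeUnit.MILLISECONDS);
        boolean ok = lockWithRetries(new Action() {
          int calls = 0;
          public boolean attempt() { return ++calls == 3; } // succeeds on try 3
        }, 5, sleepTime);
        System.out.println("acquired=" + ok + " sleepTimeMs=" + sleepTime);
      }
    }
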
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java Thu Sep  4 02:49:46 2014
@@ -37,6 +37,7 @@ import org.apache.zookeeper.ZooKeeper;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.*;
+import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -53,7 +54,7 @@ public class ZooKeeperHiveLockManager im
   private int sessionTimeout;
   private String quorumServers;
 
-  private int sleepTime;
+  private long sleepTime;
   private int numRetriesForLock;
   private int numRetriesForUnLock;
 
@@ -106,7 +107,8 @@ public class ZooKeeperHiveLockManager im
     sessionTimeout = conf.getIntVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT);
     quorumServers = ZooKeeperHiveLockManager.getQuorumServers(conf);
 
-    sleepTime = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000;
+    sleepTime = conf.getTimeVar(
+        HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES, TimeUnit.MILLISECONDS);
     numRetriesForLock = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
     numRetriesForUnLock = conf.getIntVar(HiveConf.ConfVars.HIVE_UNLOCK_NUMRETRIES);
 
@@ -132,7 +134,8 @@ public class ZooKeeperHiveLockManager im
   @Override
   public void refresh() {
     HiveConf conf = ctx.getConf();
-    sleepTime = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000;
+    sleepTime = conf.getTimeVar(
+        HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES, TimeUnit.MILLISECONDS);
     numRetriesForLock = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
     numRetriesForUnLock = conf.getIntVar(HiveConf.ConfVars.HIVE_UNLOCK_NUMRETRIES);
   }
@@ -268,7 +271,7 @@ public class ZooKeeperHiveLockManager im
    * @param mode
    *          The mode of the lock
    * @param keepAlive
-   *          Whether the lock is to be persisted after the statement Acuire the
+   *          Whether the lock is to be persisted after the statement Acquire the
    *          lock. Return null if a conflicting lock is present.
    **/
   public ZooKeeperHiveLock lock(HiveLockObject key, HiveLockMode mode,
@@ -515,8 +518,8 @@ public class ZooKeeperHiveLockManager im
     try {
       int sessionTimeout = conf.getIntVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT);
       String quorumServers = getQuorumServers(conf);
-      Watcher dummWatcher = new DummyWatcher();
-      zkpClient = new ZooKeeper(quorumServers, sessionTimeout, dummWatcher);
+      Watcher dummyWatcher = new DummyWatcher();
+      zkpClient = new ZooKeeper(quorumServers, sessionTimeout, dummyWatcher);
       String parent = conf.getVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_NAMESPACE);
       List<HiveLock> locks = getLocks(conf, zkpClient, null, parent, false, false);
       Exception lastExceptionGot = null;

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Thu Sep  4 02:49:46 2014
@@ -109,6 +109,7 @@ import org.apache.hadoop.hive.shims.Hado
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TException;
 
 import com.google.common.collect.Sets;
@@ -128,6 +129,7 @@ public class Hive {
 
   private HiveConf conf = null;
   private IMetaStoreClient metaStoreClient;
+  private UserGroupInformation owner;
 
   private static ThreadLocal<Hive> hiveDB = new ThreadLocal<Hive>() {
     @Override
@@ -181,7 +183,11 @@ public class Hive {
    */
   public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException {
     Hive db = hiveDB.get();
-    if (db == null || needsRefresh) {
+    if (db == null || needsRefresh || !db.isCurrentUserOwner()) {
+      if (db != null) {
+        LOG.debug("Creating new db. db = " + db + ", needsRefresh = " + needsRefresh +
+          ", db.isCurrentUserOwner = " + db.isCurrentUserOwner());
+      }
       closeCurrent();
       c.set("fs.scheme.class", "dfs");
       Hive newdb = new Hive(c);
@@ -194,6 +200,11 @@ public class Hive {
 
   public static Hive get() throws HiveException {
     Hive db = hiveDB.get();
+    if (db != null && !db.isCurrentUserOwner()) {
+      LOG.debug("Creating new db. db.isCurrentUserOwner = " + db.isCurrentUserOwner());
+      db.close();
+      db = null;
+    }
     if (db == null) {
       SessionState session = SessionState.get();
       db = new Hive(session == null ? new HiveConf(Hive.class) : session.getConf());
@@ -220,6 +231,17 @@ public class Hive {
     conf = c;
   }
 
+
+  private boolean isCurrentUserOwner() throws HiveException {
+    try {
+      return owner == null || owner.equals(UserGroupInformation.getCurrentUser());
+    } catch(IOException e) {
+      throw new HiveException("Error getting current user: " + e.getMessage(), e);
+    }
+  }
+
+
+
   /**
    * closes the connection to metastore for the calling thread
    */
@@ -2496,6 +2518,13 @@ private void constructOneLBLocationMap(F
   @Unstable
   public IMetaStoreClient getMSC() throws MetaException {
     if (metaStoreClient == null) {
+      try {
+        owner = UserGroupInformation.getCurrentUser();
+      } catch(IOException e) {
+        String msg = "Error getting current user: " + e.getMessage();
+        LOG.error(msg, e);
+        throw new MetaException(msg + "\n" + StringUtils.stringifyException(e));
+      }
       metaStoreClient = createMetaStoreClient();
     }
     return metaStoreClient;

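The Hive.java change above makes the thread-local Hive instance user-aware: getMSC() records the UserGroupInformation that created the metastore client, and both get() variants discard the cached instance when the current user no longer matches, so a thread reused across identities cannot keep a client authenticated as the previous user. A minimal sketch of that invalidate-on-owner-change pattern, with a plain String standing in for UGI:

    // Sketch of the owner check added to Hive.get(): the cached instance
    // remembers who created it and is rebuilt when the caller's identity
    // changes. currentUser() stands in for UserGroupInformation.getCurrentUser().
    class OwnerCheckedCacheSketch {
      private static final ThreadLocal<OwnerCheckedCacheSketch> CACHE =
          new ThreadLocal<OwnerCheckedCacheSketch>();

      static String currentUser = "alice";           // mutable for the demo only
      static String currentUser() { return currentUser; }

      private final String owner;                    // identity at creation time

      private OwnerCheckedCacheSketch(String owner) { this.owner = owner; }

      boolean isCurrentUserOwner() {
        return owner == null || owner.equals(currentUser());
      }

      static OwnerCheckedCacheSketch get() {
        OwnerCheckedCacheSketch db = CACHE.get();
        if (db != null && !db.isCurrentUserOwner()) {
          CACHE.remove();   // close() in the real code: drop the stale client
          db = null;
        }
        if (db == null) {
          db = new OwnerCheckedCacheSketch(currentUser());
          CACHE.set(db);
        }
        return db;
      }

      public static void main(String[] args) {
        OwnerCheckedCacheSketch a = get();
        System.out.println(get() == a);   // true: same user, cache reused
        currentUser = "bob";
        System.out.println(get() == a);   // false: owner changed, cache rebuilt
      }
    }
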
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java Thu Sep  4 02:49:46 2014
@@ -5,6 +5,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -13,22 +14,33 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.PartitionsStatsRequest;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.TableStatsRequest;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.thrift.TException;
 
 public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements IMetaStoreClient {
@@ -71,6 +83,12 @@ public class SessionHiveMetaStoreClient 
     // First try temp table
     org.apache.hadoop.hive.metastore.api.Table table = getTempTable(dbname, name);
     if (table != null) {
+      try {
+        deleteTempTableColumnStatsForTable(dbname, name);
+      } catch (NoSuchObjectException err) {
+        // No stats to delete, forgivable error.
+        LOG.info(err);
+      }
       dropTempTable(table, deleteData, envContext);
       return;
     }
@@ -217,6 +235,41 @@ public class SessionHiveMetaStoreClient 
     return super.get_privilege_set(hiveObject, userName, groupNames);
   }
 
+  /** {@inheritDoc} */
+  @Override
+  public boolean updateTableColumnStatistics(ColumnStatistics statsObj)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
+      InvalidInputException {
+    String dbName = statsObj.getStatsDesc().getDbName().toLowerCase();
+    String tableName = statsObj.getStatsDesc().getTableName().toLowerCase();
+    if (getTempTable(dbName, tableName) != null) {
+      return updateTempTableColumnStats(dbName, tableName, statsObj);
+    }
+    return super.updateTableColumnStatistics(statsObj);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public List<ColumnStatisticsObj> getTableColumnStatistics(String dbName, String tableName,
+      List<String> colNames) throws NoSuchObjectException, MetaException, TException,
+      InvalidInputException, InvalidObjectException {
+    if (getTempTable(dbName, tableName) != null) {
+      return getTempTableColumnStats(dbName, tableName, colNames);
+    }
+    return super.getTableColumnStatistics(dbName, tableName, colNames);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
+      InvalidInputException {
+    if (getTempTable(dbName, tableName) != null) {
+      return deleteTempTableColumnStats(dbName, tableName, colName);
+    }
+    return super.deleteTableColumnStatistics(dbName, tableName, colName);
+  }
+
   private void createTempTable(org.apache.hadoop.hive.metastore.api.Table tbl,
       EnvironmentContext envContext) throws AlreadyExistsException, InvalidObjectException,
       MetaException, NoSuchObjectException, TException {
@@ -277,15 +330,19 @@ public class SessionHiveMetaStoreClient 
       org.apache.hadoop.hive.metastore.api.Table oldt,
       org.apache.hadoop.hive.metastore.api.Table newt,
       EnvironmentContext envContext) throws InvalidOperationException, MetaException, TException {
-    Table newTable = new Table(deepCopyAndLowerCaseTable(newt));
     dbname = dbname.toLowerCase();
     tbl_name = tbl_name.toLowerCase();
+    boolean shouldDeleteColStats = false;
 
     // Disallow changing temp table location
     if (!newt.getSd().getLocation().equals(oldt.getSd().getLocation())) {
       throw new MetaException("Temp table location cannot be changed");
     }
 
+    org.apache.hadoop.hive.metastore.api.Table newtCopy = deepCopyAndLowerCaseTable(newt);
+    MetaStoreUtils.updateUnpartitionedTableStatsFast(newtCopy,
+        wh.getFileStatusesForSD(newtCopy.getSd()), false, true);
+    Table newTable = new Table(newtCopy);
     String newDbName = newTable.getDbName();
     String newTableName = newTable.getTableName();
     if (!newDbName.equals(oldt.getDbName()) || !newTableName.equals(oldt.getTableName())) {
@@ -303,6 +360,7 @@ public class SessionHiveMetaStoreClient 
       if (tables == null || tables.remove(tbl_name) == null) {
         throw new MetaException("Could not find temp table entry for " + dbname + "." + tbl_name);
       }
+      shouldDeleteColStats = true;
 
       tables = getTempTablesForDatabase(newDbName);
       if (tables == null) {
@@ -311,8 +369,50 @@ public class SessionHiveMetaStoreClient 
       }
       tables.put(newTableName, newTable);
     } else {
+      if (haveTableColumnsChanged(oldt, newt)) {
+        shouldDeleteColStats = true;
+      }
       getTempTablesForDatabase(dbname).put(tbl_name, newTable);
     }
+
+    if (shouldDeleteColStats) {
+      try {
+        deleteTempTableColumnStatsForTable(dbname, tbl_name);
+      } catch (NoSuchObjectException err) {
+        // No stats to delete, forgivable error.
+        LOG.info(err);
+      }
+    }
+  }
+
+  private static boolean haveTableColumnsChanged(org.apache.hadoop.hive.metastore.api.Table oldt,
+      org.apache.hadoop.hive.metastore.api.Table newt) {
+    List<FieldSchema> oldCols = oldt.getSd().getCols();
+    List<FieldSchema> newCols = newt.getSd().getCols();
+    if (oldCols.size() != newCols.size()) {
+      return true;
+    }
+    Iterator<FieldSchema> oldColsIter = oldCols.iterator();
+    Iterator<FieldSchema> newColsIter = newCols.iterator();
+    while (oldColsIter.hasNext()) {
+      // Don't use FieldSchema.equals() since it also compares comments,
+      // which is unnecessary for this method.
+      if (!fieldSchemaEqualsIgnoreComment(oldColsIter.next(), newColsIter.next())) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  private static boolean fieldSchemaEqualsIgnoreComment(FieldSchema left, FieldSchema right) {
+    // Just check name/type for equality, don't compare comment
+    if (!left.getName().equals(right.getName())) {
+      return false;
+    }
+    if (!left.getType().equals(right.getType())) {
+      return false;
+    }
+    return true;
   }
 
   private void dropTempTable(org.apache.hadoop.hive.metastore.api.Table table, boolean deleteData,
@@ -373,4 +473,102 @@ public class SessionHiveMetaStoreClient 
     }
     return ss.getTempTables().get(dbName);
   }
+
+  private Map<String, ColumnStatisticsObj> getTempTableColumnStatsForTable(String dbName,
+      String tableName) {
+    SessionState ss = SessionState.get();
+    if (ss == null) {
+      LOG.debug("No current SessionState, skipping temp tables");
+      return null;
+    }
+    String lookupName = StatsUtils.getFullyQualifiedTableName(dbName.toLowerCase(),
+        tableName.toLowerCase());
+    return ss.getTempTableColStats().get(lookupName);
+  }
+
+  private static List<ColumnStatisticsObj> copyColumnStatisticsObjList(Map<String, ColumnStatisticsObj> csoMap) {
+    List<ColumnStatisticsObj> retval = new ArrayList<ColumnStatisticsObj>(csoMap.size());
+    for (ColumnStatisticsObj cso : csoMap.values()) {
+      retval.add(new ColumnStatisticsObj(cso));
+    }
+    return retval;
+  }
+
+  private List<ColumnStatisticsObj> getTempTableColumnStats(String dbName, String tableName,
+      List<String> colNames) {
+    Map<String, ColumnStatisticsObj> tableColStats =
+        getTempTableColumnStatsForTable(dbName, tableName);
+    List<ColumnStatisticsObj> retval = new ArrayList<ColumnStatisticsObj>();
+
+    if (tableColStats != null) {
+      for (String colName : colNames) {
+        colName = colName.toLowerCase();
+        if (tableColStats.containsKey(colName)) {
+          retval.add(new ColumnStatisticsObj(tableColStats.get(colName)));
+        }
+      }
+    }
+    return retval;
+  }
+
+  private boolean updateTempTableColumnStats(String dbName, String tableName,
+      ColumnStatistics colStats) throws MetaException {
+    SessionState ss = SessionState.get();
+    if (ss == null) {
+      throw new MetaException("No current SessionState, cannot update temporary table stats for "
+          + dbName + "." + tableName);
+    }
+    Map<String, ColumnStatisticsObj> ssTableColStats =
+        getTempTableColumnStatsForTable(dbName, tableName);
+    if (ssTableColStats == null) {
+      // Add new entry for this table
+      ssTableColStats = new HashMap<String, ColumnStatisticsObj>();
+      ss.getTempTableColStats().put(
+          StatsUtils.getFullyQualifiedTableName(dbName, tableName),
+          ssTableColStats);
+    }
+    mergeColumnStats(ssTableColStats, colStats);
+    return true;
+  }
+
+  private static void mergeColumnStats(Map<String, ColumnStatisticsObj> oldStats,
+      ColumnStatistics newStats) {
+    List<ColumnStatisticsObj> newColList = newStats.getStatsObj();
+    if (newColList != null) {
+      for (ColumnStatisticsObj colStat : newColList) {
+        // This is admittedly a bit simplistic; StatsObjectConverter seems to
+        // allow old stats attributes to be kept if the new values do not overwrite them.
+        oldStats.put(colStat.getColName().toLowerCase(), colStat);
+      }
+    }
+  }
+
+  private boolean deleteTempTableColumnStatsForTable(String dbName, String tableName)
+      throws NoSuchObjectException {
+    Map<String, ColumnStatisticsObj> deletedEntry =
+        getTempTableColumnStatsForTable(dbName, tableName);
+    if (deletedEntry != null) {
+      SessionState.get().getTempTableColStats().remove(
+          StatsUtils.getFullyQualifiedTableName(dbName, tableName));
+    } else {
+      throw new NoSuchObjectException("Column stats do not exist for db=" + dbName +
+          " temp table=" + tableName);
+    }
+    return true;
+  }
+
+  private boolean deleteTempTableColumnStats(String dbName, String tableName, String columnName)
+      throws NoSuchObjectException {
+    ColumnStatisticsObj deletedEntry = null;
+    Map<String, ColumnStatisticsObj> ssTableColStats =
+        getTempTableColumnStatsForTable(dbName, tableName);
+    if (ssTableColStats != null) {
+      deletedEntry = ssTableColStats.remove(columnName.toLowerCase());
+    }
+    if (deletedEntry == null) {
+      throw new NoSuchObjectException("Column stats do not exist for db=" + dbName +
+          " temp table=" + tableName);
+    }
+    return true;
+  }
 }
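
Taken together, these private helpers keep temporary-table column statistics
entirely inside the session: the outer map is keyed by the fully qualified
"db.table" name, the inner map by lower-cased column name, and a fresh ANALYZE
simply overwrites each column's entry. A condensed sketch of that layout
(hypothetical free-standing form; the real store is
SessionState.getTempTableColStats()):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
    import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;

    class TempTableStatsSketch {
      // Outer key: "db.table" (lower case); inner key: column name (lower case).
      private final Map<String, Map<String, ColumnStatisticsObj>> statsByTable =
          new HashMap<String, Map<String, ColumnStatisticsObj>>();

      void merge(String dbName, String tableName, ColumnStatistics newStats) {
        String key = dbName.toLowerCase() + "." + tableName.toLowerCase();
        Map<String, ColumnStatisticsObj> byCol = statsByTable.get(key);
        if (byCol == null) {
          byCol = new HashMap<String, ColumnStatisticsObj>();
          statsByTable.put(key, byCol);
        }
        if (newStats.getStatsObj() != null) {
          for (ColumnStatisticsObj cso : newStats.getStatsObj()) {
            // Last write wins per column, mirroring mergeColumnStats() above.
            byCol.put(cso.getColName().toLowerCase(), cso);
          }
        }
      }
    }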

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Thu Sep  4 02:49:46 2014
@@ -369,7 +369,7 @@ public final class ColumnPrunerProcFacto
         return null;
       }
       cols = cols == null ? new ArrayList<String>() : cols;
-      
+
       cppCtx.getPrunedColLists().put((Operator<? extends OperatorDesc>) nd,
           cols);
       RowResolver inputRR = cppCtx.getOpToParseCtxMap().get(scanOp).getRowResolver();
@@ -479,13 +479,13 @@ public final class ColumnPrunerProcFacto
           flags[index] = true;
           colLists = Utilities.mergeUniqElems(colLists, valCols.get(index).getCols());
         }
-        
+
         Collections.sort(colLists);
         pruneReduceSinkOperator(flags, op, cppCtx);
         cppCtx.getPrunedColLists().put(op, colLists);
         return null;
       }
-      
+
       // Reduce Sink contains the columns needed - no need to aggregate from
       // children
       for (ExprNodeDesc val : valCols) {
@@ -519,7 +519,7 @@ public final class ColumnPrunerProcFacto
       if (cols == null) {
         return null;
       }
-      
+
       Map<String, ExprNodeDesc> colExprMap = op.getColumnExprMap();
       // As columns go down the DAG, the LVJ will transform internal column
       // names from something like 'key' to '_col0'. Because of this, we need
@@ -604,8 +604,8 @@ public final class ColumnPrunerProcFacto
         Object... nodeOutputs) throws SemanticException {
       SelectOperator op = (SelectOperator) nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
-      
-      
+
+
       if (op.getChildOperators() != null) {
         for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
           // UDTF is not handled yet, so the parent SelectOp of UDTF should just assume
@@ -858,11 +858,11 @@ public final class ColumnPrunerProcFacto
     if (inputSchema != null) {
       ArrayList<ColumnInfo> rs = new ArrayList<ColumnInfo>();
       ArrayList<ColumnInfo> inputCols = inputSchema.getSignature();
-    	for (ColumnInfo i: inputCols) {
+      for (ColumnInfo i: inputCols) {
         if (cols.contains(i.getInternalName())) {
           rs.add(i);
         }
-    	}
+      }
       op.getSchema().setSignature(rs);
     }
   }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java Thu Sep  4 02:49:46 2014
@@ -4,9 +4,9 @@
  * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance with the License. You may obtain a
  * copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software distributed under the License
  * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
  * or implied. See the License for the specific language governing permissions and limitations under
@@ -100,7 +100,7 @@ public final class ConstantPropagateProc
 
   /**
    * Get ColumnInfo from column expression.
-   * 
+   *
    * @param rr
    * @param desc
    * @return
@@ -139,7 +139,7 @@ public final class ConstantPropagateProc
 
   /**
    * Cast type from expression type to expected type ti.
-   * 
+   *
    * @param desc constant expression
    * @param ti expected type info
    * @return cast constant, or null if the type cast failed.
@@ -189,10 +189,10 @@ public final class ConstantPropagateProc
 
   /**
    * Fold input expression desc.
-   * 
+   *
    * If desc is a UDF and all parameters are constants, evaluate it. If desc is a column expression,
    * find it from propagated constants, and if there is, replace it with constant.
-   * 
+   *
    * @param desc folding expression
    * @param constants current propagated constant map
    * @param cppCtx
@@ -296,7 +296,7 @@ public final class ConstantPropagateProc
 
   /**
    * Propagate assignment expression, adding an entry into constant map constants.
-   * 
+   *
    * @param udf expression UDF, currently only 2 UDFs are supported: '=' and 'is null'.
    * @param newExprs child expressions (parameters).
    * @param cppCtx
@@ -350,7 +350,7 @@ public final class ConstantPropagateProc
           ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr;
           if (Boolean.TRUE.equals(c.getValue())) {
 
-        	  // if true, prune it
+            // if true, prune it
             return newExprs.get(Math.abs(i - 1));
           } else {
 
@@ -384,7 +384,7 @@ public final class ConstantPropagateProc
 
   /**
    * Evaluate column, replace the deterministic columns with constants if possible
-   * 
+   *
    * @param desc
    * @param ctx
    * @param op
@@ -435,7 +435,7 @@ public final class ConstantPropagateProc
 
   /**
    * Evaluate UDF
-   * 
+   *
    * @param udf UDF object
    * @param exprs
    * @param oldExprs
@@ -512,7 +512,7 @@ public final class ConstantPropagateProc
 
   /**
    * Change operator row schema, replace column with constant if it is.
-   * 
+   *
    * @param op
    * @param constants
    * @throws SemanticException
@@ -584,7 +584,7 @@ public final class ConstantPropagateProc
 
   /**
    * Factory method to get the ConstantPropagateFilterProc class.
-   * 
+   *
    * @return ConstantPropagateFilterProc
    */
   public static ConstantPropagateFilterProc getFilterProc() {
@@ -621,7 +621,7 @@ public final class ConstantPropagateProc
 
   /**
    * Factory method to get the ConstantPropagateGroupByProc class.
-   * 
+   *
    * @return ConstantPropagateGroupByProc
    */
   public static ConstantPropagateGroupByProc getGroupByProc() {
@@ -650,7 +650,7 @@ public final class ConstantPropagateProc
 
   /**
    * Factory method to get the ConstantPropagateDefaultProc class.
-   * 
+   *
    * @return ConstantPropagateDefaultProc
    */
   public static ConstantPropagateDefaultProc getDefaultProc() {
@@ -683,7 +683,7 @@ public final class ConstantPropagateProc
 
   /**
    * The Factory method to get the ConstantPropagateSelectProc class.
-   * 
+   *
    * @return ConstantPropagateSelectProc
    */
   public static ConstantPropagateSelectProc getSelectProc() {
@@ -877,7 +877,7 @@ public final class ConstantPropagateProc
         return null;
       }
 
-      // Note: the following code (removing folded constants in exprs) is deeply coupled with 
+      // Note: the following code (removing folded constants in exprs) is deeply coupled with
       //    ColumnPruner optimizer.
       // Assuming ColumnPrunner will remove constant columns so we don't deal with output columns.
       //    Except one case that the join operator is followed by a redistribution (RS operator).

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java Thu Sep  4 02:49:46 2014
@@ -353,14 +353,14 @@ public class OpProcFactory {
           if (inpOp.getSchema() != null && inpOp.getSchema().getSignature() != null ) {
             for(ColumnInfo ci : inpOp.getSchema().getSignature()) {
               Dependency inp_dep = lctx.getIndex().getDependency(inpOp, ci);
-            	// The dependency can be null as some of the input cis may not have
-            	// been set in case of joins.
-            	if (inp_dep != null) {
-            	  for(BaseColumnInfo bci : inp_dep.getBaseCols()) {
-            	    new_type = LineageCtx.getNewDependencyType(inp_dep.getType(), new_type);
-            	    tai_set.add(bci.getTabAlias());
-            	  }
-            	}
+              // The dependency can be null as some of the input cis may not have
+              // been set in case of joins.
+              if (inp_dep != null) {
+                for(BaseColumnInfo bci : inp_dep.getBaseCols()) {
+                  new_type = LineageCtx.getNewDependencyType(inp_dep.getType(), new_type);
+                  tai_set.add(bci.getTabAlias());
+                }
+              }
             }
           }
 

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Thu Sep  4 02:49:46 2014
@@ -815,9 +815,11 @@ public class Vectorizer implements Physi
         ret = validateSelectOperator((SelectOperator) op);
         break;
       case REDUCESINK:
-          ret = validateReduceSinkOperator((ReduceSinkOperator) op);
-          break;
+        ret = validateReduceSinkOperator((ReduceSinkOperator) op);
+        break;
       case FILESINK:
+        ret = validateFileSinkOperator((FileSinkOperator) op);
+        break;
       case LIMIT:
         ret = true;
         break;
@@ -899,6 +901,15 @@ public class Vectorizer implements Physi
     return true;
   }
 
+  private boolean validateFileSinkOperator(FileSinkOperator op) {
+    // HIVE-7557: For now, turn off dynamic partitioning to give more time to
+    // figure out how to make VectorFileSink work correctly with it...
+    if (op.getConf().getDynPartCtx() != null) {
+      return false;
+    }
+    return true;
+  }
+
   private boolean validateExprNodeDesc(List<ExprNodeDesc> descs) {
     return validateExprNodeDesc(descs, VectorExpressionDescriptor.Mode.PROJECTION);
   }
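
The effect of the new FILESINK case is easiest to see from the caller's side:
a file sink stays vectorizable only while no dynamic partitioning context is
attached. A one-method restatement (hypothetical free-standing form, not the
committed code):

    import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

    final class FileSinkVectorCheck {
      // An insert with dynamic partitions, e.g. PARTITION (ds) SELECT ...,
      // attaches a DynamicPartitionCtx and falls back to the row-mode
      // FileSink; a static insert, e.g. PARTITION (ds='2014-09-04'), leaves
      // getDynPartCtx() null and remains eligible (HIVE-7557).
      static boolean canVectorize(FileSinkOperator op) {
        return op.getConf().getDynPartCtx() == null;
      }
    }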

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java Thu Sep  4 02:49:46 2014
@@ -104,7 +104,8 @@ public class StatsRulesProcFactory {
         tsop.setStatistics(stats.clone());
 
         if (LOG.isDebugEnabled()) {
-          LOG.debug("[0] STATS-" + tsop.toString() + ": " + stats.extendedToString());
+          LOG.debug("[0] STATS-" + tsop.toString() + " (" + table.getTableName()
+              + "): " + stats.extendedToString());
         }
       } catch (CloneNotSupportedException e) {
         throw new SemanticException(ErrorMsg.STATISTICS_CLONING_FAILED.getMsg());
@@ -1092,7 +1093,9 @@ public class StatsRulesProcFactory {
             String key = entry.getValue().get(joinColIdx);
             key = StatsUtils.stripPrefixFromColumnName(key);
             ColStatistics cs = joinedColStats.get(key);
-            cs.setCountDistint(minNDV);
+            if (cs != null) {
+              cs.setCountDistint(minNDV);
+            }
           }
         }
 

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java Thu Sep  4 02:49:46 2014
@@ -363,7 +363,6 @@ public class ColumnStatsSemanticAnalyzer
       originalTree = tree;
       boolean isPartitionStats = isPartitionLevelStats(tree);
       Map<String,String> partSpec = null;
-      checkIfTemporaryTable();
       checkForPartitionColumns(colNames, Utilities.getColumnNamesFromFieldSchema(tbl.getPartitionKeys()));
       validateSpecifiedColumnNames(colNames);
       if (conf.getBoolVar(ConfVars.HIVE_STATS_COLLECT_PART_LEVEL_STATS) && tbl.isPartitioned()) {
@@ -414,13 +413,6 @@ public class ColumnStatsSemanticAnalyzer
     }
   }
 
-  private void checkIfTemporaryTable() throws SemanticException {
-    if (tbl.isTemporary()) {
-      throw new SemanticException(tbl.getTableName()
-          + " is a temporary table.  Column statistics are not supported on temporary tables.");
-    }
-  }
-
   @Override
   public void analyze(ASTNode ast, Context origCtx) throws SemanticException {
     QB qb;

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java Thu Sep  4 02:49:46 2014
@@ -177,10 +177,19 @@ public class GenTezUtils {
 
     // map work starts with table scan operators
     assert root instanceof TableScanOperator;
-    String alias = ((TableScanOperator)root).getConf().getAlias();
+    TableScanOperator ts = (TableScanOperator) root;
+
+    String alias = ts.getConf().getAlias();
 
     setupMapWork(mapWork, context, partitions, root, alias);
 
+    if (context.parseContext != null
+        && context.parseContext.getTopToTable() != null
+        && context.parseContext.getTopToTable().containsKey(ts)
+        && context.parseContext.getTopToTable().get(ts).isDummyTable()) {
+      mapWork.setDummyTableScan(true);
+    }
+
     // add new item to the tez work
     tezWork.add(mapWork);
 

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Thu Sep  4 02:49:46 2014
@@ -66,26 +66,26 @@ public class QB {
   private HashMap<String, WindowingSpec> destToWindowingSpec;
 
   /*
    * If this QB represents a SubQuery predicate then this will point to the SubQuery object.
    */
   private QBSubQuery subQueryPredicateDef;
-  
-	/*
-	 * used to give a unique name to each SubQuery QB Currently there can be at
-	 * most 2 SubQueries in a Query: 1 in the Where clause, and 1 in the Having
-	 * clause.
-	 */
-	private int numSubQueryPredicates;
-	
-	/*
-	 * for now a top level QB can have 1 where clause SQ predicate.
-	 */
-	private QBSubQuery whereClauseSubQueryPredicate;
-	
+
+  /*
+   * used to give a unique name to each SubQuery QB Currently there can be at
+   * most 2 SubQueries in a Query: 1 in the Where clause, and 1 in the Having
+   * clause.
+   */
+  private int numSubQueryPredicates;
+
   /*
    * for now a top level QB can have 1 where clause SQ predicate.
    */
-	private QBSubQuery havingClauseSubQueryPredicate;
+  private QBSubQuery whereClauseSubQueryPredicate;
+
+  /*
+   * for now a top level QB can have 1 having clause SQ predicate.
+   */
+  private QBSubQuery havingClauseSubQueryPredicate;
 
   // results
 
@@ -341,28 +341,28 @@ public class QB {
   protected QBSubQuery getSubQueryPredicateDef() {
     return subQueryPredicateDef;
   }
-  
-	protected int getNumSubQueryPredicates() {
-		return numSubQueryPredicates;
-	}
-
-	protected int incrNumSubQueryPredicates() {
-		return ++numSubQueryPredicates;
-	}
-	
-	void setWhereClauseSubQueryPredicate(QBSubQuery sq) {
-	  whereClauseSubQueryPredicate = sq;
-  }
-	
-	public QBSubQuery getWhereClauseSubQueryPredicate() {
-	  return whereClauseSubQueryPredicate;
-	}
-	
-	void setHavingClauseSubQueryPredicate(QBSubQuery sq) {
+
+  protected int getNumSubQueryPredicates() {
+    return numSubQueryPredicates;
+  }
+
+  protected int incrNumSubQueryPredicates() {
+    return ++numSubQueryPredicates;
+  }
+
+  void setWhereClauseSubQueryPredicate(QBSubQuery sq) {
+    whereClauseSubQueryPredicate = sq;
+  }
+
+  public QBSubQuery getWhereClauseSubQueryPredicate() {
+    return whereClauseSubQueryPredicate;
+  }
+
+  void setHavingClauseSubQueryPredicate(QBSubQuery sq) {
     havingClauseSubQueryPredicate = sq;
   }
-	
-	public QBSubQuery getHavingClauseSubQueryPredicate() {
+
+  public QBSubQuery getHavingClauseSubQueryPredicate() {
     return havingClauseSubQueryPredicate;
   }