Posted to commits@hive.apache.org by gu...@apache.org on 2013/08/30 21:46:17 UTC

svn commit: r1519056 [3/3] - in /hive/branches/tez: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ hbase-handler/src/test/queries/positive/ hbase-handler/src/test/results/positive/ hcatalog/ hcatalog/build-support/ant/ hcatalog/core/ hcatalog/co...

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java Fri Aug 30 19:46:15 2013
@@ -38,7 +38,6 @@ import org.apache.hadoop.hive.ql.plan.PT
 import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.ShapeDetails;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.ValueBoundaryDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowExpressionDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFrameDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFunctionDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowTableFunctionDef;
@@ -53,7 +52,6 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
@@ -114,67 +112,37 @@ public class PTFDeserializer {
     /*
      * 2. initialize WFns.
      */
-    if (def.getWindowFunctions() != null) {
-      for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
-
-        if (wFnDef.getArgs() != null) {
-          for (PTFExpressionDef arg : wFnDef.getArgs()) {
-            initialize(arg, inpShape);
-          }
+    for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
+      if (wFnDef.getArgs() != null) {
+        for (PTFExpressionDef arg : wFnDef.getArgs()) {
+          initialize(arg, inpShape);
         }
 
-        if (wFnDef.getWindowFrame() != null) {
-          WindowFrameDef wFrmDef = wFnDef.getWindowFrame();
-          initialize(wFrmDef.getStart(), inpShape);
-          initialize(wFrmDef.getEnd(), inpShape);
-        }
-        setupWdwFnEvaluator(wFnDef);
       }
-      ArrayList<String> aliases = new ArrayList<String>();
-      ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
-      for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
-        aliases.add(wFnDef.getAlias());
-        if (wFnDef.isPivotResult()) {
-          fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
-        } else {
-          fieldOIs.add(wFnDef.getOI());
-        }
+      if (wFnDef.getWindowFrame() != null) {
+        WindowFrameDef wFrmDef = wFnDef.getWindowFrame();
+        initialize(wFrmDef.getStart(), inpShape);
+        initialize(wFrmDef.getEnd(), inpShape);
       }
-      PTFDeserializer.addInputColumnsToList(inpShape, aliases, fieldOIs);
-      StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(
-          aliases, fieldOIs);
-      tResolver.setWdwProcessingOutputOI(wdwOutOI);
-      initialize(def.getOutputFromWdwFnProcessing(), wdwOutOI);
-    } else {
-      def.setOutputFromWdwFnProcessing(inpShape);
+      setupWdwFnEvaluator(wFnDef);
     }
-
-    inpShape = def.getOutputFromWdwFnProcessing();
-
-    /*
-     * 3. initialize WExprs. + having clause
-     */
-    if (def.getWindowExpressions() != null) {
-      for (WindowExpressionDef wEDef : def.getWindowExpressions()) {
-        initialize(wEDef, inpShape);
+    ArrayList<String> aliases = new ArrayList<String>();
+    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
+    for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
+      aliases.add(wFnDef.getAlias());
+      if (wFnDef.isPivotResult()) {
+        fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
+      } else {
+        fieldOIs.add(wFnDef.getOI());
       }
     }
 
-    /*
-     * 4. give Evaluator chance to setup for Output execution; setup Output shape.
-     */
+    PTFDeserializer.addInputColumnsToList(inpShape, aliases, fieldOIs);
+    StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(
+        aliases, fieldOIs);
+    tResolver.setWdwProcessingOutputOI(wdwOutOI);
+    initialize(def.getOutputShape(), wdwOutOI);
     tResolver.initializeOutputOI();
-    initialize(def.getOutputShape(), tEval.getOutputOI());
-
-    /*
-     * If we have windowExpressions then we convert to Std. Object to process;
-     * we just stream these rows; no need to put in an output Partition.
-     */
-    if (def.getWindowExpressions().size() > 0) {
-      StructObjectInspector oi = (StructObjectInspector)
-          ObjectInspectorUtils.getStandardObjectInspector(def.getOutputShape().getOI());
-      def.getOutputShape().setOI(oi);
-    }
   }
 
   protected void initialize(PTFQueryInputDef def, StructObjectInspector OI) throws HiveException {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java Fri Aug 30 19:46:15 2013
@@ -68,6 +68,10 @@ public class ReduceSinkDesc extends Abst
 
   private int numReducers;
 
+  private int topN = -1;
+  private float topNMemoryUsage = -1;
+  private boolean mapGroupBy;  // for group-by, values with same key on top-K should be forwarded
+
   public ReduceSinkDesc() {
   }
 
@@ -178,6 +182,40 @@ public class ReduceSinkDesc extends Abst
     this.tag = tag;
   }
 
+  public int getTopN() {
+    return topN;
+  }
+
+  public void setTopN(int topN) {
+    this.topN = topN;
+  }
+
+  @Explain(displayName = "TopN")
+  public Integer getTopNExplain() {
+    return topN > 0 ? topN : null;
+  }
+
+  public float getTopNMemoryUsage() {
+    return topNMemoryUsage;
+  }
+
+  public void setTopNMemoryUsage(float topNMemoryUsage) {
+    this.topNMemoryUsage = topNMemoryUsage;
+  }
+
+  @Explain(displayName = "TopN Hash Memory Usage")
+  public Float getTopNMemoryUsageExplain() {
+    return topN > 0 && topNMemoryUsage > 0 ? topNMemoryUsage : null;
+  }
+
+  public boolean isMapGroupBy() {
+    return mapGroupBy;
+  }
+
+  public void setMapGroupBy(boolean mapGroupBy) {
+    this.mapGroupBy = mapGroupBy;
+  }
+
   /**
    * Returns the number of reducers for the map-reduce job. -1 means to decide
    * the number of reducers at runtime. This enables Hive to estimate the number

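The ReduceSinkDesc hunk above adds top-N pushdown knobs to the reduce sink descriptor. A minimal sketch of how a planner-side caller might populate them, using only the constructor and setters introduced in this diff; the limit value and the surrounding wiring are illustrative assumptions, not part of this commit:

    // Hypothetical planner-side wiring; ReduceSinkDesc and the setters below
    // come from the hunk above, everything else is illustrative.
    ReduceSinkDesc rsDesc = new ReduceSinkDesc();
    int limit = 100;                  // e.g. taken from a LIMIT clause
    rsDesc.setTopN(limit);            // forward at most the top-K keys
    rsDesc.setTopNMemoryUsage(0.3f);  // memory budget for the top-K hash
    rsDesc.setMapGroupBy(true);       // keep all values sharing a top-K key
    // In EXPLAIN output, getTopNExplain()/getTopNMemoryUsageExplain() return
    // null unless topN > 0, so the new fields stay hidden when unused.
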
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java Fri Aug 30 19:46:15 2013
@@ -46,34 +46,12 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 
-public class WindowingTableFunction extends TableFunctionEvaluator
-{
-
-  @Override
-  public PTFPartition execute(PTFPartition iPart)
-      throws HiveException
-  {
-    WindowTableFunctionDef wFnDef = (WindowTableFunctionDef) getTableDef();
-    PTFPartitionIterator<Object> pItr = iPart.iterator();
-    PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, pItr);
-
-    if ( outputPartition == null ) {
-      outputPartition = PTFPartition.create(ptfDesc.getCfg(),
-          wFnDef.getOutputFromWdwFnProcessing().getSerde(),
-          OI, wFnDef.getOutputFromWdwFnProcessing().getOI());
-    }
-    else {
-      outputPartition.reset();
-    }
-
-    execute(pItr, outputPartition);
-    return outputPartition;
-  }
+@SuppressWarnings("deprecation")
+public class WindowingTableFunction extends TableFunctionEvaluator {
 
   @SuppressWarnings({ "unchecked", "rawtypes" })
   @Override
-  public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException
-  {
+  public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException {
     ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
     PTFPartition iPart = pItr.getPartition();
     StructObjectInspector inputOI;
@@ -82,36 +60,29 @@ public class WindowingTableFunction exte
     WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
     Order order = wTFnDef.getOrder().getExpressions().get(0).getOrder();
 
-    for(WindowFunctionDef wFn : wTFnDef.getWindowFunctions())
-    {
+    for(WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
       boolean processWindow = processWindow(wFn);
       pItr.reset();
-      if ( !processWindow )
-      {
+      if ( !processWindow ) {
         GenericUDAFEvaluator fEval = wFn.getWFnEval();
         Object[] args = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()];
         AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
-        while(pItr.hasNext())
-        {
+        while(pItr.hasNext()) {
           Object row = pItr.next();
           int i =0;
           if ( wFn.getArgs() != null ) {
-            for(PTFExpressionDef arg : wFn.getArgs())
-            {
+            for(PTFExpressionDef arg : wFn.getArgs()) {
               args[i++] = arg.getExprEvaluator().evaluate(row);
             }
           }
           fEval.aggregate(aggBuffer, args);
         }
         Object out = fEval.evaluate(aggBuffer);
-        if ( !wFn.isPivotResult())
-        {
+        if ( !wFn.isPivotResult()) {
           out = new SameList(iPart.size(), out);
         }
         oColumns.add((List<?>)out);
-      }
-      else
-      {
+      } else {
         oColumns.add(executeFnwithWindow(getQueryDef(), wFn, iPart, order));
       }
     }
@@ -122,18 +93,15 @@ public class WindowingTableFunction exte
      * - the input Rows columns
      */
 
-    for(int i=0; i < iPart.size(); i++)
-    {
+    for(int i=0; i < iPart.size(); i++) {
       ArrayList oRow = new ArrayList();
       Object iRow = iPart.getAt(i);
 
-      for(int j=0; j < oColumns.size(); j++)
-      {
+      for(int j=0; j < oColumns.size(); j++) {
         oRow.add(oColumns.get(j).get(i));
       }
 
-      for(StructField f : inputOI.getAllStructFieldRefs())
-      {
+      for(StructField f : inputOI.getAllStructFieldRefs()) {
         oRow.add(inputOI.getStructFieldData(iRow, f));
       }
 
@@ -187,15 +155,13 @@ public class WindowingTableFunction exte
      * - the Window Functions.
      */
     @Override
-    public void initializeOutputOI() throws HiveException
-    {
+    public void initializeOutputOI() throws HiveException {
       setupOutputOI();
     }
 
 
     @Override
-    public boolean transformsRawInput()
-    {
+    public boolean transformsRawInput() {
       return false;
     }
 
@@ -225,26 +191,22 @@ public class WindowingTableFunction exte
       WindowFunctionDef wFnDef,
       PTFPartition iPart,
       Order order)
-    throws HiveException
-  {
+    throws HiveException {
     ArrayList<Object> vals = new ArrayList<Object>();
 
     GenericUDAFEvaluator fEval = wFnDef.getWFnEval();
 
     Object[] args = new Object[wFnDef.getArgs() == null ? 0 : wFnDef.getArgs().size()];
-    for(int i=0; i < iPart.size(); i++)
-    {
+    for(int i=0; i < iPart.size(); i++) {
       AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
       Range rng = getRange(wFnDef, i, iPart, order);
       PTFPartitionIterator<Object> rItr = rng.iterator();
       PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, rItr);
-      while(rItr.hasNext())
-      {
+      while(rItr.hasNext()) {
         Object row = rItr.next();
         int j = 0;
         if ( wFnDef.getArgs() != null ) {
-          for(PTFExpressionDef arg : wFnDef.getArgs())
-          {
+          for(PTFExpressionDef arg : wFnDef.getArgs()) {
             args[j++] = arg.getExprEvaluator().evaluate(row);
           }
         }
@@ -701,8 +663,7 @@ public class WindowingTableFunction exte
       }
     }
 
-    public Object computeValue(Object row) throws HiveException
-    {
+    public Object computeValue(Object row) throws HiveException {
       Object o = expressionDef.getExprEvaluator().evaluate(row);
       return ObjectInspectorUtils.copyToStandardObject(o, expressionDef.getOI());
     }
@@ -713,11 +674,10 @@ public class WindowingTableFunction exte
 
 
     @SuppressWarnings("incomplete-switch")
-    public static ValueBoundaryScanner getScanner(ValueBoundaryDef vbDef, Order order) throws HiveException
-    {
+    public static ValueBoundaryScanner getScanner(ValueBoundaryDef vbDef, Order order)
+        throws HiveException {
       PrimitiveObjectInspector pOI = (PrimitiveObjectInspector) vbDef.getOI();
-      switch(pOI.getPrimitiveCategory())
-      {
+      switch(pOI.getPrimitiveCategory()) {
       case BYTE:
       case INT:
       case LONG:
@@ -736,16 +696,14 @@ public class WindowingTableFunction exte
     }
   }
 
-  public static class LongValueBoundaryScanner extends ValueBoundaryScanner
-  {
-    public LongValueBoundaryScanner(BoundaryDef bndDef, Order order, PTFExpressionDef expressionDef)
-    {
+  public static class LongValueBoundaryScanner extends ValueBoundaryScanner {
+    public LongValueBoundaryScanner(BoundaryDef bndDef, Order order,
+        PTFExpressionDef expressionDef) {
       super(bndDef,order,expressionDef);
     }
 
     @Override
-    public boolean isGreater(Object v1, Object v2, int amt)
-    {
+    public boolean isGreater(Object v1, Object v2, int amt) {
       long l1 = PrimitiveObjectInspectorUtils.getLong(v1,
           (PrimitiveObjectInspector) expressionDef.getOI());
       long l2 = PrimitiveObjectInspectorUtils.getLong(v2,
@@ -754,8 +712,7 @@ public class WindowingTableFunction exte
     }
 
     @Override
-    public boolean isEqual(Object v1, Object v2)
-    {
+    public boolean isEqual(Object v1, Object v2) {
       long l1 = PrimitiveObjectInspectorUtils.getLong(v1,
           (PrimitiveObjectInspector) expressionDef.getOI());
       long l2 = PrimitiveObjectInspectorUtils.getLong(v2,
@@ -764,16 +721,14 @@ public class WindowingTableFunction exte
     }
   }
 
-  public static class DoubleValueBoundaryScanner extends ValueBoundaryScanner
-  {
-    public DoubleValueBoundaryScanner(BoundaryDef bndDef, Order order, PTFExpressionDef expressionDef)
-    {
+  public static class DoubleValueBoundaryScanner extends ValueBoundaryScanner {
+    public DoubleValueBoundaryScanner(BoundaryDef bndDef, Order order,
+        PTFExpressionDef expressionDef) {
       super(bndDef,order,expressionDef);
     }
 
     @Override
-    public boolean isGreater(Object v1, Object v2, int amt)
-    {
+    public boolean isGreater(Object v1, Object v2, int amt) {
       double d1 = PrimitiveObjectInspectorUtils.getDouble(v1,
           (PrimitiveObjectInspector) expressionDef.getOI());
       double d2 = PrimitiveObjectInspectorUtils.getDouble(v2,
@@ -782,8 +737,7 @@ public class WindowingTableFunction exte
     }
 
     @Override
-    public boolean isEqual(Object v1, Object v2)
-    {
+    public boolean isEqual(Object v1, Object v2) {
       double d1 = PrimitiveObjectInspectorUtils.getDouble(v1,
           (PrimitiveObjectInspector) expressionDef.getOI());
       double d2 = PrimitiveObjectInspectorUtils.getDouble(v2,
@@ -792,16 +746,14 @@ public class WindowingTableFunction exte
     }
   }
 
-  public static class StringValueBoundaryScanner extends ValueBoundaryScanner
-  {
-    public StringValueBoundaryScanner(BoundaryDef bndDef, Order order, PTFExpressionDef expressionDef)
-    {
+  public static class StringValueBoundaryScanner extends ValueBoundaryScanner {
+    public StringValueBoundaryScanner(BoundaryDef bndDef, Order order,
+        PTFExpressionDef expressionDef) {
       super(bndDef,order,expressionDef);
     }
 
     @Override
-    public boolean isGreater(Object v1, Object v2, int amt)
-    {
+    public boolean isGreater(Object v1, Object v2, int amt) {
       String s1 = PrimitiveObjectInspectorUtils.getString(v1,
           (PrimitiveObjectInspector) expressionDef.getOI());
       String s2 = PrimitiveObjectInspectorUtils.getString(v2,
@@ -810,8 +762,7 @@ public class WindowingTableFunction exte
     }
 
     @Override
-    public boolean isEqual(Object v1, Object v2)
-    {
+    public boolean isEqual(Object v1, Object v2) {
       String s1 = PrimitiveObjectInspectorUtils.getString(v1,
           (PrimitiveObjectInspector) expressionDef.getOI());
       String s2 = PrimitiveObjectInspectorUtils.getString(v2,
@@ -820,26 +771,22 @@ public class WindowingTableFunction exte
     }
   }
 
-  public static class SameList<E> extends AbstractList<E>
-  {
+  public static class SameList<E> extends AbstractList<E> {
     int sz;
     E val;
 
-    public SameList(int sz, E val)
-    {
+    public SameList(int sz, E val) {
       this.sz = sz;
       this.val = val;
     }
 
     @Override
-    public E get(int index)
-    {
+    public E get(int index) {
       return val;
     }
 
     @Override
-    public int size()
-    {
+    public int size() {
       return sz;
     }
 

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Fri Aug 30 19:46:15 2013
@@ -217,6 +217,10 @@ public class QTestUtil {
       convertPathsFromWindowsToHdfs();
     }
 
+    // Plug verifying metastore in for testing.
+    conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
+        "org.apache.hadoop.hive.metastore.VerifyingObjectStore");
+
     if (miniMr) {
       assert dfs != null;
       assert mr != null;

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java Fri Aug 30 19:46:15 2013
@@ -108,8 +108,12 @@ public class TestNewIntegerEncoding {
           Row.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     writer.addRow(new Row(111, 1111L));
     writer.addRow(new Row(111, 1111L));
     writer.addRow(new Row(111, 1111L));
@@ -138,9 +142,12 @@ public class TestNewIntegerEncoding {
         9, 2, 6, 3, 7, 1, 9, 2, 6, 2000, 2, 1, 1, 1, 1, 1, 3, 7, 1, 9, 2, 6, 1,
         1, 1, 1, 1 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
-    conf.set("hive.exec.orc.write.format", "0.11");
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .compress(CompressionKind.NONE)
+                                         .version(OrcFile.Version.V_0_11)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -170,8 +177,12 @@ public class TestNewIntegerEncoding {
         1, 1, 1, 1 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -197,8 +208,12 @@ public class TestNewIntegerEncoding {
     long[] inp = new long[] { -500, -400, -350, -325, -310 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -224,8 +239,12 @@ public class TestNewIntegerEncoding {
     long[] inp = new long[] { -500, -600, -650, -675, -710 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -251,8 +270,12 @@ public class TestNewIntegerEncoding {
     long[] inp = new long[] { 500, 400, 350, 325, 310 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -278,8 +301,12 @@ public class TestNewIntegerEncoding {
     long[] inp = new long[] { 500, 600, 650, 675, 710 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -305,8 +332,11 @@ public class TestNewIntegerEncoding {
     List<Long> input = Lists.newArrayList();
     input.add((long) Integer.MIN_VALUE);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -332,8 +362,12 @@ public class TestNewIntegerEncoding {
     List<Long> input = Lists.newArrayList();
     input.add((long) Integer.MAX_VALUE);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -359,8 +393,12 @@ public class TestNewIntegerEncoding {
     List<Long> input = Lists.newArrayList();
     input.add(Long.MIN_VALUE);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -386,8 +424,12 @@ public class TestNewIntegerEncoding {
     List<Long> input = Lists.newArrayList();
     input.add(Long.MAX_VALUE);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -416,8 +458,12 @@ public class TestNewIntegerEncoding {
       input.add((long) rand.nextInt());
     }
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -446,8 +492,12 @@ public class TestNewIntegerEncoding {
       input.add(rand.nextLong());
     }
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -484,8 +534,12 @@ public class TestNewIntegerEncoding {
         2, 16 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -522,8 +576,12 @@ public class TestNewIntegerEncoding {
         2, 16 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -560,8 +618,12 @@ public class TestNewIntegerEncoding {
         2, 16 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -589,8 +651,12 @@ public class TestNewIntegerEncoding {
         6, 8, 7, 9, 9, 11, 33, 11, 3, 7, 4, 6, 10, 14, 12, 5, 14, 7, 6 };
     List<Long> input = Lists.newArrayList(Longs.asList(inp));
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -620,8 +686,12 @@ public class TestNewIntegerEncoding {
     }
     input.set(0, 20000L);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -651,8 +721,12 @@ public class TestNewIntegerEncoding {
     }
     input.set(1, 20000L);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -682,8 +756,11 @@ public class TestNewIntegerEncoding {
     }
     input.set(255, 20000L);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -713,8 +790,11 @@ public class TestNewIntegerEncoding {
     }
     input.set(256, 20000L);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -744,8 +824,11 @@ public class TestNewIntegerEncoding {
     }
     input.set(510, 20000L);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -775,8 +858,11 @@ public class TestNewIntegerEncoding {
     }
     input.set(511, 20000L);
 
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
     for(Long l : input) {
       writer.addRow(l);
     }
@@ -804,9 +890,13 @@ public class TestNewIntegerEncoding {
     for(int i = 0; i < 100000; i++) {
       input.add((long) rand.nextInt());
     }
-    conf.set("hive.exec.orc.write.format", "0.11");
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .compress(CompressionKind.NONE)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000)
+                                         .version(OrcFile.Version.V_0_11));
     for(Long l : input) {
       writer.addRow(l);
     }

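These ORC test hunks replace the deprecated positional OrcFile.createWriter(fs, path, conf, inspector, stripeSize, compression, bufferSize, rowIndexStride) call with the WriterOptions builder. A minimal sketch of the new style, built only from calls that appear in the hunks; testFilePath, conf, inspector, and the Row type are assumed to be set up as in the tests above:

    // Builder-style writer construction used throughout these tests.
    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)
            .stripeSize(100000)                // bytes per stripe
            .compress(CompressionKind.NONE)    // the ZLIB cases above simply omit this call
            .bufferSize(10000)                 // compression buffer size
            .version(OrcFile.Version.V_0_11)); // only for the 0.11-format tests
    writer.addRow(new Row(111, 1111L));
    writer.close();
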
Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Fri Aug 30 19:46:15 2013
@@ -193,8 +193,11 @@ public class TestOrcFile {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
     writer.addRow(new BigRow(false, (byte) 1, (short) 1024, 65536,
         Long.MAX_VALUE, (float) 1.0, -15.0, bytes(0,1,2,3,4), "hi",
         new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
@@ -425,8 +428,13 @@ public class TestOrcFile {
           (InnerStruct.class,
               ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.NONE, 100, 1000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100)
+                                         .rowIndexStride(1000));
     Random r1 = new Random(1);
     Random r2 = new Random(2);
     int x;
@@ -508,8 +516,12 @@ public class TestOrcFile {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.NONE, 100, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100));
     writer.close();
     Reader reader = OrcFile.createReader(fs, testFilePath);
     assertEquals(false, reader.rows(null).hasNext());
@@ -528,9 +540,14 @@ public class TestOrcFile {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.NONE, 100, 10000);
-    writer.addUserMetadata("my.meta", byteBuf(1, 2, 3, 4, 5, 6, 7, -1, -2, 127, -128));
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100));
+    writer.addUserMetadata("my.meta", byteBuf(1, 2, 3, 4, 5, 6, 7, -1, -2, 127,
+                                              -128));
     writer.addUserMetadata("clobber", byteBuf(1,2,3));
     writer.addUserMetadata("clobber", byteBuf(4,3,2,1));
     ByteBuffer bigBuf = ByteBuffer.allocate(40000);
@@ -594,8 +611,13 @@ public class TestOrcFile {
       inspector = OrcStruct.createObjectInspector(0, types);
     }
     HiveDecimal maxValue = new HiveDecimal("100000000000000000000");
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.NONE, 100, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100)
+                                         .blockPadding(false));
     OrcStruct row = new OrcStruct(3);
     OrcUnion union = new OrcUnion();
     row.setFieldValue(1, union);
@@ -773,8 +795,12 @@ public class TestOrcFile {
           (InnerStruct.class,
               ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.SNAPPY, 100, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.SNAPPY)
+                                         .bufferSize(100));
     Random rand = new Random(12);
     for(int i=0; i < 10000; ++i) {
       writer.addRow(new InnerStruct(rand.nextInt(),
@@ -808,8 +834,13 @@ public class TestOrcFile {
           (InnerStruct.class,
               ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        5000, CompressionKind.SNAPPY, 1000, 0);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(5000)
+                                         .compress(CompressionKind.SNAPPY)
+                                         .bufferSize(1000)
+                                         .rowIndexStride(0));
     Random rand = new Random(24);
     for(int i=0; i < 10000; ++i) {
       InnerStruct row = new InnerStruct(rand.nextInt(),
@@ -849,8 +880,12 @@ public class TestOrcFile {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        200000, CompressionKind.ZLIB, 65536, 1000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(200000)
+                                         .bufferSize(65536)
+                                         .rowIndexStride(1000));
     Random rand = new Random(42);
     final int COUNT=32768;
     long[] intValues= new long[COUNT];
@@ -1032,8 +1067,14 @@ public class TestOrcFile {
               ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
     MyMemoryManager memory = new MyMemoryManager(conf, 10000, 0.1);
-    Writer writer = new WriterImpl(fs, testFilePath, conf, inspector,
-        50000, CompressionKind.NONE, 100, 0, memory);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .compress(CompressionKind.NONE)
+                                         .stripeSize(50000)
+                                         .bufferSize(100)
+                                         .rowIndexStride(0)
+                                         .memory(memory));
     assertEquals(testFilePath, memory.path);
     for(int i=0; i < 2500; ++i) {
       writer.addRow(new InnerStruct(i*300, Integer.toHexString(10*i)));

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java Fri Aug 30 19:46:15 2013
@@ -91,15 +91,21 @@ public class TestOrcNullOptimization {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (MyStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     Random rand = new Random(100);
-    writer.addRow(new MyStruct(null, null, true, Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(null, null, true,
+                               Lists.newArrayList(new InnerStruct(100))));
     for (int i = 2; i < 20000; i++) {
       writer.addRow(new MyStruct(rand.nextInt(1), "a", true, Lists
           .newArrayList(new InnerStruct(100))));
     }
-    writer.addRow(new MyStruct(null, null, true, Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(null, null, true,
+                               Lists.newArrayList(new InnerStruct(100))));
     writer.close();
 
     Reader reader = OrcFile.createReader(fs, testFilePath);
@@ -117,7 +123,8 @@ public class TestOrcNullOptimization {
 
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMaximum());
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMinimum());
-    assertEquals(19998, ((StringColumnStatistics) stats[2]).getNumberOfValues());
+    assertEquals(19998,
+                 ((StringColumnStatistics) stats[2]).getNumberOfValues());
     assertEquals("count: 19998 min: a max: a",
         stats[2].toString());
 
@@ -142,8 +149,10 @@ public class TestOrcNullOptimization {
     List<Boolean> got = Lists.newArrayList();
     // check if the strip footer contains PRESENT stream
     for (StripeInformation sinfo : reader.getStripes()) {
-      OrcProto.StripeFooter sf = ((RecordReaderImpl) rows).readStripeFooter(sinfo);
-      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
+      OrcProto.StripeFooter sf =
+        ((RecordReaderImpl) rows).readStripeFooter(sinfo);
+      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString())
+              != -1);
     }
     assertEquals(expected, got);
 
@@ -154,7 +163,8 @@ public class TestOrcNullOptimization {
     assertNull(row.getFieldValue(1));
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     rows.seekToRow(19998);
     // last-1 row
@@ -164,7 +174,8 @@ public class TestOrcNullOptimization {
     assertEquals(new IntWritable(0), row.getFieldValue(0));
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     // last row
     row = (OrcStruct) rows.next(row);
@@ -173,7 +184,8 @@ public class TestOrcNullOptimization {
     assertNull(row.getFieldValue(1));
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     rows.close();
   }
@@ -185,14 +197,19 @@ public class TestOrcNullOptimization {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (MyStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     Random rand = new Random(100);
     for (int i = 1; i < 20000; i++) {
       writer.addRow(new MyStruct(rand.nextInt(1), "a", true, Lists
           .newArrayList(new InnerStruct(100))));
     }
-    writer.addRow(new MyStruct(0, "b", true, Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(0, "b", true,
+                               Lists.newArrayList(new InnerStruct(100))));
     writer.close();
 
     Reader reader = OrcFile.createReader(fs, testFilePath);
@@ -210,7 +227,8 @@ public class TestOrcNullOptimization {
 
     assertEquals("b", ((StringColumnStatistics) stats[2]).getMaximum());
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMinimum());
-    assertEquals(20000, ((StringColumnStatistics) stats[2]).getNumberOfValues());
+    assertEquals(20000,
+                 ((StringColumnStatistics) stats[2]).getNumberOfValues());
     assertEquals("count: 20000 min: a max: b",
         stats[2].toString());
 
@@ -233,8 +251,10 @@ public class TestOrcNullOptimization {
     List<Boolean> got = Lists.newArrayList();
     // check if the strip footer contains PRESENT stream
     for (StripeInformation sinfo : reader.getStripes()) {
-      OrcProto.StripeFooter sf = ((RecordReaderImpl) rows).readStripeFooter(sinfo);
-      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
+      OrcProto.StripeFooter sf =
+        ((RecordReaderImpl) rows).readStripeFooter(sinfo);
+      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString())
+              != -1);
     }
     assertEquals(expected, got);
 
@@ -247,7 +267,8 @@ public class TestOrcNullOptimization {
     assertEquals("a", row.getFieldValue(1).toString());
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+                 ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                   getFieldValue(0));
 
     // last row
     row = (OrcStruct) rows.next(row);
@@ -257,8 +278,8 @@ public class TestOrcNullOptimization {
     assertEquals("b", row.getFieldValue(1).toString());
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
-
+                 ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                   getFieldValue(0));
     rows.close();
   }
 
@@ -269,16 +290,27 @@ public class TestOrcNullOptimization {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (MyStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
-    writer.addRow(new MyStruct(3, "a", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(null, "b", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(3, null, false, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(3, "d", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(2, "e", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(2, "f", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(2, "g", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(2, "h", true, Lists.newArrayList(new InnerStruct(100))));
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
+    writer.addRow(new MyStruct(3, "a", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(null, "b", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(3, null, false,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(3, "d", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(2, "e", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(2, "f", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(2, "g", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(2, "h", true,
+                               Lists.newArrayList(new InnerStruct(100))));
     writer.close();
 
     Reader reader = OrcFile.createReader(fs, testFilePath);
@@ -319,8 +351,10 @@ public class TestOrcNullOptimization {
     List<Boolean> got = Lists.newArrayList();
     // check if the strip footer contains PRESENT stream
     for (StripeInformation sinfo : reader.getStripes()) {
-      OrcProto.StripeFooter sf = ((RecordReaderImpl) rows).readStripeFooter(sinfo);
-      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
+      OrcProto.StripeFooter sf =
+        ((RecordReaderImpl) rows).readStripeFooter(sinfo);
+      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString())
+              != -1);
     }
     assertEquals(expected, got);
 
@@ -331,7 +365,8 @@ public class TestOrcNullOptimization {
     assertEquals("a", row.getFieldValue(1).toString());
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     // row 2
     row = (OrcStruct) rows.next(row);
@@ -340,7 +375,8 @@ public class TestOrcNullOptimization {
     assertEquals("b", row.getFieldValue(1).toString());
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     // row 3
     row = (OrcStruct) rows.next(row);
@@ -349,7 +385,8 @@ public class TestOrcNullOptimization {
     assertEquals(new IntWritable(3), row.getFieldValue(0));
     assertEquals(new BooleanWritable(false), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+                 ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
     rows.close();
   }
 }
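
For readers following the TestOrcNullOptimization change above: the positional
OrcFile.createWriter(fs, path, conf, inspector, ...) overload is replaced by the
WriterOptions builder. A minimal sketch of the new call shape, assuming the ORC
classes under org.apache.hadoop.hive.ql.io.orc and purely illustrative option
values (the wrapper class and method names are hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.io.orc.OrcFile;
    import org.apache.hadoop.hive.ql.io.orc.Writer;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    public class OrcWriterSketch {
      // Opens a writer through the builder-style options API the test now uses.
      public static Writer openWriter(Configuration conf, Path path,
                                      ObjectInspector inspector) throws IOException {
        return OrcFile.createWriter(path,
            OrcFile.writerOptions(conf)
                .inspector(inspector)   // row type of the records to write
                .stripeSize(100000)     // target stripe size in bytes
                .bufferSize(10000));    // compression buffer size in bytes
      }
    }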

Modified: hive/branches/tez/ql/src/test/resources/orc-file-dump.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/resources/orc-file-dump.out?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/resources/orc-file-dump.out (original)
+++ hive/branches/tez/ql/src/test/resources/orc-file-dump.out Fri Aug 30 19:46:15 2013
@@ -53,31 +53,31 @@ Stripes:
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
     Encoding column 3: DICTIONARY_V2
-  Stripe: offset: 191873 data: 63796 rows: 5000 tail: 74 index: 119
-    Stream: column 0 section ROW_INDEX start: 191873 length 10
-    Stream: column 1 section ROW_INDEX start: 191883 length 35
-    Stream: column 2 section ROW_INDEX start: 191918 length 39
-    Stream: column 3 section ROW_INDEX start: 191957 length 35
-    Stream: column 1 section DATA start: 191992 length 20029
-    Stream: column 2 section DATA start: 212021 length 40035
-    Stream: column 3 section DATA start: 252056 length 3574
-    Stream: column 3 section LENGTH start: 255630 length 25
-    Stream: column 3 section DICTIONARY_DATA start: 255655 length 133
+  Stripe: offset: 200000 data: 63796 rows: 5000 tail: 74 index: 119
+    Stream: column 0 section ROW_INDEX start: 200000 length 10
+    Stream: column 1 section ROW_INDEX start: 200010 length 35
+    Stream: column 2 section ROW_INDEX start: 200045 length 39
+    Stream: column 3 section ROW_INDEX start: 200084 length 35
+    Stream: column 1 section DATA start: 200119 length 20029
+    Stream: column 2 section DATA start: 220148 length 40035
+    Stream: column 3 section DATA start: 260183 length 3574
+    Stream: column 3 section LENGTH start: 263757 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 263782 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
     Encoding column 3: DICTIONARY_V2
-  Stripe: offset: 255862 data: 12940 rows: 1000 tail: 71 index: 120
-    Stream: column 0 section ROW_INDEX start: 255862 length 10
-    Stream: column 1 section ROW_INDEX start: 255872 length 36
-    Stream: column 2 section ROW_INDEX start: 255908 length 39
-    Stream: column 3 section ROW_INDEX start: 255947 length 35
-    Stream: column 1 section DATA start: 255982 length 4007
-    Stream: column 2 section DATA start: 259989 length 8007
-    Stream: column 3 section DATA start: 267996 length 768
-    Stream: column 3 section LENGTH start: 268764 length 25
-    Stream: column 3 section DICTIONARY_DATA start: 268789 length 133
+  Stripe: offset: 263989 data: 12940 rows: 1000 tail: 71 index: 120
+    Stream: column 0 section ROW_INDEX start: 263989 length 10
+    Stream: column 1 section ROW_INDEX start: 263999 length 36
+    Stream: column 2 section ROW_INDEX start: 264035 length 39
+    Stream: column 3 section ROW_INDEX start: 264074 length 35
+    Stream: column 1 section DATA start: 264109 length 4007
+    Stream: column 2 section DATA start: 268116 length 8007
+    Stream: column 3 section DATA start: 276123 length 768
+    Stream: column 3 section LENGTH start: 276891 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 276916 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
-    Encoding column 3: DICTIONARY_V2
\ No newline at end of file
+    Encoding column 3: DICTIONARY_V2

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContext.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContext.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContext.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContext.java Fri Aug 30 19:46:15 2013
@@ -26,10 +26,6 @@ import org.apache.hadoop.hive.conf.HiveC
  */
 public interface HiveSessionHookContext {
 
-  enum State {OPEN, CLOSE}
-
-  public State getState();
-
   /**
    * Retrieve session conf
    * @return

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContextImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContextImpl.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContextImpl.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContextImpl.java Fri Aug 30 19:46:15 2013
@@ -29,16 +29,9 @@ import org.apache.hadoop.hive.conf.HiveC
 public class HiveSessionHookContextImpl implements HiveSessionHookContext {
 
   private final HiveSession hiveSession;
-  private final State state;
 
-  HiveSessionHookContextImpl(HiveSession hiveSession, State state) {
+  HiveSessionHookContextImpl(HiveSession hiveSession) {
     this.hiveSession = hiveSession;
-    this.state = state;
-  }
-
-  @Override
-  public State getState() {
-    return state;
   }
 
   @Override

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java Fri Aug 30 19:46:15 2013
@@ -92,7 +92,7 @@ public class SessionManager extends Comp
       handleToSession.put(session.getSessionHandle(), session);
     }
     try {
-      executeSessionHooks(session, HiveSessionHookContext.State.OPEN);
+      executeSessionHooks(session);
     } catch (Exception e) {
       throw new HiveSQLException("Failed to execute session hooks", e);
     }
@@ -107,11 +107,6 @@ public class SessionManager extends Comp
     if (session == null) {
       throw new HiveSQLException("Session does not exist!");
     }
-    try {
-      executeSessionHooks(session, HiveSessionHookContext.State.CLOSE);
-    } catch (Exception e) {
-      throw new HiveSQLException("Failed to execute session hooks", e);
-    }
     session.close();
   }
 
@@ -162,12 +157,11 @@ public class SessionManager extends Comp
   }
 
   // execute session hooks
-  private void executeSessionHooks(HiveSession session, HiveSessionHookContext.State state)
-      throws Exception {
+  private void executeSessionHooks(HiveSession session) throws Exception {
     List<HiveSessionHook> sessionHooks = HookUtils.getHooks(hiveConf,
         HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK, HiveSessionHook.class);
     for (HiveSessionHook sessionHook : sessionHooks) {
-      sessionHook.run(new HiveSessionHookContextImpl(session, state));
+      sessionHook.run(new HiveSessionHookContextImpl(session));
     }
   }
 }
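
With the State enum removed, session hooks now run only when a session is
opened (the CLOSE-time invocation above is dropped), so an implementation no
longer needs to branch on open versus close. A minimal sketch of such a hook,
assuming getSessionConf() returns the session's HiveConf and the hook class is
registered via hive.server2.session.hook (the property behind
HIVE_SERVER2_SESSION_HOOK); the class name and the setting it touches are
illustrative only:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.cli.HiveSQLException;
    import org.apache.hive.service.cli.session.HiveSessionHook;
    import org.apache.hive.service.cli.session.HiveSessionHookContext;

    public class SessionDefaultsHook implements HiveSessionHook {
      @Override
      public void run(HiveSessionHookContext context) throws HiveSQLException {
        // Invoked once per session open by SessionManager.executeSessionHooks.
        HiveConf conf = context.getSessionConf();
        // Illustrative only: pin a per-session setting at open time.
        conf.set("hive.exec.scratchdir", "/tmp/hive-sessions");
      }
    }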

Modified: hive/branches/tez/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/tez/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Fri Aug 30 19:46:15 2013
@@ -735,5 +735,12 @@ public class Hadoop20Shims implements Ha
     throw new UnsupportedOperationException(
         "Kerberos not supported in current hadoop version");
   }
-
+  @Override
+  public HCatHadoopShims getHCatShim() {
+      throw new UnsupportedOperationException("HCatalog does not support Hadoop 0.20.x");
+  }
+  @Override
+  public WebHCatJTShim getWebHCatShim(Configuration conf) throws IOException {
+      throw new UnsupportedOperationException("WebHCat does not support Hadoop 0.20.x");
+  }
 }

Modified: hive/branches/tez/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/branches/tez/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Fri Aug 30 19:46:15 2013
@@ -18,21 +18,30 @@
 package org.apache.hadoop.hive.shims;
 
 import java.io.IOException;
+import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URL;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.filecache.DistributedCache;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.JobTracker;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskLogServlet;
+import org.apache.hadoop.mapred.WebHCatJTShim20S;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
 
@@ -195,4 +204,129 @@ public class Hadoop20SShims extends Hado
       cluster.shutdown();
     }
   }
+  private volatile HCatHadoopShims hcatShimInstance;
+  @Override
+  public HCatHadoopShims getHCatShim() {
+    if(hcatShimInstance == null) {
+      hcatShimInstance = new HCatHadoopShims20S();
+    }
+    return hcatShimInstance;
+  }
+  private final class HCatHadoopShims20S implements HCatHadoopShims {
+    @Override
+    public TaskID createTaskID() {
+      return new TaskID();
+    }
+
+    @Override
+    public TaskAttemptID createTaskAttemptID() {
+      return new TaskAttemptID();
+    }
+
+    @Override
+    public TaskAttemptContext createTaskAttemptContext(Configuration conf, TaskAttemptID taskId) {
+      return new TaskAttemptContext(conf, taskId);
+    }
+
+    @Override
+    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapred.JobConf conf,
+                 org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
+      org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
+      try {
+        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContext.class.getDeclaredConstructor(
+                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
+                Progressable.class);
+        construct.setAccessible(true);
+        newContext = (org.apache.hadoop.mapred.TaskAttemptContext)construct.newInstance(conf, taskId, progressable);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+      return newContext;
+    }
+
+    @Override
+    public JobContext createJobContext(Configuration conf,
+                                       JobID jobId) {
+      return new JobContext(conf, jobId);
+    }
+
+    @Override
+    public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
+                                   org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
+      org.apache.hadoop.mapred.JobContext newContext = null;
+      try {
+        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.JobContext.class.getDeclaredConstructor(
+                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapreduce.JobID.class,
+                Progressable.class);
+        construct.setAccessible(true);
+        newContext = (org.apache.hadoop.mapred.JobContext)construct.newInstance(conf, jobId, progressable);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+      return newContext;
+    }
+
+    @Override
+    public void commitJob(OutputFormat outputFormat, Job job) throws IOException {
+      if( job.getConfiguration().get("mapred.job.tracker", "").equalsIgnoreCase("local") ) {
+        try {
+          //In local mode, mapreduce will not call OutputCommitter.cleanupJob.
+          //Calling it from here so that the partition publish happens.
+          //This call needs to be removed after MAPREDUCE-1447 is fixed.
+          outputFormat.getOutputCommitter(createTaskAttemptContext(
+                  job.getConfiguration(), createTaskAttemptID())).commitJob(job);
+        } catch (IOException e) {
+          throw new IOException("Failed to cleanup job",e);
+        } catch (InterruptedException e) {
+          throw new IOException("Failed to cleanup job",e);
+        }
+      }
+    }
+
+    @Override
+    public void abortJob(OutputFormat outputFormat, Job job) throws IOException {
+      if (job.getConfiguration().get("mapred.job.tracker", "")
+              .equalsIgnoreCase("local")) {
+        try {
+          // This call needs to be removed after MAPREDUCE-1447 is fixed.
+          outputFormat.getOutputCommitter(createTaskAttemptContext(
+                  job.getConfiguration(), new TaskAttemptID())).abortJob(job, JobStatus.State.FAILED);
+        } catch (IOException e) {
+          throw new IOException("Failed to abort job", e);
+        } catch (InterruptedException e) {
+          throw new IOException("Failed to abort job", e);
+        }
+      }
+    }
+
+    @Override
+    public InetSocketAddress getResourceManagerAddress(Configuration conf)
+    {
+      return JobTracker.getAddress(conf);
+    }
+
+    @Override
+    public String getPropertyName(PropertyName name) {
+      switch (name) {
+        case CACHE_ARCHIVES:
+          return DistributedCache.CACHE_ARCHIVES;
+        case CACHE_FILES:
+          return DistributedCache.CACHE_FILES;
+        case CACHE_SYMLINK:
+          return DistributedCache.CACHE_SYMLINK;
+      }
+
+      return "";
+    }
+
+    @Override
+    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException {
+      // In Hadoop 1.x the file system's URI scheme is sufficient to determine whether the file is in HDFS
+      return "hdfs".equals(fs.getUri().getScheme());
+    }
+  }
+  @Override
+  public WebHCatJTShim getWebHCatShim(Configuration conf) throws IOException {
+    return new WebHCatJTShim20S(conf);//this has state, so can't be cached
+  }
 }

Modified: hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Fri Aug 30 19:46:15 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.shims;
 
 import java.io.IOException;
 import java.lang.Integer;
+import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Map;
@@ -28,17 +29,24 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
-import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.WebHCatJTShim23;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.mapreduce.util.HostUtil;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
 
@@ -230,4 +238,104 @@ public class Hadoop23Shims extends Hadoo
       cluster.shutdown();
     }
   }
+  private volatile HCatHadoopShims hcatShimInstance;
+  @Override
+  public HCatHadoopShims getHCatShim() {
+    if(hcatShimInstance == null) {
+      hcatShimInstance = new HCatHadoopShims23();
+    }
+    return hcatShimInstance;
+  }
+  private final class HCatHadoopShims23 implements HCatHadoopShims {
+    @Override
+    public TaskID createTaskID() {
+      return new TaskID("", 0, TaskType.MAP, 0);
+    }
+
+    @Override
+    public TaskAttemptID createTaskAttemptID() {
+      return new TaskAttemptID("", 0, TaskType.MAP, 0, 0);
+    }
+
+    @Override
+    public org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf,
+                                                                                   org.apache.hadoop.mapreduce.TaskAttemptID taskId) {
+      return new org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl(
+              conf instanceof JobConf? new JobConf(conf) : conf,
+              taskId);
+    }
+
+    @Override
+    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapred.JobConf conf,
+                                                                                org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
+      org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
+      try {
+        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContextImpl.class.getDeclaredConstructor(
+                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
+                Reporter.class);
+        construct.setAccessible(true);
+        newContext = (org.apache.hadoop.mapred.TaskAttemptContext) construct.newInstance(
+                new JobConf(conf), taskId, (Reporter) progressable);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+      return newContext;
+    }
+
+    @Override
+    public JobContext createJobContext(Configuration conf,
+                                       JobID jobId) {
+      return new JobContextImpl(conf instanceof JobConf? new JobConf(conf) : conf,
+              jobId);
+    }
+
+    @Override
+    public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
+                                                                org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
+      return new org.apache.hadoop.mapred.JobContextImpl(
+              new JobConf(conf), jobId, (org.apache.hadoop.mapred.Reporter) progressable);
+    }
+
+    @Override
+    public void commitJob(OutputFormat outputFormat, Job job) throws IOException {
+      // Do nothing as this was fixed by MAPREDUCE-1447.
+    }
+
+    @Override
+    public void abortJob(OutputFormat outputFormat, Job job) throws IOException {
+      // Do nothing as this was fixed by MAPREDUCE-1447.
+    }
+
+    @Override
+    public InetSocketAddress getResourceManagerAddress(Configuration conf) {
+      String addr = conf.get("yarn.resourcemanager.address", "localhost:8032");
+
+      return NetUtils.createSocketAddr(addr);
+    }
+
+    @Override
+    public String getPropertyName(PropertyName name) {
+      switch (name) {
+        case CACHE_ARCHIVES:
+          return MRJobConfig.CACHE_ARCHIVES;
+        case CACHE_FILES:
+          return MRJobConfig.CACHE_FILES;
+        case CACHE_SYMLINK:
+          return MRJobConfig.CACHE_SYMLINK;
+      }
+
+      return "";
+    }
+
+    @Override
+    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException {
+      // In the case of viewfs we need to look up where the actual file lives to know the filesystem in use;
+      // resolvePath is a reliable way of determining which file system the file is on.
+      return "hdfs".equals(fs.resolvePath(path).toUri().getScheme());
+    }
+  }
+  @Override
+  public WebHCatJTShim getWebHCatShim(Configuration conf) throws IOException {
+    return new WebHCatJTShim23(conf);//this has state, so can't be cached
+  }
 }
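
Callers on the HCatalog side are expected to reach these version-specific
implementations through the shim layer instead of constructing mapreduce
objects directly. A minimal usage sketch, assuming the usual
ShimLoader.getHadoopShims() entry point (the wrapper class is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.shims.HadoopShims.HCatHadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;

    public class HCatShimUsageSketch {
      // Builds a TaskAttemptContext without touching the constructor
      // differences between Hadoop 0.20S and 0.23; the shim picks the
      // right implementation at runtime.
      public static TaskAttemptContext newAttemptContext(Configuration conf) {
        HCatHadoopShims shim = ShimLoader.getHadoopShims().getHCatShim();
        TaskAttemptID attemptId = shim.createTaskAttemptID();
        return shim.createTaskAttemptContext(conf, attemptId);
      }
    }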

Modified: hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Fri Aug 30 19:46:15 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.shims;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -40,13 +41,19 @@ import org.apache.hadoop.mapred.ClusterS
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobProfile;
+import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 
@@ -461,4 +468,72 @@ public interface HadoopShims {
         Class<RecordReader<K, V>> rrClass) throws IOException;
   }
 
+  public HCatHadoopShims getHCatShim();
+  public interface HCatHadoopShims {
+
+    enum PropertyName {CACHE_ARCHIVES, CACHE_FILES, CACHE_SYMLINK}
+
+    public TaskID createTaskID();
+
+    public TaskAttemptID createTaskAttemptID();
+
+    public org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf,
+                                                                                   TaskAttemptID taskId);
+
+    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(JobConf conf,
+                                                                                org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable);
+
+    public JobContext createJobContext(Configuration conf, JobID jobId);
+
+    public org.apache.hadoop.mapred.JobContext createJobContext(JobConf conf, JobID jobId, Progressable progressable);
+
+    public void commitJob(OutputFormat outputFormat, Job job) throws IOException;
+
+    public void abortJob(OutputFormat outputFormat, Job job) throws IOException;
+
+    /* Referring to job tracker in 0.20 and resource manager in 0.23 */
+    public InetSocketAddress getResourceManagerAddress(Configuration conf);
+
+    public String getPropertyName(PropertyName name);
+
+    /**
+     * Checks if file is in HDFS filesystem.
+     *
+     * @param fs
+     * @param path
+     * @return true if the file is in HDFS, false if the file is in other file systems.
+     */
+    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException;
+  }
+  /**
+   * Provides a Hadoop JobTracker shim.
+   * @param conf not {@code null}
+   */
+  public WebHCatJTShim getWebHCatShim(Configuration conf) throws IOException;
+  public interface WebHCatJTShim {
+    /**
+     * Grab a handle to a job that is already known to the JobTracker.
+     *
+     * @return Profile of the job, or null if not found.
+     */
+    public JobProfile getJobProfile(org.apache.hadoop.mapred.JobID jobid) throws IOException;
+    /**
+     * Grab a handle to a job that is already known to the JobTracker.
+     *
+     * @return Status of the job, or null if not found.
+     */
+    public JobStatus getJobStatus(org.apache.hadoop.mapred.JobID jobid) throws IOException;
+    /**
+     * Kill a job.
+     */
+    public void killJob(org.apache.hadoop.mapred.JobID jobid) throws IOException;
+    /**
+     * Get all the jobs submitted.
+     */
+    public JobStatus[] getAllJobs() throws IOException;
+    /**
+     * Close the connection to the Job Tracker.
+     */
+    public void close();
+  }
 }
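
A companion sketch for the new WebHCatJTShim: the 0.20S and 0.23 comments above
note that the shim is stateful, so it is obtained per call via getWebHCatShim(conf)
and closed when done. Assuming the same ShimLoader entry point (the wrapper
class is hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.mapred.JobID;
    import org.apache.hadoop.mapred.JobStatus;

    public class WebHCatShimUsageSketch {
      // Looks up a submitted job's status through the version-neutral shim.
      public static JobStatus lookupStatus(Configuration conf, JobID jobId)
          throws IOException {
        WebHCatJTShim tracker = ShimLoader.getHadoopShims().getWebHCatShim(conf);
        try {
          return tracker.getJobStatus(jobId);  // null if the job is not known
        } finally {
          tracker.close();
        }
      }
    }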