Posted to commits@hive.apache.org by br...@apache.org on 2014/10/30 17:22:48 UTC

svn commit: r1635536 [15/28] - in /hive/branches/spark: ./ accumulo-handler/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/columns/ accumulo-handler/src/test/org/apache/hado...

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Thu Oct 30 16:22:33 2014
@@ -247,16 +247,16 @@ public class TestVectorizationContext {
     children5.add(col6Expr);
     modExpr.setChildren(children5);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    columnMap.put("col2", 2);
-    columnMap.put("col3", 3);
-    columnMap.put("col4", 4);
-    columnMap.put("col5", 5);
-    columnMap.put("col6", 6);
+    VectorizationContext vc = new VectorizationContext();
+    vc.addInitialColumn("col1");
+    vc.addInitialColumn("col2");
+    vc.addInitialColumn("col3");
+    vc.addInitialColumn("col4");
+    vc.addInitialColumn("col5");
+    vc.addInitialColumn("col6");
+    vc.finishedAddingInitialColumns();
 
     //Generate vectorized expression
-    VectorizationContext vc = new VectorizationContext(columnMap, 6);
     VectorExpression ve = vc.getVectorExpression(sumExpr, VectorExpressionDescriptor.Mode.PROJECTION);
 
     //Verify vectorized expression
@@ -293,11 +293,11 @@ public class TestVectorizationContext {
     children1.add(constDesc);
     exprDesc.setChildren(children1);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    columnMap.put("col2", 2);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -318,11 +318,11 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    columnMap.put("col2", 2);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -341,7 +341,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -360,7 +360,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -378,7 +378,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -395,7 +395,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -412,7 +412,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -432,10 +432,9 @@ public class TestVectorizationContext {
     ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.doubleTypeInfo, udf,
         children1);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 0);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col1");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
@@ -477,11 +476,11 @@ public class TestVectorizationContext {
     children3.add(lessExprDesc);
     andExprDesc.setChildren(children3);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 0);
-    columnMap.put("col2", 1);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -528,11 +527,10 @@ public class TestVectorizationContext {
     children3.add(col2Expr);
     andExprDesc.setChildren(children3);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 0);
-    columnMap.put("col2", 1);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
     VectorExpression veAnd = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(veAnd.getClass(), FilterExprAndExpr.class);
     assertEquals(veAnd.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
@@ -557,7 +555,7 @@ public class TestVectorizationContext {
     orExprDesc.setChildren(children4);
 
     //Allocate new Vectorization context to reset the intermediate columns.
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
     VectorExpression veOr = vc.getVectorExpression(orExprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(veOr.getClass(), FilterExprOrExpr.class);
     assertEquals(veOr.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
@@ -594,11 +592,11 @@ public class TestVectorizationContext {
     childOfNot.add(greaterExprDesc);
     notExpr.setChildren(childOfNot);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 0);
-    columnMap.put("col2", 1);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(notExpr, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -632,11 +630,10 @@ public class TestVectorizationContext {
     childOfIsNull.add(greaterExprDesc);
     isNullExpr.setChildren(childOfIsNull);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 0);
-    columnMap.put("col2", 1);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(isNullExpr, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -674,11 +671,10 @@ public class TestVectorizationContext {
     childOfNot.add(greaterExprDesc);
     isNotNullExpr.setChildren(childOfNot);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 0);
-    columnMap.put("col2", 1);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(isNotNullExpr, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -705,10 +701,9 @@ public class TestVectorizationContext {
     ExprNodeGenericFuncDesc scalarMinusConstant = new ExprNodeGenericFuncDesc(TypeInfoFactory.longTypeInfo,
         gudf, children);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("a", 0);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("a");
+    VectorizationContext vc = new VectorizationContext(columns);
     VectorExpression ve = vc.getVectorExpression(scalarMinusConstant, VectorExpressionDescriptor.Mode.PROJECTION);
 
     assertEquals(ve.getClass(), LongScalarSubtractLongColumn.class);
@@ -727,11 +722,11 @@ public class TestVectorizationContext {
     children1.add(constDesc);
     exprDesc.setChildren(children1);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    columnMap.put("col2", 2);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -746,9 +741,10 @@ public class TestVectorizationContext {
     children.add(col1Expr);
     ExprNodeGenericFuncDesc negExprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.longTypeInfo, gudf,
         children);
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    VectorizationContext vc = new VectorizationContext(columnMap, 1);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
@@ -763,9 +759,10 @@ public class TestVectorizationContext {
     children.add(col1Expr);
     ExprNodeGenericFuncDesc negExprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.doubleTypeInfo, gudf,
         children);
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    VectorizationContext vc = new VectorizationContext(columnMap, 1);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
@@ -788,10 +785,9 @@ public class TestVectorizationContext {
 
     scalarGreaterColExpr.setChildren(children);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("a", 0);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("a");
+    VectorizationContext vc = new VectorizationContext(columns);
     VectorExpression ve = vc.getVectorExpression(scalarGreaterColExpr, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(FilterLongScalarGreaterLongColumn.class, ve.getClass());
   }
@@ -812,10 +808,9 @@ public class TestVectorizationContext {
 
     colEqualScalar.setChildren(children);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("a", 0);
-
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("a");
+    VectorizationContext vc = new VectorizationContext(columns);
     VectorExpression ve = vc.getVectorExpression(colEqualScalar, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(FilterLongColEqualLongScalar.class, ve.getClass());
   }
@@ -836,9 +831,9 @@ public class TestVectorizationContext {
 
     colEqualScalar.setChildren(children);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("a", 0);
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("a");
+    VectorizationContext vc = new VectorizationContext(columns);
     VectorExpression ve = vc.getVectorExpression(colEqualScalar, VectorExpressionDescriptor.Mode.PROJECTION);
     assertEquals(LongColEqualLongScalar.class, ve.getClass());
   }
@@ -852,11 +847,10 @@ public class TestVectorizationContext {
     children.add(colDesc);
     stringUnary.setChildren(children);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("b", 0);
-    columnMap.put("a", 1);
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
-
+    List<String> columns = new ArrayList<String>();
+    columns.add("b");
+    columns.add("a");
+    VectorizationContext vc = new VectorizationContext(columns);
     GenericUDF stringLower = new GenericUDFLower();
     stringUnary.setGenericUDF(stringLower);
 
@@ -866,7 +860,7 @@ public class TestVectorizationContext {
     assertEquals(1, ((StringLower) ve).getColNum());
     assertEquals(2, ((StringLower) ve).getOutputColumn());
 
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
 
     ExprNodeGenericFuncDesc anotherUnary = new ExprNodeGenericFuncDesc();
     anotherUnary.setTypeInfo(TypeInfoFactory.stringTypeInfo);
@@ -898,10 +892,10 @@ public class TestVectorizationContext {
     children1.add(colDesc1);
     children2.add(colDesc2);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("b", 0);
-    columnMap.put("a", 1);
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("b");
+    columns.add("a");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     // Sin(double)
     GenericUDFBridge gudfBridge = new GenericUDFBridge("sin", false, UDFSin.class.getName());
@@ -989,10 +983,10 @@ public class TestVectorizationContext {
     List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
     children.add(colDesc1);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("b", 0);
-    columnMap.put("a", 1);
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("b");
+    columns.add("a");
+    VectorizationContext vc = new VectorizationContext(columns);
 
     //UDFYear
     GenericUDFBridge gudfBridge = new GenericUDFBridge("year", false, UDFYear.class.getName());
@@ -1026,10 +1020,12 @@ public class TestVectorizationContext {
     ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
         children1);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    columnMap.put("col2", 2);
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
+
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterStringColumnBetween);
 
@@ -1054,7 +1050,7 @@ public class TestVectorizationContext {
     exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
         children1);
 
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterCharColumnBetween);
 
@@ -1079,7 +1075,7 @@ public class TestVectorizationContext {
     exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
         children1);
 
-    vc = new VectorizationContext(columnMap, 2);
+    vc = new VectorizationContext(columns);
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterVarCharColumnBetween);
 
@@ -1144,10 +1140,11 @@ public class TestVectorizationContext {
     ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
         udf, children1);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    columnMap.put("col2", 2);
-    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    VectorizationContext vc = new VectorizationContext(columns);
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterStringColumnInList);
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
@@ -1197,11 +1194,12 @@ public class TestVectorizationContext {
     ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
         children1);
 
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 1);
-    columnMap.put("col2", 2);
-    columnMap.put("col3", 3);
-    VectorizationContext vc = new VectorizationContext(columnMap, 3);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    columns.add("col3");
+    VectorizationContext vc = new VectorizationContext(columns);
     VectorExpression ve = vc.getVectorExpression(exprDesc);
     assertTrue(ve instanceof IfExprLongColumnLongColumn);
 

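For reference, a minimal sketch of the two construction styles the updated test now exercises: an ordered column-name list passed to the constructor, or the incremental addInitialColumn form from the first hunk. The wrapper class, imports, and throws clause are assumptions added for illustration, not part of this commit; column numbers simply follow list order.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

    public class VectorizationContextSketch {

      // "col0" -> column 0, "col1" -> column 1, and so on.
      static VectorizationContext contextFor(String... names) {
        List<String> columns = new ArrayList<String>();
        for (String name : names) {
          columns.add(name);
        }
        return new VectorizationContext(columns);
      }

      // Incremental form: add names one at a time, then seal the initial column set.
      static VectorizationContext incrementalContextFor(String... names) {
        VectorizationContext vc = new VectorizationContext();
        for (String name : names) {
          vc.addInitialColumn(name);
        }
        vc.finishedAddingInitialColumns();
        return vc;
      }

      // Vectorization itself works the same as with the old (columnMap, count) constructor.
      static VectorExpression vectorizeFilter(ExprNodeDesc expr, String... names) throws Exception {
        return contextFor(names).getVectorExpression(expr, VectorExpressionDescriptor.Mode.FILTER);
      }
    }
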
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java Thu Oct 30 16:22:33 2014
@@ -217,11 +217,11 @@ public class TestAcidUtils {
     Path part = new MockPath(fs, "/tbl/part1");
     AcidUtils.Directory dir =
         AcidUtils.getAcidState(part, conf, new ValidTxnListImpl("150:"));
+    // The two original buckets won't be in the obsolete list because we don't look at those
+    // until we have determined there is no base.
     List<FileStatus> obsolete = dir.getObsolete();
-    assertEquals(3, obsolete.size());
+    assertEquals(1, obsolete.size());
     assertEquals("mock:/tbl/part1/base_5", obsolete.get(0).getPath().toString());
-    assertEquals("mock:/tbl/part1/000000_0", obsolete.get(1).getPath().toString());
-    assertEquals("mock:/tbl/part1/000001_1", obsolete.get(2).getPath().toString());
     assertEquals("mock:/tbl/part1/base_10", dir.getBaseDirectory().toString());
   }
 

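The updated assertions follow the rule stated in the new comment: pre-ACID bucket files are only examined once it is clear there is no base directory, so with base_10 present only the older base_5 is reported obsolete. A minimal fragment of the read-back pattern, assuming the test's conf fixture and enclosing throws clause, with a plain Path standing in for the test's MockPath:

    // Resolve the ACID state of a partition directory against a valid-transaction list.
    AcidUtils.Directory dir =
        AcidUtils.getAcidState(new Path("/tbl/part1"), conf, new ValidTxnListImpl("150:"));

    Path base = dir.getBaseDirectory();            // the current base, here base_10
    List<FileStatus> obsolete = dir.getObsolete(); // older bases/deltas superseded by that base
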
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java Thu Oct 30 16:22:33 2014
@@ -73,6 +73,7 @@ import org.apache.hadoop.hive.ql.io.sarg
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -1266,6 +1267,8 @@ public class TestInputOutputFormat {
     }
     conf.set("hive.io.file.readcolumn.ids", columnIds.toString());
     conf.set("partition_columns", "p");
+    conf.set(serdeConstants.LIST_COLUMNS, columnNames.toString());
+    conf.set(serdeConstants.LIST_COLUMN_TYPES, columnTypes.toString());
     MockFileSystem fs = (MockFileSystem) warehouseDir.getFileSystem(conf);
     fs.clear();
 
@@ -1295,8 +1298,8 @@ public class TestInputOutputFormat {
     }
     mapWork.setPathToAliases(aliasMap);
     mapWork.setPathToPartitionInfo(partMap);
-    mapWork.setScratchColumnMap(new HashMap<String, Map<String, Integer>>());
-    mapWork.setScratchColumnVectorTypes(new HashMap<String,
+    mapWork.setAllColumnVectorMaps(new HashMap<String, Map<String, Integer>>());
+    mapWork.setAllScratchColumnVectorTypeMaps(new HashMap<String,
         Map<Integer, String>>());
 
     // write the plan out

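Alongside the existing read-column ids, the vectorized read path in this test now also picks up column names and types from the job configuration. A minimal fragment showing the shape of those entries; the specific names and types are illustrative, not taken from the test:

    JobConf conf = new JobConf();
    conf.set("hive.io.file.readcolumn.ids", "0,1");
    conf.set("partition_columns", "p");
    // Comma-separated column names and colon-separated types, keyed by
    // serdeConstants.LIST_COLUMNS and serdeConstants.LIST_COLUMN_TYPES.
    conf.set(serdeConstants.LIST_COLUMNS, "x,y,p");
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "int:string:string");
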
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java Thu Oct 30 16:22:33 2014
@@ -56,6 +56,7 @@ import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 
 public class TestOrcRawRecordMerger {
@@ -574,12 +575,14 @@ public class TestOrcRawRecordMerger {
         OrcRecordUpdater.getOperation(event));
     assertEquals(new ReaderKey(0, BUCKET, 0, 200), id);
     assertEquals("update 1", getValue(event));
+    assertFalse(merger.isDelete(event));
 
     assertEquals(true, merger.next(id, event));
     assertEquals(OrcRecordUpdater.INSERT_OPERATION,
         OrcRecordUpdater.getOperation(event));
     assertEquals(new ReaderKey(0, BUCKET, 1, 0), id);
     assertEquals("second", getValue(event));
+    assertFalse(merger.isDelete(event));
 
     assertEquals(true, merger.next(id, event));
     assertEquals(OrcRecordUpdater.UPDATE_OPERATION,
@@ -616,6 +619,7 @@ public class TestOrcRawRecordMerger {
         OrcRecordUpdater.getOperation(event));
     assertEquals(new ReaderKey(0, BUCKET, 7, 200), id);
     assertNull(OrcRecordUpdater.getRow(event));
+    assertTrue(merger.isDelete(event));
 
     assertEquals(true, merger.next(id, event));
     assertEquals(OrcRecordUpdater.DELETE_OPERATION,

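The new assertions cover the merger's isDelete(...) accessor: delete events carry a null row payload, and a consumer can now query the flag directly instead of inferring it from the operation code. A minimal fragment of a read loop that skips deletes, reusing the merger/id/event variables from the test:

    // Drain the merger, ignoring delete events (their row payload is null).
    while (merger.next(id, event)) {
      if (merger.isDelete(event)) {
        continue;
      }
      Object row = OrcRecordUpdater.getRow(event);
      // ... process the surviving row ...
    }
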
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java Thu Oct 30 16:22:33 2014
@@ -73,7 +73,8 @@ public class TestMapredParquetOutputForm
             ParquetOutputFormat<ArrayWritable> realOutputFormat,
             JobConf jobConf,
             String finalOutPath,
-            Progressable progress
+            Progressable progress,
+            Properties tableProperties
             ) throws IOException {
           assertEquals(outputFormat, realOutputFormat);
           assertNotNull(jobConf.get(DataWritableWriteSupport.PARQUET_HIVE_SCHEMA));
@@ -87,4 +88,17 @@ public class TestMapredParquetOutputForm
       assertEquals("passed tests", e.getMessage());
     }
   }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testInvalidCompressionTableProperties() throws IOException {
+    Properties tableProps = new Properties();
+    tableProps.setProperty("parquet.compression", "unsupported");
+    tableProps.setProperty("columns", "foo,bar");
+    tableProps.setProperty("columns.types", "int:int");
+
+    JobConf jobConf = new JobConf();
+
+    new MapredParquetOutputFormat().getHiveRecordWriter(jobConf,
+            new Path("/foo"), null, false, tableProps, null);
+  }
 }

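The new negative test drives the table-properties overload with an unrecognized parquet.compression value. For contrast, a minimal sketch of the same call with a codec name Parquet does recognize; SNAPPY is an assumption about supported values, not something this patch asserts:

    Properties tableProps = new Properties();
    tableProps.setProperty("parquet.compression", "SNAPPY");
    tableProps.setProperty("columns", "foo,bar");
    tableProps.setProperty("columns.types", "int:int");

    // Same call shape as the test; with a recognized codec no IllegalArgumentException is expected.
    new MapredParquetOutputFormat().getHiveRecordWriter(new JobConf(),
        new Path("/foo"), null, false, tableProps, null);
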
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java Thu Oct 30 16:22:33 2014
@@ -2881,4 +2881,24 @@ public class TestSearchArgumentImpl {
         "leaf-3 = (NULL_SAFE_EQUALS a stinger)\n" +
         "expr = (and (not leaf-0) (not leaf-1) (not leaf-2) (not leaf-3))", sarg.toString());
   }
+
+  @Test
+  public void testBuilderFloat() throws Exception {
+    SearchArgument sarg =
+        SearchArgumentFactory.newBuilder()
+            .startAnd()
+            .lessThan("x", new Short((short) 22))
+            .lessThan("x1", new Integer(22))
+            .lessThanEquals("y", new HiveChar("hi", 10))
+            .equals("z", new Float("0.22"))
+            .equals("z1", new Double(0.22))
+            .end()
+            .build();
+    assertEquals("leaf-0 = (LESS_THAN x 22)\n" +
+        "leaf-1 = (LESS_THAN x1 22)\n" +
+        "leaf-2 = (LESS_THAN_EQUALS y hi)\n" +
+        "leaf-3 = (EQUALS z 0.22)\n" +
+        "leaf-4 = (EQUALS z1 0.22)\n" +
+        "expr = (and leaf-0 leaf-1 leaf-2 leaf-3 leaf-4)", sarg.toString());
+  }
 }

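The new test confirms that boxed Short/Integer/Float/Double (and HiveChar) literals normalize into the leaf strings shown above. A minimal sketch, not from this patch, of the same fluent builder combining predicates with OR instead of AND (startOr() is assumed as the counterpart of the startAnd() used above):

    SearchArgument sarg =
        SearchArgumentFactory.newBuilder()
            .startOr()
            .lessThan("x", new Long(10))
            .nullSafeEquals("a", "stinger")
            .end()
            .build();
    // toString() renders one leaf per predicate plus an (or leaf-0 leaf-1) expression.
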
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Thu Oct 30 16:22:33 2014
@@ -550,7 +550,7 @@ public class TestHive extends TestCase {
             index.getIndexName());
         assertEquals("Table names don't match for index: " + indexName, tableName,
             index.getOrigTableName());
-        assertEquals("Index table names didn't match for index: " + indexName, indexTableName,
+        assertEquals("Index table names didn't match for index: " + indexName, qIndexTableName,
             index.getIndexTableName());
         assertEquals("Index handler classes didn't match for index: " + indexName,
             indexHandlerClass, index.getIndexHandlerClass());
@@ -564,7 +564,7 @@ public class TestHive extends TestCase {
 
       // Drop index
       try {
-        hm.dropIndex(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, indexName, true);
+        hm.dropIndex(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, indexName, false, true);
       } catch (HiveException e) {
         System.err.println(StringUtils.stringifyException(e));
         assertTrue("Unable to drop index: " + indexName, false);

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java Thu Oct 30 16:22:33 2014
@@ -45,13 +45,14 @@ public class TestVectorizer {
 
   @Before
   public void setUp() {
-    Map<String, Integer> columnMap = new HashMap<String, Integer>();
-    columnMap.put("col1", 0);
-    columnMap.put("col2", 1);
-    columnMap.put("col3", 2);
+    List<String> columns = new ArrayList<String>();
+    columns.add("col0");
+    columns.add("col1");
+    columns.add("col2");
+    columns.add("col3");
 
     //Generate vectorized expression
-    vContext = new VectorizationContext(columnMap, 3);
+    vContext = new VectorizationContext(columns);
   }
 
   @Description(name = "fake", value = "FAKE")

Copied: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java (from r1630265, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java)
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java?p2=hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java&p1=hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java&r1=1630265&r2=1635536&rev=1635536&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java Thu Oct 30 16:22:33 2014
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -47,6 +48,14 @@ public class TestColumnAccess {
     Assert.assertEquals("Checking command success", 0, ret);
   }
 
+  @AfterClass
+  public static void Teardown() throws Exception {
+    Driver driver = createDriver();
+    driver.run("drop table t1");
+    driver.run("drop table t2");
+    driver.run("drop view v1");
+  }
+
   @Test
   public void testQueryTable1() throws ParseException {
     String query = "select * from t1";

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java Thu Oct 30 16:22:33 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.parse.S
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.AfterClass;
 import org.junit.Test;
 
 /**
@@ -56,6 +57,13 @@ public class TestReadEntityDirect {
     assertEquals("Checking command success", 0, ret);
   }
 
+  @AfterClass
+  public static void onetimeTeardown() throws Exception {
+    Driver driver = createDriver();
+    driver.run("drop table t1");
+    driver.run("drop view v1");
+  }
+
   @Before
   public void setup() {
     CheckInputReadEntityDirect.readEntities = null;

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java Thu Oct 30 16:22:33 2014
@@ -55,13 +55,10 @@ public class TestSQLStdHiveAccessControl
     assertFalse("Check for transform query disabling hook",
         processedConf.getVar(ConfVars.PREEXECHOOKS).contains(DisallowTransformHook.class.getName()));
 
-    // check that set param whitelist is not set
-    assertTrue(processedConf.getVar(ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST) == null
-        || processedConf.getVar(ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST).trim()
-            .equals(""));
-
     // verify that some dummy param can be set
     processedConf.verifyAndSet("dummy.param", "dummy.val");
+    processedConf.verifyAndSet(ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, "true");
+
   }
 
   private HiveAuthzSessionContext getCLISessionCtx() {

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java Thu Oct 30 16:22:33 2014
@@ -20,6 +20,11 @@ package org.apache.hadoop.hive.ql.securi
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;
@@ -42,20 +47,53 @@ public class TestSQLStdHiveAccessControl
    * policy on hiveconf correctly
    *
    * @throws HiveAuthzPluginException
+   * @throws IllegalAccessException
+   * @throws NoSuchFieldException
+   * @throws IllegalArgumentException
+   * @throws SecurityException
    */
   @Test
-  public void testConfigProcessing() throws HiveAuthzPluginException {
-    HiveConf processedConf = new HiveConf();
+  public void testConfigProcessing() throws HiveAuthzPluginException, SecurityException,
+      IllegalArgumentException, NoSuchFieldException, IllegalAccessException {
+    HiveConf processedConf = newAuthEnabledConf();
     SQLStdHiveAccessController accessController = new SQLStdHiveAccessController(null,
-        processedConf, new HadoopDefaultAuthenticator(), getHS2SessionCtx()
-        );
+        processedConf, new HadoopDefaultAuthenticator(), getHS2SessionCtx());
     accessController.applyAuthorizationConfigPolicy(processedConf);
 
     // check that hook to disable transforms has been added
     assertTrue("Check for transform query disabling hook",
         processedConf.getVar(ConfVars.PREEXECHOOKS).contains(DisallowTransformHook.class.getName()));
 
-    verifyParamSettability(SQLStdHiveAccessController.defaultModWhiteListSqlStdAuth, processedConf);
+    List<String> settableParams = getSettableParams();
+    verifyParamSettability(settableParams, processedConf);
+
+  }
+
+  private HiveConf newAuthEnabledConf() {
+    HiveConf conf = new HiveConf();
+    conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
+    return conf;
+  }
+
+  /**
+   * @return list of parameters that should be possible to set
+   */
+  private List<String> getSettableParams() throws SecurityException, NoSuchFieldException,
+      IllegalArgumentException, IllegalAccessException {
+    // get all the variable names being converted to regex in HiveConf, using reflection
+    Field varNameField = HiveConf.class.getDeclaredField("sqlStdAuthSafeVarNames");
+    varNameField.setAccessible(true);
+    List<String> confVarList = Arrays.asList((String[]) varNameField.get(null));
+
+    // create list with variables that match some of the regexes
+    List<String> confVarRegexList = Arrays.asList("hive.convert.join.bucket.mapjoin.tez",
+        "hive.optimize.index.filter.compact.maxsize", "hive.tez.dummy", "tez.task.dummy");
+
+    // combine two lists
+    List<String> varList = new ArrayList<String>();
+    varList.addAll(confVarList);
+    varList.addAll(confVarRegexList);
+    return varList;
 
   }
 
@@ -70,7 +108,7 @@ public class TestSQLStdHiveAccessControl
    * @param settableParams
    * @param processedConf
    */
-  private void verifyParamSettability(String [] settableParams, HiveConf processedConf) {
+  private void verifyParamSettability(List<String> settableParams, HiveConf processedConf) {
     // verify that the whitelist params can be set
     for (String param : settableParams) {
       try {
@@ -90,24 +128,42 @@ public class TestSQLStdHiveAccessControl
   }
 
   /**
-   * Test that modifying HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST config works
+   * Test that setting HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST_APPEND config works
+   * @throws HiveAuthzPluginException
+   */
+  @Test
+  public void testConfigProcessingCustomSetWhitelistAppend() throws HiveAuthzPluginException {
+    // append new config params to whitelist
+    List<String> paramRegexes = Arrays.asList("hive.ctest.param", "hive.abc..*");
+    List<String> settableParams = Arrays.asList("hive.ctest.param", "hive.abc.def");
+    verifySettability(paramRegexes, settableParams,
+        ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST_APPEND);
+  }
+
+  /**
+   * Test that setting HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST config works
    * @throws HiveAuthzPluginException
    */
   @Test
   public void testConfigProcessingCustomSetWhitelist() throws HiveAuthzPluginException {
+    // append new config params to whitelist
+    List<String> paramRegexes = Arrays.asList("hive.ctest.param", "hive.abc..*");
+    List<String> settableParams = Arrays.asList("hive.ctest.param", "hive.abc.def");
+    verifySettability(paramRegexes, settableParams,
+        ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST);
+  }
 
-    HiveConf processedConf = new HiveConf();
-    // add custom value, including one from the default, one new one
-    String[] settableParams = { SQLStdHiveAccessController.defaultModWhiteListSqlStdAuth[0],
-        "abcs.dummy.test.param" };
-   processedConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST,
-        Joiner.on(",").join(settableParams));
+  private void verifySettability(List<String> paramRegexes, List<String> settableParams,
+      ConfVars whiteListParam) throws HiveAuthzPluginException {
+    HiveConf processedConf = newAuthEnabledConf();
+    processedConf.setVar(whiteListParam,
+        Joiner.on("|").join(paramRegexes));
 
     SQLStdHiveAccessController accessController = new SQLStdHiveAccessController(null,
         processedConf, new HadoopDefaultAuthenticator(), getHS2SessionCtx());
     accessController.applyAuthorizationConfigPolicy(processedConf);
-    verifyParamSettability(settableParams, processedConf);
 
+    verifyParamSettability(settableParams, processedConf);
   }
 
   private void assertConfModificationException(HiveConf processedConf, String param) {
@@ -120,4 +176,5 @@ public class TestSQLStdHiveAccessControl
     assertTrue("Exception should be thrown while modifying the param " + param, caughtEx);
   }
 
+
 }

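Both whitelist parameters are treated as '|'-joined regexes over parameter names, and the APPEND variant extends the built-in list rather than replacing it. A minimal fragment of wiring a custom append list before applying the policy, reusing the calls from the test (getHS2SessionCtx() is the test's own helper, and the regexes are illustrative):

    HiveConf conf = new HiveConf();
    conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
    conf.setVar(ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST_APPEND,
        Joiner.on("|").join(Arrays.asList("hive.ctest.param", "hive.abc..*")));

    SQLStdHiveAccessController accessController = new SQLStdHiveAccessController(null,
        conf, new HadoopDefaultAuthenticator(), getHS2SessionCtx());
    accessController.applyAuthorizationConfigPolicy(conf);

    // Parameters matching the appended regexes are now settable in this session.
    conf.verifyAndSet("hive.abc.def", "true");
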
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java Thu Oct 30 16:22:33 2014
@@ -63,12 +63,13 @@ public abstract class CompactorTest {
   protected CompactionTxnHandler txnHandler;
   protected IMetaStoreClient ms;
   protected long sleepTime = 1000;
+  protected HiveConf conf;
 
   private final MetaStoreThread.BooleanPointer stop = new MetaStoreThread.BooleanPointer();
   private final File tmpdir;
 
   protected CompactorTest() throws Exception {
-    HiveConf conf = new HiveConf();
+    conf = new HiveConf();
     TxnDbUtil.setConfValues(conf);
     TxnDbUtil.cleanDb();
     ms = new HiveMetaStoreClient(conf);
@@ -79,16 +80,20 @@ public abstract class CompactorTest {
     tmpdir.deleteOnExit();
   }
 
-  protected void startInitiator(HiveConf conf) throws Exception {
-    startThread('i', conf);
+  protected void startInitiator() throws Exception {
+    startThread('i', true);
   }
 
-  protected void startWorker(HiveConf conf) throws Exception {
-    startThread('w', conf);
+  protected void startWorker() throws Exception {
+    startThread('w', true);
   }
 
-  protected void startCleaner(HiveConf conf) throws Exception {
-    startThread('c', conf);
+  protected void startCleaner() throws Exception {
+    startThread('c', true);
+  }
+
+  protected void startCleaner(MetaStoreThread.BooleanPointer looped) throws Exception {
+    startThread('c', false, looped);
   }
 
   protected Table newTable(String dbName, String tableName, boolean partitioned) throws TException {
@@ -117,6 +122,9 @@ public abstract class CompactorTest {
 
     table.setParameters(parameters);
 
+    // drop the table first, in case some previous test created it
+    ms.dropTable(dbName, tableName);
+
     ms.createTable(table);
     return table;
   }
@@ -142,37 +150,27 @@ public abstract class CompactorTest {
     return txns.get(0);
   }
 
-  protected void addDeltaFile(HiveConf conf, Table t, Partition p, long minTxn, long maxTxn,
-                              int numRecords) throws Exception{
-    addFile(conf, t, p, minTxn, maxTxn, numRecords, FileType.DELTA, 2, true);
-  }
-
-  protected void addBaseFile(HiveConf conf, Table t, Partition p, long maxTxn,
-                             int numRecords) throws Exception{
-    addFile(conf, t, p, 0, maxTxn, numRecords, FileType.BASE, 2, true);
+  protected void addDeltaFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords)
+      throws Exception {
+    addFile(t, p, minTxn, maxTxn, numRecords, FileType.DELTA, 2, true);
   }
 
-  protected void addLegacyFile(HiveConf conf, Table t, Partition p,
-                               int numRecords) throws Exception {
-    addFile(conf, t, p, 0, 0, numRecords, FileType.LEGACY, 2, true);
+  protected void addBaseFile(Table t, Partition p, long maxTxn, int numRecords) throws Exception {
+    addFile(t, p, 0, maxTxn, numRecords, FileType.BASE, 2, true);
   }
 
-  protected void addDeltaFile(HiveConf conf, Table t, Partition p, long minTxn, long maxTxn,
-                              int numRecords, int numBuckets, boolean allBucketsPresent)
-      throws Exception {
-    addFile(conf, t, p, minTxn, maxTxn, numRecords, FileType.DELTA, numBuckets, allBucketsPresent);
+  protected void addLegacyFile(Table t, Partition p, int numRecords) throws Exception {
+    addFile(t, p, 0, 0, numRecords, FileType.LEGACY, 2, true);
   }
 
-  protected void addBaseFile(HiveConf conf, Table t, Partition p, long maxTxn,
-                             int numRecords, int numBuckets, boolean allBucketsPresent)
-      throws Exception {
-    addFile(conf, t, p, 0, maxTxn, numRecords, FileType.BASE, numBuckets, allBucketsPresent);
+  protected void addDeltaFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords,
+                              int numBuckets, boolean allBucketsPresent) throws Exception {
+    addFile(t, p, minTxn, maxTxn, numRecords, FileType.DELTA, numBuckets, allBucketsPresent);
   }
 
-  protected void addLegacyFile(HiveConf conf, Table t, Partition p,
-                               int numRecords, int numBuckets, boolean allBucketsPresent)
-      throws Exception {
-    addFile(conf, t, p, 0, 0, numRecords, FileType.LEGACY, numBuckets, allBucketsPresent);
+  protected void addBaseFile(Table t, Partition p, long maxTxn, int numRecords, int numBuckets,
+                             boolean allBucketsPresent) throws Exception {
+    addFile(t, p, 0, maxTxn, numRecords, FileType.BASE, numBuckets, allBucketsPresent);
   }
 
   protected List<Path> getDirectories(HiveConf conf, Table t, Partition p) throws Exception {
@@ -191,6 +189,10 @@ public abstract class CompactorTest {
     for (long tid : rsp.getTxn_ids()) txnHandler.commitTxn(new CommitTxnRequest(tid));
   }
 
+  protected void stopThread() {
+    stop.boolVal = true;
+  }
+
   private StorageDescriptor newStorageDescriptor(String location, List<Order> sortCols) {
     StorageDescriptor sd = new StorageDescriptor();
     List<FieldSchema> cols = new ArrayList<FieldSchema>(2);
@@ -214,9 +216,13 @@ public abstract class CompactorTest {
     return sd;
   }
 
-  // I can't do this with @Before because I want to be able to control the config file provided
-  // to each test.
-  private void startThread(char type, HiveConf conf) throws Exception {
+  // I can't do this with @Before because I want to be able to control when the thread starts
+  private void startThread(char type, boolean stopAfterOne) throws Exception {
+    startThread(type, stopAfterOne, new MetaStoreThread.BooleanPointer());
+  }
+
+  private void startThread(char type, boolean stopAfterOne, MetaStoreThread.BooleanPointer looped)
+    throws Exception {
     TxnDbUtil.setConfValues(conf);
     CompactorThread t = null;
     switch (type) {
@@ -227,9 +233,10 @@ public abstract class CompactorTest {
     }
     t.setThreadId((int) t.getId());
     t.setHiveConf(conf);
-    stop.boolVal = true;
-    t.init(stop);
-    t.run();
+    stop.boolVal = stopAfterOne;
+    t.init(stop, looped);
+    if (stopAfterOne) t.run();
+    else t.start();
   }
 
   private String getLocation(String tableName, String partValue) {
@@ -243,7 +250,7 @@ public abstract class CompactorTest {
 
   private enum FileType {BASE, DELTA, LEGACY};
 
-  private void addFile(HiveConf conf, Table t, Partition p, long minTxn, long maxTxn,
+  private void addFile(Table t, Partition p, long minTxn, long maxTxn,
                        int numRecords,  FileType type, int numBuckets,
                        boolean allBucketsPresent) throws Exception {
     String partValue = (p == null) ? null : p.getValues().get(0);
@@ -332,6 +339,7 @@ public abstract class CompactorTest {
     private final Configuration conf;
     private FSDataInputStream is = null;
     private final FileSystem fs;
+    private boolean lastWasDelete = true;
 
     MockRawReader(Configuration conf, List<Path> files) throws IOException {
       filesToRead = new Stack<Path>();
@@ -346,6 +354,15 @@ public abstract class CompactorTest {
     }
 
     @Override
+    public boolean isDelete(Text value) {
+      // Alternate between returning deleted and not.  This is easier than actually
+      // tracking operations. We test that this is getting properly called by checking that only
+      // half the records show up in base files after major compactions.
+      lastWasDelete = !lastWasDelete;
+      return lastWasDelete;
+    }
+
+    @Override
     public boolean next(RecordIdentifier identifier, Text text) throws IOException {
       if (is == null) {
         // Open the next file

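With the HiveConf now held by the base class, the start* helpers let a test either run a compactor thread once to completion (startCleaner()) or keep it looping and watch each pass via the BooleanPointer handshake. A minimal fragment of the looping form, using only names introduced above; the sleep interval is illustrative:

    // Run the cleaner continuously and wait for it to finish one full pass.
    MetaStoreThread.BooleanPointer looped = new MetaStoreThread.BooleanPointer();
    looped.boolVal = false;
    startCleaner(looped);
    while (!looped.boolVal) {
      Thread.sleep(100);
    }
    // ... assert on intermediate state, then shut the background thread down.
    stopThread();
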
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java Thu Oct 30 16:22:33 2014
@@ -18,21 +18,26 @@
 package org.apache.hadoop.hive.ql.txn.compactor;
 
 import junit.framework.Assert;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreThread;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
-import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.junit.Before;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.TimeUnit;
 
 /**
  * Tests for the compactor Cleaner thread
  */
 public class TestCleaner extends CompactorTest {
+
+  static final private Log LOG = LogFactory.getLog(TestCleaner.class.getName());
+
   public TestCleaner() throws Exception {
     super();
   }
@@ -41,19 +46,17 @@ public class TestCleaner extends Compact
   public void nothing() throws Exception {
     // Test that the whole thing works when there's nothing in the queue.  This is just a
     // survival test.
-    startCleaner(new HiveConf());
+    startCleaner();
   }
 
   @Test
   public void cleanupAfterMajorTableCompaction() throws Exception {
     Table t = newTable("default", "camtc", false);
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, null, 20L, 20);
-    addDeltaFile(conf, t, null, 21L, 22L, 2);
-    addDeltaFile(conf, t, null, 23L, 24L, 2);
-    addBaseFile(conf, t, null, 25L, 25);
+    addBaseFile(t, null, 20L, 20);
+    addDeltaFile(t, null, 21L, 22L, 2);
+    addDeltaFile(t, null, 23L, 24L, 2);
+    addBaseFile(t, null, 25L, 25);
 
     burnThroughTransactions(25);
 
@@ -63,7 +66,7 @@ public class TestCleaner extends Compact
     txnHandler.markCompacted(ci);
     txnHandler.setRunAs(ci.id, System.getProperty("user.name"));
 
-    startCleaner(conf);
+    startCleaner();
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
@@ -80,12 +83,10 @@ public class TestCleaner extends Compact
     Table t = newTable("default", "campc", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, p, 20L, 20);
-    addDeltaFile(conf, t, p, 21L, 22L, 2);
-    addDeltaFile(conf, t, p, 23L, 24L, 2);
-    addBaseFile(conf, t, p, 25L, 25);
+    addBaseFile(t, p, 20L, 20);
+    addDeltaFile(t, p, 21L, 22L, 2);
+    addDeltaFile(t, p, 23L, 24L, 2);
+    addBaseFile(t, p, 25L, 25);
 
     burnThroughTransactions(25);
 
@@ -96,7 +97,7 @@ public class TestCleaner extends Compact
     txnHandler.markCompacted(ci);
     txnHandler.setRunAs(ci.id, System.getProperty("user.name"));
 
-    startCleaner(conf);
+    startCleaner();
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
@@ -112,12 +113,10 @@ public class TestCleaner extends Compact
   public void cleanupAfterMinorTableCompaction() throws Exception {
     Table t = newTable("default", "camitc", false);
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, null, 20L, 20);
-    addDeltaFile(conf, t, null, 21L, 22L, 2);
-    addDeltaFile(conf, t, null, 23L, 24L, 2);
-    addDeltaFile(conf, t, null, 21L, 24L, 4);
+    addBaseFile(t, null, 20L, 20);
+    addDeltaFile(t, null, 21L, 22L, 2);
+    addDeltaFile(t, null, 23L, 24L, 2);
+    addDeltaFile(t, null, 21L, 24L, 4);
 
     burnThroughTransactions(25);
 
@@ -127,7 +126,7 @@ public class TestCleaner extends Compact
     txnHandler.markCompacted(ci);
     txnHandler.setRunAs(ci.id, System.getProperty("user.name"));
 
-    startCleaner(conf);
+    startCleaner();
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
@@ -151,12 +150,10 @@ public class TestCleaner extends Compact
     Table t = newTable("default", "camipc", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, p, 20L, 20);
-    addDeltaFile(conf, t, p, 21L, 22L, 2);
-    addDeltaFile(conf, t, p, 23L, 24L, 2);
-    addDeltaFile(conf, t, p, 21L, 24L, 4);
+    addBaseFile(t, p, 20L, 20);
+    addDeltaFile(t, p, 21L, 22L, 2);
+    addDeltaFile(t, p, 23L, 24L, 2);
+    addDeltaFile(t, p, 21L, 24L, 4);
 
     burnThroughTransactions(25);
 
@@ -167,7 +164,7 @@ public class TestCleaner extends Compact
     txnHandler.markCompacted(ci);
     txnHandler.setRunAs(ci.id, System.getProperty("user.name"));
 
-    startCleaner(conf);
+    startCleaner();
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
@@ -190,12 +187,10 @@ public class TestCleaner extends Compact
   public void blockedByLockTable() throws Exception {
     Table t = newTable("default", "bblt", false);
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, null, 20L, 20);
-    addDeltaFile(conf, t, null, 21L, 22L, 2);
-    addDeltaFile(conf, t, null, 23L, 24L, 2);
-    addDeltaFile(conf, t, null, 21L, 24L, 4);
+    addBaseFile(t, null, 20L, 20);
+    addDeltaFile(t, null, 21L, 22L, 2);
+    addDeltaFile(t, null, 23L, 24L, 2);
+    addDeltaFile(t, null, 21L, 24L, 4);
 
     burnThroughTransactions(25);
 
@@ -212,7 +207,7 @@ public class TestCleaner extends Compact
     LockRequest req = new LockRequest(components, "me", "localhost");
     LockResponse res = txnHandler.lock(req);
 
-    startCleaner(conf);
+    startCleaner();
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
@@ -228,12 +223,10 @@ public class TestCleaner extends Compact
     Table t = newTable("default", "bblp", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, p, 20L, 20);
-    addDeltaFile(conf, t, p, 21L, 22L, 2);
-    addDeltaFile(conf, t, p, 23L, 24L, 2);
-    addDeltaFile(conf, t, p, 21L, 24L, 4);
+    addBaseFile(t, p, 20L, 20);
+    addDeltaFile(t, p, 21L, 22L, 2);
+    addDeltaFile(t, p, 23L, 24L, 2);
+    addDeltaFile(t, p, 21L, 24L, 4);
 
     burnThroughTransactions(25);
 
@@ -244,7 +237,7 @@ public class TestCleaner extends Compact
     txnHandler.markCompacted(ci);
     txnHandler.setRunAs(ci.id, System.getProperty("user.name"));
 
-    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
+    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "default");
     comp.setTablename("bblp");
     comp.setPartitionname("ds=today");
     List<LockComponent> components = new ArrayList<LockComponent>(1);
@@ -252,7 +245,7 @@ public class TestCleaner extends Compact
     LockRequest req = new LockRequest(components, "me", "localhost");
     LockResponse res = txnHandler.lock(req);
 
-    startCleaner(conf);
+    startCleaner();
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
@@ -265,15 +258,154 @@ public class TestCleaner extends Compact
   }
 
   @Test
+  public void notBlockedBySubsequentLock() throws Exception {
+    Table t = newTable("default", "bblt", false);
+
+    // Set the run frequency low on this test so it doesn't take long
+    conf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_CLEANER_RUN_INTERVAL, 100,
+        TimeUnit.MILLISECONDS);
+
+    addBaseFile(t, null, 20L, 20);
+    addDeltaFile(t, null, 21L, 22L, 2);
+    addDeltaFile(t, null, 23L, 24L, 2);
+    addDeltaFile(t, null, 21L, 24L, 4);
+
+    burnThroughTransactions(25);
+
+    CompactionRequest rqst = new CompactionRequest("default", "bblt", CompactionType.MINOR);
+    txnHandler.compact(rqst);
+    CompactionInfo ci = txnHandler.findNextToCompact("fred");
+    txnHandler.markCompacted(ci);
+    txnHandler.setRunAs(ci.id, System.getProperty("user.name"));
+
+    LockComponent comp = new LockComponent(LockType.SHARED_READ, LockLevel.TABLE, "default");
+    comp.setTablename("bblt");
+    List<LockComponent> components = new ArrayList<LockComponent>(1);
+    components.add(comp);
+    LockRequest req = new LockRequest(components, "me", "localhost");
+    LockResponse res = txnHandler.lock(req);
+
+    MetaStoreThread.BooleanPointer looped = new MetaStoreThread.BooleanPointer();
+    looped.boolVal = false;
+    startCleaner(looped);
+
+    // Make sure the cleaner has a chance to run once
+    while (!looped.boolVal) {
+      Thread.sleep(100);
+    }
+
+    // There should still be one request, as the lock is still held.
+    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
+    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
+    Assert.assertEquals(1, compacts.size());
+
+    // Obtain a second lock.  This shouldn't block the cleaner as it was acquired after the
+    // initial clean request.
+    LockComponent comp2 = new LockComponent(LockType.SHARED_READ, LockLevel.TABLE, "default");
+    comp2.setTablename("bblt");
+    List<LockComponent> components2 = new ArrayList<LockComponent>(1);
+    components2.add(comp2);
+    LockRequest req2 = new LockRequest(components2, "me", "localhost");
+    LockResponse res2 = txnHandler.lock(req2);
+
+    // Unlock the previous lock
+    txnHandler.unlock(new UnlockRequest(res.getLockid()));
+    looped.boolVal = false;
+
+    while (!looped.boolVal) {
+      Thread.sleep(100);
+    }
+    stopThread();
+    Thread.sleep(200);
+
+
+    // Check there are no compactions requests left.
+    rsp = txnHandler.showCompact(new ShowCompactRequest());
+    compacts = rsp.getCompacts();
+    Assert.assertEquals(0, compacts.size());
+  }
+
+  @Test
+  public void partitionNotBlockedBySubsequentLock() throws Exception {
+    Table t = newTable("default", "bblt", true);
+    Partition p = newPartition(t, "today");
+
+    // Set the run frequency low on this test so it doesn't take long
+    conf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_CLEANER_RUN_INTERVAL, 100,
+        TimeUnit.MILLISECONDS);
+
+    addBaseFile(t, p, 20L, 20);
+    addDeltaFile(t, p, 21L, 22L, 2);
+    addDeltaFile(t, p, 23L, 24L, 2);
+    addDeltaFile(t, p, 21L, 24L, 4);
+
+    burnThroughTransactions(25);
+
+    CompactionRequest rqst = new CompactionRequest("default", "bblt", CompactionType.MINOR);
+    rqst.setPartitionname("ds=today");
+    txnHandler.compact(rqst);
+    CompactionInfo ci = txnHandler.findNextToCompact("fred");
+    txnHandler.markCompacted(ci);
+    txnHandler.setRunAs(ci.id, System.getProperty("user.name"));
+
+    LockComponent comp = new LockComponent(LockType.SHARED_READ, LockLevel.PARTITION, "default");
+    comp.setTablename("bblt");
+    comp.setPartitionname("ds=today");
+    List<LockComponent> components = new ArrayList<LockComponent>(1);
+    components.add(comp);
+    LockRequest req = new LockRequest(components, "me", "localhost");
+    LockResponse res = txnHandler.lock(req);
+
+    MetaStoreThread.BooleanPointer looped = new MetaStoreThread.BooleanPointer();
+    looped.boolVal = false;
+    startCleaner(looped);
+
+    // Make sure the cleaner has a chance to run once
+    while (!looped.boolVal) {
+      Thread.sleep(100);
+    }
+
+    // There should still be one request, as the lock is still held.
+    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
+    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
+    Assert.assertEquals(1, compacts.size());
+
+
+    // Obtain a second lock.  This shouldn't block the cleaner as it was acquired after the
+    // initial clean request.
+    LockComponent comp2 = new LockComponent(LockType.SHARED_READ, LockLevel.PARTITION, "default");
+    comp2.setTablename("bblt");
+    comp2.setPartitionname("ds=today");
+    List<LockComponent> components2 = new ArrayList<LockComponent>(1);
+    components2.add(comp2);
+    LockRequest req2 = new LockRequest(components2, "me", "localhost");
+    LockResponse res2 = txnHandler.lock(req2);
+
+    // Unlock the previous lock
+    txnHandler.unlock(new UnlockRequest(res.getLockid()));
+    looped.boolVal = false;
+
+    while (!looped.boolVal) {
+      Thread.sleep(100);
+    }
+    stopThread();
+    Thread.sleep(200);
+
+
+    // Check there are no compactions requests left.
+    rsp = txnHandler.showCompact(new ShowCompactRequest());
+    compacts = rsp.getCompacts();
+    Assert.assertEquals(0, compacts.size());
+  }
+
+  @Test
   public void cleanupAfterMajorPartitionCompactionNoBase() throws Exception {
     Table t = newTable("default", "campcnb", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
-
-    addDeltaFile(conf, t, p, 1L, 22L, 22);
-    addDeltaFile(conf, t, p, 23L, 24L, 2);
-    addBaseFile(conf, t, p, 25L, 25);
+    addDeltaFile(t, p, 1L, 22L, 22);
+    addDeltaFile(t, p, 23L, 24L, 2);
+    addBaseFile(t, p, 25L, 25);
 
     burnThroughTransactions(25);
 
@@ -284,7 +416,7 @@ public class TestCleaner extends Compact
     txnHandler.markCompacted(ci);
     txnHandler.setRunAs(ci.id, System.getProperty("user.name"));
 
-    startCleaner(conf);
+    startCleaner();
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
@@ -295,9 +427,4 @@ public class TestCleaner extends Compact
     Assert.assertEquals(1, paths.size());
     Assert.assertEquals("base_25", paths.get(0).getName());
   }
-
-  @Before
-  public void setUpTxnDb() throws Exception {
-    TxnDbUtil.setConfValues(new HiveConf());
-  }
 }
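
The two tests added above, notBlockedBySubsequentLock and partitionNotBlockedBySubsequentLock, synchronize with the cleaner through the MetaStoreThread.BooleanPointer flag: the test clears looped.boolVal and spins until the cleaner sets it again, which guarantees at least one full cleaner pass between the lock operations and the assertions. A minimal sketch of that handshake as a standalone helper is shown below; the class and method names, the timeout, and the failure behavior are assumptions added for illustration and are not part of the patch.

    import org.apache.hadoop.hive.metastore.MetaStoreThread;

    public final class CleanerWaitSketch {
      // Wait until the background cleaner signals one completed pass, or fail after timeoutMs.
      public static void waitForCleanerPass(MetaStoreThread.BooleanPointer looped, long timeoutMs)
          throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMs;
        looped.boolVal = false;                // arm the flag before waiting
        while (!looped.boolVal) {
          if (System.currentTimeMillis() > deadline) {
            throw new IllegalStateException("cleaner did not complete a pass in time");
          }
          Thread.sleep(100);                   // the cleaner thread flips boolVal to true
        }
      }
    }

With such a helper each of the two inline wait loops in the tests above reduces to a single call, e.g. waitForCleanerPass(looped, 10000), and a hung cleaner fails the test instead of blocking it indefinitely.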

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java Thu Oct 30 16:22:33 2014
@@ -47,7 +47,7 @@ public class TestInitiator extends Compa
   public void nothing() throws Exception {
     // Test that the whole thing works when there's nothing in the queue.  This is just a
     // survival test.
-    startInitiator(new HiveConf());
+    startInitiator();
   }
 
   @Test
@@ -63,7 +63,7 @@ public class TestInitiator extends Compa
     txnHandler.findNextToCompact(Worker.hostname() + "-193892");
     txnHandler.findNextToCompact("nosuchhost-193892");
 
-    startInitiator(new HiveConf());
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -89,10 +89,9 @@ public class TestInitiator extends Compa
 
     txnHandler.findNextToCompact("nosuchhost-193892");
 
-    HiveConf conf = new HiveConf();
     conf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT, 1L, TimeUnit.MILLISECONDS);
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -104,7 +103,6 @@ public class TestInitiator extends Compa
   public void majorCompactOnTableTooManyAborts() throws Exception {
     Table t = newTable("default", "mcottma", false);
 
-    HiveConf conf = new HiveConf();
     HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
 
     for (int i = 0; i < 11; i++) {
@@ -119,7 +117,7 @@ public class TestInitiator extends Compa
       txnHandler.abortTxn(new AbortTxnRequest(txnid));
     }
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -134,7 +132,6 @@ public class TestInitiator extends Compa
     Table t = newTable("default", "mcoptma", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
     HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
 
     for (int i = 0; i < 11; i++) {
@@ -150,7 +147,7 @@ public class TestInitiator extends Compa
       txnHandler.abortTxn(new AbortTxnRequest(txnid));
     }
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -168,7 +165,6 @@ public class TestInitiator extends Compa
       Partition p = newPartition(t, "day-" + i);
     }
 
-    HiveConf conf = new HiveConf();
     HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
 
     for (int i = 0; i < 11; i++) {
@@ -184,7 +180,7 @@ public class TestInitiator extends Compa
       txnHandler.abortTxn(new AbortTxnRequest(txnid));
     }
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     Assert.assertEquals(0, rsp.getCompactsSize());
@@ -197,8 +193,6 @@ public class TestInitiator extends Compa
     // accidentally clean it too.
     Table t = newTable("default", "ceat", false);
 
-    HiveConf conf = new HiveConf();
-
     long txnid = openTxn();
     LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
     comp.setTablename("ceat");
@@ -216,7 +210,7 @@ public class TestInitiator extends Compa
     GetOpenTxnsResponse openTxns = txnHandler.getOpenTxns();
     Assert.assertEquals(101, openTxns.getOpen_txnsSize());
 
-    startInitiator(conf);
+    startInitiator();
 
     openTxns = txnHandler.getOpenTxns();
     Assert.assertEquals(1, openTxns.getOpen_txnsSize());
@@ -228,7 +222,6 @@ public class TestInitiator extends Compa
     parameters.put("NO_AUTO_COMPACTION", "true");
     Table t = newTable("default", "ncwncs", false, parameters);
 
-    HiveConf conf = new HiveConf();
     HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
 
     for (int i = 0; i < 11; i++) {
@@ -243,7 +236,7 @@ public class TestInitiator extends Compa
       txnHandler.abortTxn(new AbortTxnRequest(txnid));
     }
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     Assert.assertEquals(0, rsp.getCompactsSize());
@@ -253,7 +246,6 @@ public class TestInitiator extends Compa
   public void noCompactWhenCompactAlreadyScheduled() throws Exception {
     Table t = newTable("default", "ncwcas", false);
 
-    HiveConf conf = new HiveConf();
     HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
 
     for (int i = 0; i < 11; i++) {
@@ -277,7 +269,7 @@ public class TestInitiator extends Compa
     Assert.assertEquals("initiated", compacts.get(0).getState());
     Assert.assertEquals("ncwcas", compacts.get(0).getTablename());
 
-    startInitiator(conf);
+    startInitiator();
 
     rsp = txnHandler.showCompact(new ShowCompactRequest());
     compacts = rsp.getCompacts();
@@ -291,11 +283,9 @@ public class TestInitiator extends Compa
   public void compactTableHighDeltaPct() throws Exception {
     Table t = newTable("default", "cthdp", false);
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, null, 20L, 20);
-    addDeltaFile(conf, t, null, 21L, 22L, 2);
-    addDeltaFile(conf, t, null, 23L, 24L, 2);
+    addBaseFile(t, null, 20L, 20);
+    addDeltaFile(t, null, 21L, 22L, 2);
+    addDeltaFile(t, null, 23L, 24L, 2);
 
     burnThroughTransactions(23);
 
@@ -309,7 +299,7 @@ public class TestInitiator extends Compa
     LockResponse res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -324,11 +314,9 @@ public class TestInitiator extends Compa
     Table t = newTable("default", "cphdp", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, p, 20L, 20);
-    addDeltaFile(conf, t, p, 21L, 22L, 2);
-    addDeltaFile(conf, t, p, 23L, 24L, 2);
+    addBaseFile(t, p, 20L, 20);
+    addDeltaFile(t, p, 21L, 22L, 2);
+    addDeltaFile(t, p, 23L, 24L, 2);
 
     burnThroughTransactions(23);
 
@@ -343,7 +331,7 @@ public class TestInitiator extends Compa
     LockResponse res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -358,11 +346,9 @@ public class TestInitiator extends Compa
   public void noCompactTableDeltaPctNotHighEnough() throws Exception {
     Table t = newTable("default", "nctdpnhe", false);
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, null, 50L, 50);
-    addDeltaFile(conf, t, null, 21L, 22L, 2);
-    addDeltaFile(conf, t, null, 23L, 24L, 2);
+    addBaseFile(t, null, 50L, 50);
+    addDeltaFile(t, null, 21L, 22L, 2);
+    addDeltaFile(t, null, 23L, 24L, 2);
 
     burnThroughTransactions(53);
 
@@ -376,7 +362,7 @@ public class TestInitiator extends Compa
     LockResponse res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     Assert.assertEquals(0, rsp.getCompactsSize());
@@ -386,20 +372,18 @@ public class TestInitiator extends Compa
   public void compactTableTooManyDeltas() throws Exception {
     Table t = newTable("default", "cttmd", false);
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, null, 200L, 200);
-    addDeltaFile(conf, t, null, 201L, 201L, 1);
-    addDeltaFile(conf, t, null, 202L, 202L, 1);
-    addDeltaFile(conf, t, null, 203L, 203L, 1);
-    addDeltaFile(conf, t, null, 204L, 204L, 1);
-    addDeltaFile(conf, t, null, 205L, 205L, 1);
-    addDeltaFile(conf, t, null, 206L, 206L, 1);
-    addDeltaFile(conf, t, null, 207L, 207L, 1);
-    addDeltaFile(conf, t, null, 208L, 208L, 1);
-    addDeltaFile(conf, t, null, 209L, 209L, 1);
-    addDeltaFile(conf, t, null, 210L, 210L, 1);
-    addDeltaFile(conf, t, null, 211L, 211L, 1);
+    addBaseFile(t, null, 200L, 200);
+    addDeltaFile(t, null, 201L, 201L, 1);
+    addDeltaFile(t, null, 202L, 202L, 1);
+    addDeltaFile(t, null, 203L, 203L, 1);
+    addDeltaFile(t, null, 204L, 204L, 1);
+    addDeltaFile(t, null, 205L, 205L, 1);
+    addDeltaFile(t, null, 206L, 206L, 1);
+    addDeltaFile(t, null, 207L, 207L, 1);
+    addDeltaFile(t, null, 208L, 208L, 1);
+    addDeltaFile(t, null, 209L, 209L, 1);
+    addDeltaFile(t, null, 210L, 210L, 1);
+    addDeltaFile(t, null, 211L, 211L, 1);
 
     burnThroughTransactions(210);
 
@@ -413,7 +397,7 @@ public class TestInitiator extends Compa
     LockResponse res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -428,20 +412,18 @@ public class TestInitiator extends Compa
     Table t = newTable("default", "cptmd", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, p, 200L, 200);
-    addDeltaFile(conf, t, p, 201L, 201L, 1);
-    addDeltaFile(conf, t, p, 202L, 202L, 1);
-    addDeltaFile(conf, t, p, 203L, 203L, 1);
-    addDeltaFile(conf, t, p, 204L, 204L, 1);
-    addDeltaFile(conf, t, p, 205L, 205L, 1);
-    addDeltaFile(conf, t, p, 206L, 206L, 1);
-    addDeltaFile(conf, t, p, 207L, 207L, 1);
-    addDeltaFile(conf, t, p, 208L, 208L, 1);
-    addDeltaFile(conf, t, p, 209L, 209L, 1);
-    addDeltaFile(conf, t, p, 210L, 210L, 1);
-    addDeltaFile(conf, t, p, 211L, 211L, 1);
+    addBaseFile(t, p, 200L, 200);
+    addDeltaFile(t, p, 201L, 201L, 1);
+    addDeltaFile(t, p, 202L, 202L, 1);
+    addDeltaFile(t, p, 203L, 203L, 1);
+    addDeltaFile(t, p, 204L, 204L, 1);
+    addDeltaFile(t, p, 205L, 205L, 1);
+    addDeltaFile(t, p, 206L, 206L, 1);
+    addDeltaFile(t, p, 207L, 207L, 1);
+    addDeltaFile(t, p, 208L, 208L, 1);
+    addDeltaFile(t, p, 209L, 209L, 1);
+    addDeltaFile(t, p, 210L, 210L, 1);
+    addDeltaFile(t, p, 211L, 211L, 1);
 
     burnThroughTransactions(210);
 
@@ -456,7 +438,7 @@ public class TestInitiator extends Compa
     LockResponse res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -471,11 +453,9 @@ public class TestInitiator extends Compa
   public void noCompactTableNotEnoughDeltas() throws Exception {
     Table t = newTable("default", "nctned", false);
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, null, 200L, 200);
-    addDeltaFile(conf, t, null, 201L, 205L, 5);
-    addDeltaFile(conf, t, null, 206L, 211L, 6);
+    addBaseFile(t, null, 200L, 200);
+    addDeltaFile(t, null, 201L, 205L, 5);
+    addDeltaFile(t, null, 206L, 211L, 6);
 
     burnThroughTransactions(210);
 
@@ -489,7 +469,7 @@ public class TestInitiator extends Compa
     LockResponse res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     Assert.assertEquals(0, rsp.getCompactsSize());
@@ -499,20 +479,18 @@ public class TestInitiator extends Compa
   public void chooseMajorOverMinorWhenBothValid() throws Exception {
     Table t = newTable("default", "cmomwbv", false);
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, null, 200L, 200);
-    addDeltaFile(conf, t, null, 201L, 211L, 11);
-    addDeltaFile(conf, t, null, 212L, 222L, 11);
-    addDeltaFile(conf, t, null, 223L, 233L, 11);
-    addDeltaFile(conf, t, null, 234L, 244L, 11);
-    addDeltaFile(conf, t, null, 245L, 255L, 11);
-    addDeltaFile(conf, t, null, 256L, 266L, 11);
-    addDeltaFile(conf, t, null, 267L, 277L, 11);
-    addDeltaFile(conf, t, null, 278L, 288L, 11);
-    addDeltaFile(conf, t, null, 289L, 299L, 11);
-    addDeltaFile(conf, t, null, 300L, 310L, 11);
-    addDeltaFile(conf, t, null, 311L, 321L, 11);
+    addBaseFile(t, null, 200L, 200);
+    addDeltaFile(t, null, 201L, 211L, 11);
+    addDeltaFile(t, null, 212L, 222L, 11);
+    addDeltaFile(t, null, 223L, 233L, 11);
+    addDeltaFile(t, null, 234L, 244L, 11);
+    addDeltaFile(t, null, 245L, 255L, 11);
+    addDeltaFile(t, null, 256L, 266L, 11);
+    addDeltaFile(t, null, 267L, 277L, 11);
+    addDeltaFile(t, null, 278L, 288L, 11);
+    addDeltaFile(t, null, 289L, 299L, 11);
+    addDeltaFile(t, null, 300L, 310L, 11);
+    addDeltaFile(t, null, 311L, 321L, 11);
 
     burnThroughTransactions(320);
 
@@ -526,7 +504,7 @@ public class TestInitiator extends Compa
     LockResponse res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -541,19 +519,17 @@ public class TestInitiator extends Compa
     Table t = newTable("default", "ednb", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
-
-    addDeltaFile(conf, t, p, 1L, 201L, 200);
-    addDeltaFile(conf, t, p, 202L, 202L, 1);
-    addDeltaFile(conf, t, p, 203L, 203L, 1);
-    addDeltaFile(conf, t, p, 204L, 204L, 1);
-    addDeltaFile(conf, t, p, 205L, 205L, 1);
-    addDeltaFile(conf, t, p, 206L, 206L, 1);
-    addDeltaFile(conf, t, p, 207L, 207L, 1);
-    addDeltaFile(conf, t, p, 208L, 208L, 1);
-    addDeltaFile(conf, t, p, 209L, 209L, 1);
-    addDeltaFile(conf, t, p, 210L, 210L, 1);
-    addDeltaFile(conf, t, p, 211L, 211L, 1);
+    addDeltaFile(t, p, 1L, 201L, 200);
+    addDeltaFile(t, p, 202L, 202L, 1);
+    addDeltaFile(t, p, 203L, 203L, 1);
+    addDeltaFile(t, p, 204L, 204L, 1);
+    addDeltaFile(t, p, 205L, 205L, 1);
+    addDeltaFile(t, p, 206L, 206L, 1);
+    addDeltaFile(t, p, 207L, 207L, 1);
+    addDeltaFile(t, p, 208L, 208L, 1);
+    addDeltaFile(t, p, 209L, 209L, 1);
+    addDeltaFile(t, p, 210L, 210L, 1);
+    addDeltaFile(t, p, 211L, 211L, 1);
 
     burnThroughTransactions(210);
 
@@ -568,7 +544,7 @@ public class TestInitiator extends Compa
     LockResponse res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -584,11 +560,9 @@ public class TestInitiator extends Compa
     Table t = newTable("default", "ttospgocr", true);
     Partition p = newPartition(t, "today");
 
-    HiveConf conf = new HiveConf();
-
-    addBaseFile(conf, t, p, 20L, 20);
-    addDeltaFile(conf, t, p, 21L, 22L, 2);
-    addDeltaFile(conf, t, p, 23L, 24L, 2);
+    addBaseFile(t, p, 20L, 20);
+    addDeltaFile(t, p, 21L, 22L, 2);
+    addDeltaFile(t, p, 23L, 24L, 2);
 
     burnThroughTransactions(23);
 
@@ -614,7 +588,7 @@ public class TestInitiator extends Compa
     res = txnHandler.lock(req);
     txnHandler.commitTxn(new CommitTxnRequest(txnid));
 
-    startInitiator(conf);
+    startInitiator();
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();
@@ -626,9 +600,4 @@ public class TestInitiator extends Compa
   }
 
   // TODO test compactions with legacy file types
-
-  @Before
-  public void setUpTxnDb() throws Exception {
-    TxnDbUtil.setConfValues(new HiveConf());
-  }
 }
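
The delta-percentage tests in this file (compactTableHighDeltaPct and noCompactTableDeltaPctNotHighEnough) are tuned around the initiator's size-ratio heuristic: delta records totaling 4 over a base of 20 exceed the default hive.compactor.delta.pct.threshold of 0.1, while the same 4 records over a base of 50 do not. The sketch below mirrors that comparison; it uses the record counts passed to addBaseFile/addDeltaFile as a stand-in for the sizes the real Initiator measures, and the class, method, and parameter names are illustrative only, not taken from the Initiator code.

    // Illustrative only: reproduces the ratio these tests are built around,
    // not the Initiator's actual implementation.
    public final class DeltaPctSketch {
      public static boolean wouldRequestMajorCompaction(long baseSize, long totalDeltaSize,
                                                        float deltaPctThreshold) {
        if (baseSize == 0) {
          // No base file at all: any delta content already makes up 100% of the data.
          return totalDeltaSize > 0;
        }
        return ((float) totalDeltaSize / (float) baseSize) > deltaPctThreshold;
      }

      public static void main(String[] args) {
        // compactTableHighDeltaPct: base 20, deltas 2 + 2 = 4 -> 0.20 > 0.1, compaction requested
        System.out.println(wouldRequestMajorCompaction(20, 4, 0.1f));   // true
        // noCompactTableDeltaPctNotHighEnough: base 50, deltas 2 + 2 = 4 -> 0.08 <= 0.1, no request
        System.out.println(wouldRequestMajorCompaction(50, 4, 0.1f));   // false
      }
    }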