You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ha...@apache.org on 2013/07/31 00:22:46 UTC

svn commit: r1508669 [38/39] - in /hive/branches/vectorization: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/test/results/clientpositive/ data/files/ eclipse-templates/ hcatalog/build-support/ant/ hcatalog/core/src/main/java/org/ap...

Modified: hive/branches/vectorization/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java (original)
+++ hive/branches/vectorization/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java Tue Jul 30 22:22:35 2013
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.serde2.lazybinary;
 
+import java.sql.Date;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -192,7 +193,7 @@ public class TestLazyBinarySerDe extends
 
     int num = 100;
     for (int itest = 0; itest < num; itest++) {
-      int randField = r.nextInt(10);
+      int randField = r.nextInt(11);
       Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
       Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
       Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
@@ -202,9 +203,10 @@ public class TestLazyBinarySerDe extends
       String st = randField > 6 ? null : TestBinarySortableSerDe
           .getRandString(r);
       HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-      MyTestInnerStruct is = randField > 7 ? null : new MyTestInnerStruct(r
+      Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
+      MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
           .nextInt(5) - 2, r.nextInt(5) - 2);
-      List<Integer> li = randField > 8 ? null : TestBinarySortableSerDe
+      List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
           .getRandIntegerArray(r);
       byte[] ba  = TestBinarySortableSerDe.getRandBA(r, itest);
       Map<String, List<MyTestInnerStruct>> mp = new HashMap<String, List<MyTestInnerStruct>>();
@@ -218,7 +220,7 @@ public class TestLazyBinarySerDe extends
       List<MyTestInnerStruct> value2 = getRandStructArray(r);
       mp.put(key2, value2);
 
-      MyTestClassBigger input = new MyTestClassBigger(b, s, n, l, f, d, st, bd, is,
+      MyTestClassBigger input = new MyTestClassBigger(b, s, n, l, f, d, st, bd, date, is,
           li, ba, mp);
       BytesWritable bw = (BytesWritable) serde1.serialize(input, rowOI1);
       Object output = serde2.deserialize(bw);
@@ -260,7 +262,7 @@ public class TestLazyBinarySerDe extends
 
     int num = 100;
     for (int itest = 0; itest < num; itest++) {
-      int randField = r.nextInt(11);
+      int randField = r.nextInt(12);
       Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
       Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
       Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
@@ -270,12 +272,13 @@ public class TestLazyBinarySerDe extends
       String st = randField > 6 ? null : TestBinarySortableSerDe
           .getRandString(r);
       HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-      MyTestInnerStruct is = randField > 8 ? null : new MyTestInnerStruct(r
+      Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
+      MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
           .nextInt(5) - 2, r.nextInt(5) - 2);
-      List<Integer> li = randField > 9 ? null : TestBinarySortableSerDe
+      List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
           .getRandIntegerArray(r);
       byte[] ba = TestBinarySortableSerDe.getRandBA(r, itest);
-      MyTestClass input = new MyTestClass(b, s, n, l, f, d, st, bd, is, li, ba);
+      MyTestClass input = new MyTestClass(b, s, n, l, f, d, st, bd, date, is, li, ba);
       BytesWritable bw = (BytesWritable) serde1.serialize(input, rowOI1);
       Object output = serde2.deserialize(bw);
 
@@ -316,7 +319,7 @@ public class TestLazyBinarySerDe extends
 
     int num = 100;
     for (int itest = 0; itest < num; itest++) {
-      int randField = r.nextInt(11);
+      int randField = r.nextInt(12);
       Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
       Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
       Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
@@ -326,12 +329,13 @@ public class TestLazyBinarySerDe extends
       String st = randField > 6 ? null : TestBinarySortableSerDe
           .getRandString(r);
       HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-      MyTestInnerStruct is = randField > 8 ? null : new MyTestInnerStruct(r
+      Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
+      MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
           .nextInt(5) - 2, r.nextInt(5) - 2);
-      List<Integer> li = randField > 9 ? null : TestBinarySortableSerDe
+      List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
           .getRandIntegerArray(r);
       byte[] ba = TestBinarySortableSerDe.getRandBA(r, itest);
-      MyTestClass input = new MyTestClass(b, s, n, l, f, d, st, bd, is, li,ba);
+      MyTestClass input = new MyTestClass(b, s, n, l, f, d, st, bd, date, is, li,ba);
       BytesWritable bw = (BytesWritable) serde1.serialize(input, rowOI1);
       Object output = serde2.deserialize(bw);
 
@@ -382,10 +386,11 @@ public class TestLazyBinarySerDe extends
       String st = randField > 6 ? null : TestBinarySortableSerDe
           .getRandString(r);
       HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
+      Date date = randField > 7 ? null : TestBinarySortableSerDe.getRandDate(r);
       MyTestInnerStruct is = randField > 7 ? null : new MyTestInnerStruct(r
           .nextInt(5) - 2, r.nextInt(5) - 2);
 
-      MyTestClassSmaller input = new MyTestClassSmaller(b, s, n, l, f, d, st, bd,
+      MyTestClassSmaller input = new MyTestClassSmaller(b, s, n, l, f, d, st, bd, date,
           is);
       BytesWritable bw = (BytesWritable) serde1.serialize(input, rowOI1);
       Object output = serde2.deserialize(bw);
@@ -415,13 +420,13 @@ public class TestLazyBinarySerDe extends
     StructObjectInspector soi1 = (StructObjectInspector) serdeOI;
     List<? extends StructField> fields1 = soi1.getAllStructFieldRefs();
     LazyBinaryMapObjectInspector lazympoi = (LazyBinaryMapObjectInspector) fields1
-        .get(11).getFieldObjectInspector();
+        .get(12).getFieldObjectInspector();
     ObjectInspector lazympkeyoi = lazympoi.getMapKeyObjectInspector();
     ObjectInspector lazympvalueoi = lazympoi.getMapValueObjectInspector();
 
     StructObjectInspector soi2 = rowOI;
     List<? extends StructField> fields2 = soi2.getAllStructFieldRefs();
-    MapObjectInspector inputmpoi = (MapObjectInspector) fields2.get(11)
+    MapObjectInspector inputmpoi = (MapObjectInspector) fields2.get(12)
         .getFieldObjectInspector();
     ObjectInspector inputmpkeyoi = inputmpoi.getMapKeyObjectInspector();
     ObjectInspector inputmpvalueoi = inputmpoi.getMapValueObjectInspector();
@@ -441,10 +446,10 @@ public class TestLazyBinarySerDe extends
       }
 
       MyTestClassBigger input = new MyTestClassBigger(null, null, null, null,
-						      null, null, null, null, null, null, null, mp);
+						      null, null, null, null, null, null, null, null, mp);
       BytesWritable bw = (BytesWritable) serde.serialize(input, rowOI);
       Object output = serde.deserialize(bw);
-      Object lazyobj = soi1.getStructFieldData(output, fields1.get(11));
+      Object lazyobj = soi1.getStructFieldData(output, fields1.get(12));
       Map<?, ?> outputmp = lazympoi.getMap(lazyobj);
 
       if (outputmp.size() != mp.size()) {
@@ -491,7 +496,7 @@ public class TestLazyBinarySerDe extends
       Random r = new Random(1234);
       MyTestClass rows[] = new MyTestClass[num];
       for (int i = 0; i < num; i++) {
-        int randField = r.nextInt(11);
+        int randField = r.nextInt(12);
         Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
         Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
         Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
@@ -500,13 +505,14 @@ public class TestLazyBinarySerDe extends
         Double d = randField > 5 ? null : Double.valueOf(r.nextDouble());
         String st = randField > 6 ? null : TestBinarySortableSerDe
             .getRandString(r);
-        HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);      
-        MyTestInnerStruct is = randField > 8 ? null : new MyTestInnerStruct(r
+        HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
+        Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
+        MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
             .nextInt(5) - 2, r.nextInt(5) - 2);
-        List<Integer> li = randField > 9 ? null : TestBinarySortableSerDe
+        List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
             .getRandIntegerArray(r);
         byte[] ba = TestBinarySortableSerDe.getRandBA(r, i);
-        MyTestClass t = new MyTestClass(b, s, n, l, f, d, st, bd, is, li, ba);
+        MyTestClass t = new MyTestClass(b, s, n, l, f, d, st, bd, date, is, li, ba);
         rows[i] = t;
       }
 

Modified: hive/branches/vectorization/service/if/TCLIService.thrift
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/if/TCLIService.thrift?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/if/TCLIService.thrift (original)
+++ hive/branches/vectorization/service/if/TCLIService.thrift Tue Jul 30 22:22:35 2013
@@ -59,6 +59,7 @@ enum TTypeId {
   USER_DEFINED_TYPE,
   DECIMAL_TYPE,
   NULL_TYPE,
+  DATE_TYPE
 }
   
 const set<TTypeId> PRIMITIVE_TYPES = [
@@ -74,6 +75,8 @@ const set<TTypeId> PRIMITIVE_TYPES = [
  TTypeId.BINARY_TYPE,
  TTypeId.DECIMAL_TYPE,
-  TTypeId.NULL_TYPE
+  TTypeId.NULL_TYPE,
+  TTypeId.DATE_TYPE
 ]
 
 const set<TTypeId> COMPLEX_TYPES = [
@@ -106,6 +108,8 @@ const map<TTypeId,string> TYPE_NAMES = {
  TTypeId.UNION_TYPE: "UNIONTYPE",
  TTypeId.DECIMAL_TYPE: "DECIMAL",
-  TTypeId.NULL_TYPE: "NULL"
+  TTypeId.NULL_TYPE: "NULL",
+  TTypeId.DATE_TYPE: "DATE"
 }
 
 // Thrift does not support recursively defined types or forward declarations,

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp Tue Jul 30 22:22:35 2013
@@ -23,6 +23,7 @@ TCLIServiceConstants::TCLIServiceConstan
   PRIMITIVE_TYPES.insert((TTypeId::type)9);
   PRIMITIVE_TYPES.insert((TTypeId::type)15);
   PRIMITIVE_TYPES.insert((TTypeId::type)16);
+  PRIMITIVE_TYPES.insert((TTypeId::type)17);
 
   COMPLEX_TYPES.insert((TTypeId::type)10);
   COMPLEX_TYPES.insert((TTypeId::type)11);
@@ -49,6 +50,7 @@ TCLIServiceConstants::TCLIServiceConstan
   TYPE_NAMES.insert(std::make_pair((TTypeId::type)13, "UNIONTYPE"));
   TYPE_NAMES.insert(std::make_pair((TTypeId::type)15, "DECIMAL"));
   TYPE_NAMES.insert(std::make_pair((TTypeId::type)16, "NULL"));
+  TYPE_NAMES.insert(std::make_pair((TTypeId::type)17, "DATE"));
 
 }
 

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp Tue Jul 30 22:22:35 2013
@@ -35,7 +35,8 @@ int _kTTypeIdValues[] = {
   TTypeId::UNION_TYPE,
   TTypeId::USER_DEFINED_TYPE,
   TTypeId::DECIMAL_TYPE,
-  TTypeId::NULL_TYPE
+  TTypeId::NULL_TYPE,
+  TTypeId::DATE_TYPE
 };
 const char* _kTTypeIdNames[] = {
   "BOOLEAN_TYPE",
@@ -54,9 +55,10 @@ const char* _kTTypeIdNames[] = {
   "UNION_TYPE",
   "USER_DEFINED_TYPE",
   "DECIMAL_TYPE",
-  "NULL_TYPE"
+  "NULL_TYPE",
+  "DATE_TYPE"
 };
-const std::map<int, const char*> _TTypeId_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(17, _kTTypeIdValues, _kTTypeIdNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _TTypeId_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(18, _kTTypeIdValues, _kTTypeIdNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
 int _kTStatusCodeValues[] = {
   TStatusCode::SUCCESS_STATUS,

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.h
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.h?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.h (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.h Tue Jul 30 22:22:35 2013
@@ -42,7 +42,8 @@ struct TTypeId {
     UNION_TYPE = 13,
     USER_DEFINED_TYPE = 14,
     DECIMAL_TYPE = 15,
-    NULL_TYPE = 16
+    NULL_TYPE = 16,
+    DATE_TYPE = 17
   };
 };
 

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java Tue Jul 30 22:22:35 2013
@@ -47,6 +47,7 @@ public class TCLIServiceConstants {
     PRIMITIVE_TYPES.add(org.apache.hive.service.cli.thrift.TTypeId.BINARY_TYPE);
     PRIMITIVE_TYPES.add(org.apache.hive.service.cli.thrift.TTypeId.DECIMAL_TYPE);
     PRIMITIVE_TYPES.add(org.apache.hive.service.cli.thrift.TTypeId.NULL_TYPE);
+    PRIMITIVE_TYPES.add(org.apache.hive.service.cli.thrift.TTypeId.DATE_TYPE);
   }
 
   public static final Set<TTypeId> COMPLEX_TYPES = new HashSet<TTypeId>();
@@ -82,6 +83,7 @@ public class TCLIServiceConstants {
     TYPE_NAMES.put(org.apache.hive.service.cli.thrift.TTypeId.UNION_TYPE, "UNIONTYPE");
     TYPE_NAMES.put(org.apache.hive.service.cli.thrift.TTypeId.DECIMAL_TYPE, "DECIMAL");
     TYPE_NAMES.put(org.apache.hive.service.cli.thrift.TTypeId.NULL_TYPE, "NULL");
+    TYPE_NAMES.put(org.apache.hive.service.cli.thrift.TTypeId.DATE_TYPE, "DATE");
   }
 
 }

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java Tue Jul 30 22:22:35 2013
@@ -28,7 +28,8 @@ public enum TTypeId implements org.apach
   UNION_TYPE(13),
   USER_DEFINED_TYPE(14),
   DECIMAL_TYPE(15),
-  NULL_TYPE(16);
+  NULL_TYPE(16),
+  DATE_TYPE(17);
 
   private final int value;
 
@@ -83,6 +84,8 @@ public enum TTypeId implements org.apach
         return DECIMAL_TYPE;
       case 16:
         return NULL_TYPE;
+      case 17:
+        return DATE_TYPE;
       default:
         return null;
     }

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/constants.py
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/constants.py?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/constants.py (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/constants.py Tue Jul 30 22:22:35 2013
@@ -22,6 +22,7 @@ PRIMITIVE_TYPES = set([
     9,
     15,
     16,
+    17,
 ])
 COMPLEX_TYPES = set([
     10,
@@ -51,4 +52,5 @@ TYPE_NAMES = {
     13 : "UNIONTYPE",
     15 : "DECIMAL",
     16 : "NULL",
+    17 : "DATE",
 }

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/ttypes.py?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/ttypes.py (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/ttypes.py Tue Jul 30 22:22:35 2013
@@ -45,6 +45,7 @@ class TTypeId:
   USER_DEFINED_TYPE = 14
   DECIMAL_TYPE = 15
   NULL_TYPE = 16
+  DATE_TYPE = 17
 
   _VALUES_TO_NAMES = {
     0: "BOOLEAN_TYPE",
@@ -64,6 +65,7 @@ class TTypeId:
     14: "USER_DEFINED_TYPE",
     15: "DECIMAL_TYPE",
     16: "NULL_TYPE",
+    17: "DATE_TYPE",
   }
 
   _NAMES_TO_VALUES = {
@@ -84,6 +86,7 @@ class TTypeId:
     "USER_DEFINED_TYPE": 14,
     "DECIMAL_TYPE": 15,
     "NULL_TYPE": 16,
+    "DATE_TYPE": 17,
   }
 
 class TStatusCode:

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb Tue Jul 30 22:22:35 2013
@@ -20,6 +20,7 @@ PRIMITIVE_TYPES = Set.new([
     9,
     15,
     16,
+    17,
 ])
 
 COMPLEX_TYPES = Set.new([
@@ -52,5 +53,6 @@ TYPE_NAMES = {
     13 => %q"UNIONTYPE",
     15 => %q"DECIMAL",
     16 => %q"NULL",
+    17 => %q"DATE",
 }
 

Modified: hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb Tue Jul 30 22:22:35 2013
@@ -30,8 +30,9 @@ module TTypeId
   USER_DEFINED_TYPE = 14
   DECIMAL_TYPE = 15
   NULL_TYPE = 16
-  VALUE_MAP = {0 => "BOOLEAN_TYPE", 1 => "TINYINT_TYPE", 2 => "SMALLINT_TYPE", 3 => "INT_TYPE", 4 => "BIGINT_TYPE", 5 => "FLOAT_TYPE", 6 => "DOUBLE_TYPE", 7 => "STRING_TYPE", 8 => "TIMESTAMP_TYPE", 9 => "BINARY_TYPE", 10 => "ARRAY_TYPE", 11 => "MAP_TYPE", 12 => "STRUCT_TYPE", 13 => "UNION_TYPE", 14 => "USER_DEFINED_TYPE", 15 => "DECIMAL_TYPE", 16 => "NULL_TYPE"}
-  VALID_VALUES = Set.new([BOOLEAN_TYPE, TINYINT_TYPE, SMALLINT_TYPE, INT_TYPE, BIGINT_TYPE, FLOAT_TYPE, DOUBLE_TYPE, STRING_TYPE, TIMESTAMP_TYPE, BINARY_TYPE, ARRAY_TYPE, MAP_TYPE, STRUCT_TYPE, UNION_TYPE, USER_DEFINED_TYPE, DECIMAL_TYPE, NULL_TYPE]).freeze
+  DATE_TYPE = 17
+  VALUE_MAP = {0 => "BOOLEAN_TYPE", 1 => "TINYINT_TYPE", 2 => "SMALLINT_TYPE", 3 => "INT_TYPE", 4 => "BIGINT_TYPE", 5 => "FLOAT_TYPE", 6 => "DOUBLE_TYPE", 7 => "STRING_TYPE", 8 => "TIMESTAMP_TYPE", 9 => "BINARY_TYPE", 10 => "ARRAY_TYPE", 11 => "MAP_TYPE", 12 => "STRUCT_TYPE", 13 => "UNION_TYPE", 14 => "USER_DEFINED_TYPE", 15 => "DECIMAL_TYPE", 16 => "NULL_TYPE", 17 => "DATE_TYPE"}
+  VALID_VALUES = Set.new([BOOLEAN_TYPE, TINYINT_TYPE, SMALLINT_TYPE, INT_TYPE, BIGINT_TYPE, FLOAT_TYPE, DOUBLE_TYPE, STRING_TYPE, TIMESTAMP_TYPE, BINARY_TYPE, ARRAY_TYPE, MAP_TYPE, STRUCT_TYPE, UNION_TYPE, USER_DEFINED_TYPE, DECIMAL_TYPE, NULL_TYPE, DATE_TYPE]).freeze
 end
 
 module TStatusCode

Modified: hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/ColumnValue.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/ColumnValue.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/ColumnValue.java (original)
+++ hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/ColumnValue.java Tue Jul 30 22:22:35 2013
@@ -18,6 +18,7 @@
 
 package org.apache.hive.service.cli;
 
+import java.sql.Date;
 import java.sql.Timestamp;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
@@ -117,6 +118,14 @@ public class ColumnValue {
     return new ColumnValue(TColumnValue.stringVal(tStringValue));
   }
 
+  public static ColumnValue dateValue(Date value) {
+    TStringValue tStringValue = new TStringValue();
+    if (value != null) {
+      tStringValue.setValue(value.toString());
+    }
+    return new ColumnValue(TColumnValue.stringVal(tStringValue));
+  }
+
   public static ColumnValue timestampValue(Timestamp value) {
     TStringValue tStringValue = new TStringValue();
     if (value != null) {
@@ -151,6 +160,8 @@ public class ColumnValue {
       return doubleValue((Double)value);
     case STRING_TYPE:
       return stringValue((String)value);
+    case DATE_TYPE:
+      return dateValue((Date)value);
     case TIMESTAMP_TYPE:
       return timestampValue((Timestamp)value);
     case DECIMAL_TYPE:

Modified: hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/Type.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/Type.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/Type.java (original)
+++ hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/Type.java Tue Jul 30 22:22:35 2013
@@ -54,6 +54,9 @@ public enum Type {
   STRING_TYPE("STRING",
       java.sql.Types.VARCHAR,
       TTypeId.STRING_TYPE),
+  DATE_TYPE("DATE",
+      java.sql.Types.DATE,
+      TTypeId.DATE_TYPE),
   TIMESTAMP_TYPE("TIMESTAMP",
       java.sql.Types.TIMESTAMP,
       TTypeId.TIMESTAMP_TYPE),
@@ -213,6 +216,7 @@ public enum Type {
     switch (this) {
     case BOOLEAN_TYPE:
     case STRING_TYPE:
+    case DATE_TYPE:
     case TIMESTAMP_TYPE:
     case TINYINT_TYPE:
     case SMALLINT_TYPE:
@@ -247,6 +251,8 @@ public enum Type {
     case STRING_TYPE:
     case BINARY_TYPE:
       return Integer.MAX_VALUE;
+    case DATE_TYPE:
+      return 10;
     case TIMESTAMP_TYPE:
       return 30;
     default:

Modified: hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java (original)
+++ hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java Tue Jul 30 22:22:35 2013
@@ -106,6 +106,8 @@ public interface DelegationTokenStore ex
    * and a potential scalability improvement would be to partition by master key id
    * @return
    */
-  List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers();
+  List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() throws TokenStoreException;
+
+  void setStore(Object hmsHandler) throws TokenStoreException;
 
 }

Modified: hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java (original)
+++ hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java Tue Jul 30 22:22:35 2013
@@ -333,7 +333,7 @@ import static org.apache.hadoop.fs.Commo
      }
 
      @Override
-     public void startDelegationTokenSecretManager(Configuration conf)
+     public void startDelegationTokenSecretManager(Configuration conf, Object hms)
      throws IOException{
        long secretKeyInterval =
          conf.getLong(DELEGATION_KEY_UPDATE_INTERVAL_KEY,
@@ -345,10 +345,12 @@ import static org.apache.hadoop.fs.Commo
            conf.getLong(DELEGATION_TOKEN_RENEW_INTERVAL_KEY,
                         DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT);
 
+       DelegationTokenStore dts = getTokenStore(conf);
+       dts.setStore(hms);
        secretManager = new TokenStoreDelegationTokenSecretManager(secretKeyInterval,
              tokenMaxLifetime,
              tokenRenewInterval,
-             DELEGATION_TOKEN_GC_INTERVAL, getTokenStore(conf));
+             DELEGATION_TOKEN_GC_INTERVAL, dts);
        secretManager.startThreads();
      }
 

Modified: hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java (original)
+++ hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java Tue Jul 30 22:22:35 2013
@@ -107,4 +107,9 @@ public class MemoryTokenStore implements
     //no-op
   }
 
+  @Override
+  public void setStore(Object hmsHandler) throws TokenStoreException {
+    // no-op
+  }
+
 }

Modified: hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java (original)
+++ hive/branches/vectorization/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java Tue Jul 30 22:22:35 2013
@@ -74,7 +74,7 @@ public class ZooKeeperTokenStore impleme
         }
       }
     }
-    
+
   }
 
   /**
@@ -107,7 +107,7 @@ public class ZooKeeperTokenStore impleme
 
   /**
    * Create a ZooKeeper session that is in connected state.
-   * 
+   *
    * @param connectString ZooKeeper connect String
    * @param sessionTimeout ZooKeeper session timeout
    * @param connectTimeout milliseconds to wait for connection, 0 or negative value means no wait
@@ -147,7 +147,7 @@ public class ZooKeeperTokenStore impleme
     }
     return zk;
   }
-  
+
   /**
    * Create a path if it does not already exist ("mkdir -p")
    * @param zk ZooKeeper session
@@ -460,4 +460,9 @@ public class ZooKeeperTokenStore impleme
     }
   }
 
+  @Override
+  public void setStore(Object hmsHandler) throws TokenStoreException {
+    // no-op.
+  }
+
 }

Modified: hive/branches/vectorization/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java (original)
+++ hive/branches/vectorization/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java Tue Jul 30 22:22:35 2013
@@ -26,6 +26,7 @@ import org.apache.thrift.TProcessor;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
+
  /**
   * This class is only overridden by the secure hadoop shim. It allows
   * the Thrift SASL support to bridge to Hadoop's UserGroupInformation
@@ -74,8 +75,9 @@ import org.apache.thrift.transport.TTran
      public abstract TProcessor wrapProcessor(TProcessor processor);
      public abstract TProcessor wrapNonAssumingProcessor(TProcessor processor);
      public abstract InetAddress getRemoteAddress();
+     public abstract void startDelegationTokenSecretManager(Configuration conf,
+       Object hmsHandler) throws IOException;
      public abstract String getRemoteUser();
-     public abstract void startDelegationTokenSecretManager(Configuration conf) throws IOException;
      public abstract String getDelegationToken(String owner, String renewer) 
      throws IOException, InterruptedException;
      public abstract long renewDelegationToken(String tokenStrForm) throws IOException;

Modified: hive/branches/vectorization/testutils/ptest2/README.md
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/README.md?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/README.md (original)
+++ hive/branches/vectorization/testutils/ptest2/README.md Tue Jul 30 22:22:35 2013
@@ -25,10 +25,10 @@ On all slaves add the following to /etc/
     MaxSessions 100
     MaxStartups 100
 
-# Install git, java, ant, and maven
+# Install git, svn, make, patch, java, ant, and maven
 
-Recent version os java, ant and maven should be installed. Additionally environment variables
-such as MAVEN_OPTS and ANT_OPTS should be configured with large leap sizes:
+Recent version of git, svn, make, patch, java, ant and maven should be installed. Additionally
+environment variables such as MAVEN_OPTS and ANT_OPTS should be configured with large heap sizes:
 
     $ for item in java maven ant; do echo $item; cat /etc/profile.d/${item}.sh;done
     java
@@ -73,7 +73,8 @@ Ensure the user running the tests has st
 
    $ cat ~/.ssh/config
    StrictHostKeyChecking no
-
+   ConnectTimeout 20
+   ServerAliveInterval 1
 
 # Configure properties file
 
@@ -82,4 +83,4 @@ See conf/example-apache-trunk.properties
 # Execute
 
     mvn dependency:copy-dependencies
-    java -Xms4g -Xmx4g -cp "conf/:/home/hiveptest/hive-ptest/target/hive-test-1.0.jar:target/dependency/*" org.apache.hive.ptest.RunTests --properties apache-trunk.properties
+    java -Xms4g -Xmx4g -cp "target/hive-ptest-1.0-classes.jar:target/dependency/*" org.apache.hive.ptest.execution.PTest --properties apache-trunk.properties

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java Tue Jul 30 22:22:35 2013
@@ -80,6 +80,7 @@ public class PTestClient {
   private static final String JIRA = "jira";
   private static final String OUTPUT_DIR = "outputDir";
   private static final String TEST_HANDLE = "testHandle";
+  private static final String CLEAR_LIBRARY_CACHE = "clearLibraryCache";
   private final String mApiEndPoint;
   private final String mLogsEndpoint;
   private final ObjectMapper mMapper;
@@ -102,7 +103,7 @@ public class PTestClient {
         new UsernamePasswordCredentials("hive", password));
   }
   public boolean testStart(String profile, String testHandle,
-      String jira, String patch, String testOutputDir)
+      String jira, String patch, String testOutputDir, boolean clearLibraryCache)
   throws Exception {
     patch = Strings.nullToEmpty(patch).trim();
     if(!patch.isEmpty()) {
@@ -111,7 +112,7 @@ public class PTestClient {
         throw new IllegalArgumentException("Patch " + patch + " was zero bytes");
       }
     }
-    TestStartRequest startRequest = new TestStartRequest(profile, testHandle, jira, patch);
+    TestStartRequest startRequest = new TestStartRequest(profile, testHandle, jira, patch, clearLibraryCache);
     post(startRequest);
     boolean result = false;
     try {
@@ -256,7 +257,8 @@ public class PTestClient {
     options.addOption(null, JIRA, true, "JIRA to post the results to e.g.: HIVE-XXXX");
     options.addOption(null, TEST_HANDLE, true, "Server supplied test handle. (Required for testStop and testTailLog)");
     options.addOption(null, OUTPUT_DIR, true, "Directory to download and save test-results.tar.gz to. (Optional for testStart)");
-
+    options.addOption(null, CLEAR_LIBRARY_CACHE, false, "Before starting the test, delete the ivy and maven directories (Optional for testStart)");
+    
     CommandLine commandLine = parser.parse(options, args);
 
     if(commandLine.hasOption(HELP_SHORT)) {
@@ -278,7 +280,8 @@ public class PTestClient {
           TEST_HANDLE
         });
       result = client.testStart(commandLine.getOptionValue(PROFILE), commandLine.getOptionValue(TEST_HANDLE),
-          commandLine.getOptionValue(JIRA), commandLine.getOptionValue(PATCH), commandLine.getOptionValue(OUTPUT_DIR));
+          commandLine.getOptionValue(JIRA), commandLine.getOptionValue(PATCH), commandLine.getOptionValue(OUTPUT_DIR),
+          commandLine.hasOption(CLEAR_LIBRARY_CACHE));
     } else if("testTailLog".equalsIgnoreCase(command)) {
       result = client.testTailLog(commandLine.getOptionValue(TEST_HANDLE));
     } else if("testList".equalsIgnoreCase(command)) {

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/request/TestStartRequest.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/request/TestStartRequest.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/request/TestStartRequest.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/request/TestStartRequest.java Tue Jul 30 22:22:35 2013
@@ -23,16 +23,18 @@ public class TestStartRequest {
   private String testHandle;
   private String patchURL;
   private String jiraName;
+  private boolean clearLibraryCache;
 
   public TestStartRequest() {
 
   }
   public TestStartRequest(String profile, String testHandle, 
-      String jiraName, String patchURL) {
+      String jiraName, String patchURL, boolean clearLibraryCache) {
     this.profile = profile;
     this.testHandle = testHandle;
     this.jiraName = jiraName;
     this.patchURL = patchURL;
+    this.clearLibraryCache = clearLibraryCache;
   }
   public String getProfile() {
     return profile;
@@ -45,8 +47,13 @@ public class TestStartRequest {
   }
   public void setPatchURL(String patchURL) {
     this.patchURL = patchURL;
+  }  
+  public boolean isClearLibraryCache() {
+    return clearLibraryCache;
+  }
+  public void setClearLibraryCache(boolean clearLibraryCache) {
+    this.clearLibraryCache = clearLibraryCache;
   }
-  
   public String getJiraName() {
     return jiraName;
   }
@@ -64,6 +71,6 @@ public class TestStartRequest {
   public String toString() {
     return "TestStartRequest [profile=" + profile + ", testHandle="
         + testHandle + ", patchURL=" + patchURL + ", jiraName=" + jiraName
-        + "]";
+        + ", clearLibraryCache=" + clearLibraryCache + "]";
   }
 }

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java Tue Jul 30 22:22:35 2013
@@ -110,6 +110,7 @@ public void run() {
             TestConfiguration testConfiguration = TestConfiguration.fromFile(profileConfFile, logger);
             testConfiguration.setPatch(startRequest.getPatchURL());
             testConfiguration.setJiraName(startRequest.getJiraName());
+            testConfiguration.setClearLibraryCache(startRequest.isClearLibraryCache());
             PTest ptest = mPTestBuilder.build(testConfiguration, mExecutionContext,
                 test.getStartRequest().getTestHandle(), logDir,
                 new LocalCommandFactory(logger), new SSHCommandExecutor(logger),

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java Tue Jul 30 22:22:35 2013
@@ -20,8 +20,8 @@ package org.apache.hive.ptest.execution;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.BlockingQueue;
@@ -41,31 +41,38 @@ import com.google.common.util.concurrent
 
 public class ExecutionPhase extends Phase {
 
+  private final File succeededLogDir;
   private final File failedLogDir;
   private final BlockingQueue<TestBatch> parallelWorkQueue;
   private final BlockingQueue<TestBatch> isolatedWorkQueue;
+  private final Set<String> executedTests;
   private final Set<String> failedTests;
   private final Supplier<List<TestBatch>> testBatchSupplier;
-  private final List<TestBatch> failedTestResults;
+  private final Set<TestBatch> failedTestResults;
 
   public ExecutionPhase(ImmutableList<HostExecutor> hostExecutors,
       LocalCommandFactory localCommandFactory,
       ImmutableMap<String, String> templateDefaults,
-      File failedLogDir, Supplier<List<TestBatch>> testBatchSupplier,
-      Set<String> failedTests, Logger logger) throws IOException {
+      File succeededLogDir, File failedLogDir, Supplier<List<TestBatch>> testBatchSupplier,
+      Set<String> executedTests, Set<String> failedTests, Logger logger)
+          throws IOException {
     super(hostExecutors, localCommandFactory, templateDefaults, logger);
+    this.succeededLogDir = succeededLogDir;
     this.failedLogDir = failedLogDir;
     this.testBatchSupplier = testBatchSupplier;
+    this.executedTests = executedTests;
     this.failedTests = failedTests;
     this.parallelWorkQueue = new LinkedBlockingQueue<TestBatch>();
     this.isolatedWorkQueue = new LinkedBlockingQueue<TestBatch>();
     this.failedTestResults = Collections.
-        synchronizedList(new ArrayList<TestBatch>());
+        synchronizedSet(new HashSet<TestBatch>());
   }
   @Override
 public void execute() throws Throwable {
     long start = System.currentTimeMillis();
+    List<TestBatch> testBatches = Lists.newArrayList();
     for(TestBatch batch : testBatchSupplier.get()) {
+      testBatches.add(batch);
       if(batch.isParallel()) {
         parallelWorkQueue.add(batch);
       } else {
@@ -74,16 +81,17 @@ public void execute() throws Throwable {
     }
     try {
       do {
-        double numberBadHosts = 0d;
+        float numberBadHosts = 0f;
         for(HostExecutor hostExecutor : hostExecutors) {
           if(hostExecutor.remainingDrones() == 0) {
             numberBadHosts++;
           }
         }
         Preconditions.checkState(hostExecutors.size() > 0, "Host executors cannot be empty");
-        if((numberBadHosts / (double)hostExecutors.size()) > 0.30d) {
-          throw new IllegalStateException("Too many bad hosts: " + (int)numberBadHosts + 
-              " bad hosts out of " + hostExecutors.size() + " is greater than threshold of 30%");
+        float percentBadHosts = numberBadHosts / (float)hostExecutors.size();
+        if(percentBadHosts > 0.50f) {
+          throw new IllegalStateException("Too many bad hosts: " + percentBadHosts + "% (" + (int)numberBadHosts + 
+              " / " + hostExecutors.size() + ") is greater than threshold of 50%");
         }
         List<ListenableFuture<Void>> results = Lists.newArrayList();
         for(HostExecutor hostExecutor : getHostExecutors()) {
@@ -93,14 +101,16 @@ public void execute() throws Throwable {
       } while(!(parallelWorkQueue.isEmpty() && isolatedWorkQueue.isEmpty()));
       Preconditions.checkState(parallelWorkQueue.isEmpty(), "Parallel work queue is not empty. All drones must have aborted.");
       Preconditions.checkState(isolatedWorkQueue.isEmpty(), "Isolated work queue is not empty. All drones must have aborted.");
-      if(!failedTestResults.isEmpty()) {
-        for(TestBatch failure : failedTestResults) {
-          File batchLogDir = new File(failedLogDir, failure.getName());
-          JUnitReportParser parser = new JUnitReportParser(logger, batchLogDir);
-          for(String failedTest : parser.getFailedTests()) {
-            failedTests.add(failedTest);
-          }
-        }
+      for(TestBatch batch : testBatches) {
+       File batchLogDir;
+       if(failedTestResults.contains(batch)) {
+         batchLogDir = new File(failedLogDir, batch.getName());
+       } else {
+         batchLogDir = new File(succeededLogDir, batch.getName());
+       }
+       JUnitReportParser parser = new JUnitReportParser(logger, batchLogDir);
+       executedTests.addAll(parser.getExecutedTests());
+       failedTests.addAll(parser.getFailedTests());
       }
     } finally {
       long elapsed = System.currentTimeMillis() - start;

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java Tue Jul 30 22:22:35 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.io.PrintWriter;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CopyOnWriteArrayList;
@@ -91,7 +92,7 @@ class HostExecutor {
    * @return failed tests
    */
   ListenableFuture<Void> submitTests(final BlockingQueue<TestBatch> parallelWorkQueue,
-      final BlockingQueue<TestBatch> isolatedWorkQueue, final List<TestBatch> failedTestResults) {
+      final BlockingQueue<TestBatch> isolatedWorkQueue, final Set<TestBatch> failedTestResults) {
     return mExecutor.submit(new Callable<Void>() {
       @Override
       public Void call() throws Exception {
@@ -116,7 +117,7 @@ class HostExecutor {
    * are removed the host will be replaced before the next run.
    */
   private void executeTests(final BlockingQueue<TestBatch> parallelWorkQueue,
-      final BlockingQueue<TestBatch> isolatedWorkQueue, final List<TestBatch> failedTestResults)
+      final BlockingQueue<TestBatch> isolatedWorkQueue, final Set<TestBatch> failedTestResults)
           throws Exception {
     mLogger.info("Starting parallel execution on " + mHost.getName());
     List<ListenableFuture<Void>> droneResults = Lists.newArrayList();
@@ -129,7 +130,7 @@ class HostExecutor {
             do {
               batch = parallelWorkQueue.poll(mNumPollSeconds, TimeUnit.SECONDS);
               if(batch != null) {
-                if(!executeTestBatch(drone, batch)) {
+                if(!executeTestBatch(drone, batch, failedTestResults.size())) {
                   failedTestResults.add(batch);
                 }
               }
@@ -154,7 +155,7 @@ class HostExecutor {
         do {
           batch = isolatedWorkQueue.poll(mNumPollSeconds, TimeUnit.SECONDS);
           if(batch != null) {
-            if(!executeTestBatch(drone, batch)) {
+            if(!executeTestBatch(drone, batch, failedTestResults.size())) {
               failedTestResults.add(batch);
             }
           }
@@ -173,7 +174,7 @@ class HostExecutor {
    * Executes the test batch on the drone in question. If the command
    * exits with a status code of 255 throw an AbortDroneException.
    */
-  private boolean executeTestBatch(Drone drone, TestBatch batch)
+  private boolean executeTestBatch(Drone drone, TestBatch batch, int numOfFailedTests)
       throws IOException, SSHExecutionException, AbortDroneException {
     String scriptName = "hiveptest-" + batch.getName() + ".sh";
     File script = new File(mLocalScratchDirectory, scriptName);
@@ -183,6 +184,7 @@ class HostExecutor {
     templateVariables.put("testArguments", batch.getTestArguments());
     templateVariables.put("localDir", drone.getLocalDirectory());
     templateVariables.put("logDir", drone.getLocalLogDirectory());
+    templateVariables.put("numOfFailedTests", String.valueOf(numOfFailedTests));
     String command = Templates.getTemplateResult("bash $localDir/$instanceName/scratch/" + script.getName(),
         templateVariables);
     Templates.writeTemplateResult("batch-exec.vm", script, templateVariables);
@@ -347,4 +349,4 @@ class HostExecutor {
     }
     return result;
   }
-}
\ No newline at end of file
+}

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JIRAService.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JIRAService.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JIRAService.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JIRAService.java Tue Jul 30 22:22:35 2013
@@ -21,8 +21,11 @@ package org.apache.hive.ptest.execution;
 import java.io.IOException;
 import java.net.URL;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 
+import org.apache.hive.ptest.api.server.TestLogger;
+import org.apache.hive.ptest.execution.conf.Context;
 import org.apache.hive.ptest.execution.conf.TestConfiguration;
 import org.apache.http.HttpException;
 import org.apache.http.HttpHost;
@@ -44,11 +47,14 @@ import org.apache.http.impl.client.Defau
 import org.apache.http.protocol.BasicHttpContext;
 import org.apache.http.protocol.ExecutionContext;
 import org.apache.http.protocol.HttpContext;
+import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 
 class JIRAService {
   private final Logger mLogger;
@@ -71,7 +77,7 @@ class JIRAService {
     mJenkinsURL = configuration.getJenkinsURL();
   }
 
-  void postComment(boolean error, Set<String> failedTests,
+  void postComment(boolean error, int numExecutesTests, Set<String> failedTests,
       List<String> messages) { 
     DefaultHttpClient httpClient = new DefaultHttpClient();    
     try {
@@ -79,7 +85,11 @@ class JIRAService {
       List<String> comments = Lists.newArrayList();
       comments.add("");
       comments.add("");
-      if (error || !failedTests.isEmpty()) {
+      if(!failedTests.isEmpty()) {
+        comments.add("{color:red}Overall{color}: -1 at least one tests failed");
+      } else if(numExecutesTests == 0) {
+        comments.add("{color:red}Overall{color}: -1 no tests executed");
+      } else if (error) {
         comments.add("{color:red}Overall{color}: -1 build exited with an error");
       } else {
         comments.add("{color:green}Overall{color}: +1 all checks pass");
@@ -90,26 +100,32 @@ class JIRAService {
         comments.add(mPatch);
       }
       comments.add("");
-      if (failedTests.isEmpty()) {
-        comments.add(formatSuccess("+1 all tests passed"));
-      } else {
-        comments.add(formatError("-1 due to " + failedTests.size()
-            + " failed/errored test(s)"));
-        comments.add("Failed tests:");
-        comments.addAll(failedTests);
+      if(numExecutesTests > 0) {
+        if (failedTests.isEmpty()) {
+          comments.add(formatSuccess("+1 "+ numExecutesTests + " tests passed"));
+        } else {
+          comments.add(formatError("-1 due to " + failedTests.size()
+              + " failed/errored test(s), " + numExecutesTests + " tests executed"));
+          comments.add("*Failed tests:*");
+          comments.add("{noformat}");
+          comments.addAll(failedTests);
+          comments.add("{noformat}");
+        }
+        comments.add("");        
       }
-      comments.add("");
       comments.add("Test results: " + mJenkinsURL + "/" + buildTag + "/testReport");
       comments.add("Console output: " + mJenkinsURL + "/" + buildTag + "/console");
       comments.add("");
-      comments.add("Messages:");
-      for (String message : messages) {
-        comments.add(message.replaceAll("\n", "\\n"));
+      if(!messages.isEmpty()) {
+        comments.add("Messages:");
+        comments.add("{noformat}");
+        comments.addAll(messages);
+        comments.add("{noformat}");
+        comments.add("");        
       }
-      comments.add("");
       comments.add("This message is automatically generated.");
-      mLogger.info("Comment: " + Joiner.on("\n").join(comments));
-      String body = Joiner.on("\\n").join(comments);
+      mLogger.info("Comment: " + Joiner.on("\n").join(comments));      
+      String body = Joiner.on("\n").join(comments);
       String url = String.format("%s/rest/api/2/issue/%s/comment", mUrl, mName);
       URL apiURL = new URL(mUrl);
       httpClient.getCredentialsProvider()
@@ -121,8 +137,8 @@ class JIRAService {
       localcontext.setAttribute("preemptive-auth", new BasicScheme());
       httpClient.addRequestInterceptor(new PreemptiveAuth(), 0);      
       HttpPost request = new HttpPost(url);
-      StringEntity params = new StringEntity(String.format(
-          "{\"body\": \"%s\"}", body));
+      ObjectMapper mapper = new ObjectMapper();
+      StringEntity params = new StringEntity(mapper.writeValueAsString(new Body(body)));
       request.addHeader("Content-Type", "application/json");
       request.setEntity(params);
       HttpResponse httpResponse = httpClient.execute(request, localcontext);
@@ -140,6 +156,23 @@ class JIRAService {
     }
   }
   
+  @SuppressWarnings("unused")  
+  private static class Body {
+    private String body;
+    public Body() {
+      
+    }
+    public Body(String body) {
+      this.body = body;
+    }
+    public String getBody() {
+      return body;
+    }
+    public void setBody(String body) {
+      this.body = body;
+    }    
+  }
+  
   /**
    * Hive-Build-123 to Hive-Build/123
    */
@@ -182,4 +215,22 @@ class JIRAService {
       }
     }
   }
-}
\ No newline at end of file
+  
+  public static void main(String[] args) throws Exception {
+    TestLogger logger = new TestLogger(System.err, TestLogger.LEVEL.TRACE);
+    Map<String, String> context = Maps.newHashMap();
+    context.put("jiraUrl", "https://issues.apache.org/jira");
+    context.put("jiraUser", "hiveqa");
+    context.put("jiraPassword", "password goes here");
+    context.put("branch", "trunk");
+    context.put("repository", "repo");
+    context.put("repositoryName", "repoName");
+    context.put("antArgs", "-Dsome=thing");
+    TestConfiguration configuration = new TestConfiguration(new Context(context), logger);
+    configuration.setJiraName("HIVE-4892");
+    JIRAService service = new JIRAService(logger, configuration, "test-123");
+    List<String> messages = Lists.newArrayList("msg1", "msg2");
+    Set<String> failedTests = Sets.newHashSet("failed");
+    service.postComment(false, 5, failedTests, messages);
+  }
+}

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JUnitReportParser.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JUnitReportParser.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JUnitReportParser.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JUnitReportParser.java Tue Jul 30 22:22:35 2013
@@ -36,9 +36,15 @@ import com.google.common.collect.Sets;
 public class JUnitReportParser {
   private final File directory;
   private final Logger logger;
+  private final Set<String> executedTests;
+  private final Set<String> failedTests;
+  private boolean parsed;
   public JUnitReportParser(Logger logger, File directory) throws Exception {
     this.logger = logger;
     this.directory = directory;
+    executedTests = Sets.newHashSet();
+    failedTests =  Sets.newHashSet();
+    parsed = false;
   }
 
   private Set<File> getFiles(File directory) {
@@ -51,15 +57,26 @@ public class JUnitReportParser {
           if(name.startsWith("TEST-") && name.endsWith(".xml")) {
             result.add(file);
           }
-        } else if(file.isDirectory()) {
-          result.addAll(getFiles(file));
         }
       }
     }
     return result;
   }
+  public Set<String> getExecutedTests() {
+    if(!parsed) {
+      parse();
+      parsed = true;
+    }
+    return executedTests;
+  }
   public Set<String> getFailedTests() {
-    final Set<String> failedTests = Sets.newHashSet();
+    if(!parsed) {
+      parse();
+      parsed = true;
+    }
+    return failedTests;
+  }
+  private void parse() {
     for(File file : getFiles(directory)) {
       FileInputStream stream = null;
       try {
@@ -86,8 +103,11 @@ public class JUnitReportParser {
           @Override
         public void endElement(String uri, String localName, String qName)  {
             if ("testcase".equals(qName)) {
-              if(failedOrErrored && name != null) {
-                failedTests.add(name);
+              if(name != null) {
+                executedTests.add(name);
+                if(failedOrErrored) {
+                  failedTests.add(name);
+                }
               }
             }
           }
@@ -104,6 +124,5 @@ public class JUnitReportParser {
         }
       }
     }
-    return failedTests;
   }
-}
\ No newline at end of file
+}

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java Tue Jul 30 22:22:35 2013
@@ -67,6 +67,7 @@ public class PTest {
 
   private final TestConfiguration mConfiguration;
   private final ListeningExecutorService mExecutor;
+  private final Set<String> mExecutedTests;
   private final Set<String> mFailedTests;
   private final List<Phase> mPhases;
   private final ExecutionContext mExecutionContext;
@@ -81,6 +82,7 @@ public class PTest {
     mConfiguration = configuration;
     mLogger = logger;
     mBuildTag = buildTag;
+    mExecutedTests = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
     mFailedTests = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
     mExecutionContext = executionContext;
     mExecutor = MoreExecutors.listeningDecorator(Executors.newCachedThreadPool());
@@ -99,6 +101,7 @@ public class PTest {
         put("repositoryName", configuration.getRepositoryName()).
         put("repositoryType", configuration.getRepositoryType()).
         put("branch", configuration.getBranch()).
+        put("clearLibraryCache", String.valueOf(configuration.isClearLibraryCache())).
         put("workingDir", mExecutionContext.getLocalWorkingDirectory()).
         put("antArgs", configuration.getAntArgs()).
         put("buildTag", buildTag).
@@ -120,7 +123,7 @@ public class PTest {
     mPhases.add(new CleanupPhase(mHostExecutors, localCommandFactory, templateDefaults, logger));
     mPhases.add(new PrepPhase(mHostExecutors, localCommandFactory, templateDefaults, scratchDir, patchFile, logger));
     mPhases.add(new ExecutionPhase(mHostExecutors, localCommandFactory, templateDefaults,
-        failedLogDir, testParser.parse(), mFailedTests, logger));
+        succeededLogDir, failedLogDir, testParser.parse(), mExecutedTests, mFailedTests, logger));
     mPhases.add(new ReportingPhase(mHostExecutors, localCommandFactory, templateDefaults, logger));
   }
   public int run() {
@@ -143,11 +146,6 @@ public class PTest {
           elapsedTimes.put(phase.getClass().getSimpleName(), elapsedTime);
         }
       }
-      for(HostExecutor hostExecutor : mHostExecutors) {
-        if(hostExecutor.remainingDrones() == 0) {
-          mExecutionContext.addBadHost(hostExecutor.getHost());
-        }
-      }
       if(!mFailedTests.isEmpty()) {
         throw new TestsFailedException(mFailedTests.size() + " tests failed");
       }
@@ -156,6 +154,11 @@ public class PTest {
       messages.add("Tests failed with: " + throwable.getClass().getSimpleName() + ": " + throwable.getMessage());
       error = true;
     } finally {
+      for(HostExecutor hostExecutor : mHostExecutors) {
+        if(hostExecutor.remainingDrones() == 0) {
+          mExecutionContext.addBadHost(hostExecutor.getHost());
+        }
+      }
       mExecutor.shutdownNow();
       if(mFailedTests.isEmpty()) {
         mLogger.info(String.format("%d failed tests", mFailedTests.size()));
@@ -165,6 +168,7 @@ public class PTest {
       for(String failingTestName : mFailedTests) {
         mLogger.warn(failingTestName);
       }
+      mLogger.info("Executed " + mExecutedTests.size() + " tests");
       for(Map.Entry<String, Long> entry : elapsedTimes.entrySet()) {
         mLogger.info(String.format("PERF: Phase %s took %d minutes", entry.getKey(), entry.getValue()));
       }
@@ -194,7 +198,7 @@ public class PTest {
       return;
     }
     JIRAService jira = new JIRAService(mLogger, mConfiguration, mBuildTag);
-    jira.postComment(error, mFailedTests, messages);
+    jira.postComment(error, mExecutedTests.size(), mFailedTests, messages);
   }
 
   public static class Builder {
@@ -302,4 +306,4 @@ public class PTest {
     }
     System.exit(exitCode);
   }
-}
\ No newline at end of file
+}

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ReportingPhase.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ReportingPhase.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ReportingPhase.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ReportingPhase.java Tue Jul 30 22:22:35 2013
@@ -36,7 +36,7 @@ public class ReportingPhase extends Phas
   @Override
 public void execute() throws Exception {
     execLocally("mkdir $logDir/test-results");
-    execLocally("find $logDir/{failed,succeeded} -name 'TEST*.xml' -exec cp {} $logDir/test-results \\; 2>/dev/null");
+    execLocally("find $logDir/{failed,succeeded} -maxdepth 2 -name 'TEST*.xml' -exec cp {} $logDir/test-results \\; 2>/dev/null");
     execLocally("cd $logDir/ && tar -zvcf test-results.tar.gz test-results/");
   }
-}
\ No newline at end of file
+}

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java Tue Jul 30 22:22:35 2013
@@ -66,4 +66,49 @@ public class QFileTestBatch implements T
   public boolean isParallel() {
     return isParallel;
   }
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((driver == null) ? 0 : driver.hashCode());
+    result = prime * result + (isParallel ? 1231 : 1237);
+    result = prime * result + ((name == null) ? 0 : name.hashCode());
+    result = prime * result
+        + ((queryFilesProperty == null) ? 0 : queryFilesProperty.hashCode());
+    result = prime * result + ((tests == null) ? 0 : tests.hashCode());
+    return result;
+  }
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    QFileTestBatch other = (QFileTestBatch) obj;
+    if (driver == null) {
+      if (other.driver != null)
+        return false;
+    } else if (!driver.equals(other.driver))
+      return false;
+    if (isParallel != other.isParallel)
+      return false;
+    if (name == null) {
+      if (other.name != null)
+        return false;
+    } else if (!name.equals(other.name))
+      return false;
+    if (queryFilesProperty == null) {
+      if (other.queryFilesProperty != null)
+        return false;
+    } else if (!queryFilesProperty.equals(other.queryFilesProperty))
+      return false;
+    if (tests == null) {
+      if (other.tests != null)
+        return false;
+    } else if (!tests.equals(other.tests))
+      return false;
+    return true;
+  }  
 }

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java Tue Jul 30 22:22:35 2013
@@ -60,6 +60,7 @@ public class TestConfiguration {
   private final String jiraUser;
   private final String jiraPassword;
   private String jiraName;
+  private boolean clearLibraryCache;
 
   @VisibleForTesting
   public TestConfiguration(Context context, Logger logger)
@@ -98,7 +99,12 @@ public class TestConfiguration {
   public void setJiraName(String jiraName) {
     this.jiraName = Strings.nullToEmpty(jiraName);
   }
-
+  public boolean isClearLibraryCache() {
+    return clearLibraryCache; 
+   }
+  public void setClearLibraryCache(boolean clearLibraryCache) {
+    this.clearLibraryCache = clearLibraryCache;
+  }
   public String getJiraUrl() {
     return jiraUrl;
   }

Modified: hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/UnitTestBatch.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/UnitTestBatch.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/UnitTestBatch.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/UnitTestBatch.java Tue Jul 30 22:22:35 2013
@@ -44,4 +44,30 @@ public class UnitTestBatch implements Te
   public boolean isParallel() {
     return isParallel;
   }
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + (isParallel ? 1231 : 1237);
+    result = prime * result + ((testName == null) ? 0 : testName.hashCode());
+    return result;
+  }
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    UnitTestBatch other = (UnitTestBatch) obj;
+    if (isParallel != other.isParallel)
+      return false;
+    if (testName == null) {
+      if (other.testName != null)
+        return false;
+    } else if (!testName.equals(other.testName))
+      return false;
+    return true;
+  }
 }

Modified: hive/branches/vectorization/testutils/ptest2/src/main/resources/batch-exec.vm
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/resources/batch-exec.vm?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/resources/batch-exec.vm (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/resources/batch-exec.vm Tue Jul 30 22:22:35 2013
@@ -40,7 +40,12 @@ cd $localDir/$instanceName/${repositoryN
 ret=$?
 if [[ $ret -ne 0 ]]
 then
-  cp -R $localDir/$instanceName/${repositoryName}-source $logDir/source
+  if [[ $numOfFailedTests -lt 5 ]]
+  then
+    cp -R $localDir/$instanceName/${repositoryName}-source $logDir/source
+  else
+    echo "Number of failed tests $numOfFailedTests exceeded threshold, not copying source"
+  fi
 fi
 if [[ -f $localDir/$instanceName/${repositoryName}-source/build/ql/tmp/hive.log ]]
 then
@@ -57,4 +62,4 @@ then
 else
   echo "$logDir/.log does not exist"
 fi
-exit $ret
\ No newline at end of file
+exit $ret

Modified: hive/branches/vectorization/testutils/ptest2/src/main/resources/source-prep.vm
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/main/resources/source-prep.vm?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/main/resources/source-prep.vm (original)
+++ hive/branches/vectorization/testutils/ptest2/src/main/resources/source-prep.vm Tue Jul 30 22:22:35 2013
@@ -42,7 +42,7 @@ cd $workingDir/
     fi
     cd ${repositoryName}-source
     svn revert -R .
-    rm -rf $(svn status --no-ignore)
+    rm -rf $(svn status --no-ignore | egrep -v '^X|^Performing status on external' | awk '{print $2}')
     svn update
   elif [[ "${repositoryType}" = "git" ]]
   then
@@ -77,7 +77,12 @@ cd $workingDir/
     chmod +x $patchCommandPath
     $patchCommandPath $patchFilePath
   fi
+  if [[ "$clearLibraryCache" == "true" ]]
+  then
+  	rm -rf $workingDir/ivy $workingDir/maven
+  	mkdir $workingDir/ivy $workingDir/maven
+  fi
   ant $antArgs -Divy.default.ivy.user.dir=$workingDir/ivy -Dmvn.local.repo=$workingDir/maven clean package
   ant $antArgs -Divy.default.ivy.user.dir=$workingDir/ivy -Dmvn.local.repo=$workingDir/maven -Dtestcase=nothing test
 ) 2>&1 | tee $logDir/source-prep.txt
-exit ${PIPESTATUS[0]}
\ No newline at end of file
+exit ${PIPESTATUS[0]}

Modified: hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java Tue Jul 30 22:22:35 2013
@@ -48,6 +48,7 @@ public class TestExecutionPhase extends 
   private static final String QFILENAME = "sometest";
   private ExecutionPhase phase;
   private File testDir;
+  private Set<String> executedTests;
   private Set<String> failedTests;
   private List<TestBatch> testBatches;
   private TestBatch testBatch;
@@ -55,13 +56,14 @@ public class TestExecutionPhase extends 
   @Before
   public void setup() throws Exception {
     initialize(getClass().getSimpleName());
+    executedTests = Sets.newHashSet();
     failedTests = Sets.newHashSet();
   }
   private ExecutionPhase getPhase() throws IOException {
     createHostExecutor();
     phase = new ExecutionPhase(hostExecutors, localCommandFactory, templateDefaults,
-        failedLogDir, Suppliers.ofInstance(testBatches),
-        failedTests, logger);
+        succeededLogDir, failedLogDir, Suppliers.ofInstance(testBatches),
+        executedTests, failedTests, logger);
     return phase;
   }
   private void setupQFile(boolean isParallel) throws Exception {
@@ -74,6 +76,12 @@ public class TestExecutionPhase extends 
     testBatch = new UnitTestBatch(DRIVER, false);
     testBatches = Collections.singletonList(testBatch);
   }
+  private void copyTestOutput(String resource, File directory, String name) throws Exception {
+    String junitOutput = Templates.readResource(resource);
+    File junitOutputFile = new File(Dirs.create(
+        new File(directory, name)), "TEST-SomeTest.xml");
+    Files.write(junitOutput.getBytes(Charsets.UTF_8), junitOutputFile);
+  }
   @After
   public void teardown() {
     FileUtils.deleteQuietly(baseDir);
@@ -81,8 +89,10 @@ public class TestExecutionPhase extends 
   @Test
   public void testPassingQFileTest() throws Throwable {
     setupQFile(true);
+    copyTestOutput("SomeTest-success.xml", succeededLogDir, testBatch.getName());
     getPhase().execute();
     Approvals.verify(getExecutedCommands());
+    Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), executedTests);
     Assert.assertEquals(Sets.newHashSet(), failedTests);
   }
   @Test
@@ -90,19 +100,19 @@ public class TestExecutionPhase extends 
     setupQFile(true);
     sshCommandExecutor.putFailure("bash " + LOCAL_DIR + "/" + HOST + "-" + USER +
         "-0/scratch/hiveptest-" + DRIVER + "-" + QFILENAME + ".sh", 1);
-    String junitOutput = Templates.readResource("TEST-SomeTest-failure.xml");
-    File driverFailureDir = Dirs.create(new File(failedLogDir, testBatch.getName()));
-    File junitOutputFile = new File(driverFailureDir, "TEST-SomeTest-failure.xml");
-    Files.write(junitOutput.getBytes(Charsets.UTF_8), junitOutputFile);
+    copyTestOutput("SomeTest-failure.xml", failedLogDir, testBatch.getName());
     getPhase().execute();
     Approvals.verify(getExecutedCommands());
+    Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), executedTests);
     Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), failedTests);
   }
   @Test
   public void testPassingUnitTest() throws Throwable {
     setupUnitTest();
+    copyTestOutput("SomeTest-success.xml", succeededLogDir, testBatch.getName());
     getPhase().execute();
     Approvals.verify(getExecutedCommands());
+    Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), executedTests);
     Assert.assertEquals(Sets.newHashSet(), failedTests);
   }
   @Test
@@ -110,12 +120,10 @@ public class TestExecutionPhase extends 
     setupUnitTest();
     sshCommandExecutor.putFailure("bash " + LOCAL_DIR + "/" + HOST + "-" + USER +
         "-0/scratch/hiveptest-" + DRIVER + ".sh", 1);
-    String junitOutput = Templates.readResource("TEST-SomeTest-failure.xml");
-    File driverFailureDir = Dirs.create(new File(failedLogDir, testBatch.getName()));
-    File junitOutputFile = new File(driverFailureDir, "TEST-SomeTest-failure.xml");
-    Files.write(junitOutput.getBytes(Charsets.UTF_8), junitOutputFile);
+    copyTestOutput("SomeTest-failure.xml", failedLogDir, testBatch.getName());
     getPhase().execute();
     Approvals.verify(getExecutedCommands());
+    Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), executedTests);
     Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), failedTests);
   }
-}
\ No newline at end of file
+}

Modified: hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java Tue Jul 30 22:22:35 2013
@@ -24,6 +24,7 @@ import static org.mockito.Mockito.spy;
 import java.io.File;
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
@@ -46,6 +47,7 @@ import org.slf4j.LoggerFactory;
 import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 
@@ -86,7 +88,7 @@ public class TestHostExecutor {
   private Logger logger;
   private BlockingQueue<TestBatch> parallelWorkQueue;
   private BlockingQueue<TestBatch> isolatedWorkQueue;
-  private List<TestBatch> failedTestResults;
+  private Set<TestBatch> failedTestResults;
   private TestBatch testBatchParallel1;
   private TestBatch testBatchParallel2;
   private TestBatch testBatchIsolated1;
@@ -101,7 +103,7 @@ public class TestHostExecutor {
     failedLogDir = Dirs.create(new File(logDir, "failed"));
     parallelWorkQueue = new LinkedBlockingQueue<TestBatch>();
     isolatedWorkQueue = new LinkedBlockingQueue<TestBatch>();
-    failedTestResults = Lists.newArrayList();
+    failedTestResults = Sets.newHashSet();
     testBatchParallel1 = new UnitTestBatch(DRIVER_PARALLEL_1, true);
     testBatchParallel2 = new UnitTestBatch(DRIVER_PARALLEL_2, true);
     testBatchIsolated1 = new UnitTestBatch(DRIVER_ISOLATED_1, false);
@@ -151,7 +153,7 @@ public class TestHostExecutor {
     parallelWorkQueue.addAll(Lists.newArrayList(testBatchParallel1, testBatchParallel2));
     parallelWorkQueue.addAll(Lists.newArrayList(testBatchIsolated1, testBatchIsolated2));
     executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
-    Assert.assertEquals(Collections.emptyList(),  failedTestResults);
+    Assert.assertEquals(Collections.emptySet(),  failedTestResults);
     Approvals.verify(getExecutedCommands());
   }
   @Test
@@ -162,7 +164,7 @@ public class TestHostExecutor {
     HostExecutor executor = createHostExecutor();
     parallelWorkQueue.addAll(Lists.newArrayList(testBatchParallel1));
     executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
-    Assert.assertEquals(Collections.emptyList(),  failedTestResults);
+    Assert.assertEquals(Collections.emptySet(),  failedTestResults);
     Assert.assertTrue(parallelWorkQueue.toString(), parallelWorkQueue.isEmpty());
     Approvals.verify(getExecutedCommands());
   }
@@ -174,7 +176,7 @@ public class TestHostExecutor {
     HostExecutor executor = createHostExecutor();
     isolatedWorkQueue.addAll(Lists.newArrayList(testBatchIsolated1));
     executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
-    Assert.assertEquals(Collections.emptyList(),  failedTestResults);
+    Assert.assertEquals(Collections.emptySet(),  failedTestResults);
     Assert.assertTrue(isolatedWorkQueue.toString(), parallelWorkQueue.isEmpty());
     Approvals.verify(getExecutedCommands());
   }
@@ -186,7 +188,7 @@ public class TestHostExecutor {
     HostExecutor executor = createHostExecutor();
     parallelWorkQueue.addAll(Lists.newArrayList(testBatchParallel1));
     executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
-    Assert.assertEquals(Collections.emptyList(),  failedTestResults);
+    Assert.assertEquals(Collections.emptySet(),  failedTestResults);
     Assert.assertTrue(parallelWorkQueue.toString(), parallelWorkQueue.isEmpty());
     Approvals.verify(getExecutedCommands());
   }
@@ -198,7 +200,7 @@ public class TestHostExecutor {
     HostExecutor executor = createHostExecutor();
     isolatedWorkQueue.addAll(Lists.newArrayList(testBatchIsolated1));
     executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
-    Assert.assertEquals(Collections.emptyList(),  failedTestResults);
+    Assert.assertEquals(Collections.emptySet(),  failedTestResults);
     Assert.assertTrue(isolatedWorkQueue.toString(), parallelWorkQueue.isEmpty());
     Approvals.verify(getExecutedCommands());
   }
@@ -210,7 +212,7 @@ public class TestHostExecutor {
     HostExecutor executor = createHostExecutor();
     isolatedWorkQueue.addAll(Lists.newArrayList(testBatchIsolated1));
     executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
-    Assert.assertEquals(Collections.emptyList(),  failedTestResults);
+    Assert.assertEquals(Collections.emptySet(),  failedTestResults);
     Assert.assertTrue(isolatedWorkQueue.toString(), parallelWorkQueue.isEmpty());
     Approvals.verify(getExecutedCommands());
   }

Modified: hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportParser.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportParser.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportParser.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportParser.java Tue Jul 30 22:22:35 2013
@@ -22,19 +22,41 @@ import java.io.File;
 
 import junit.framework.Assert;
 
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Sets;
+import com.google.common.io.Files;
 
 public class TestReportParser {
   private static final Logger LOG = LoggerFactory
       .getLogger(TestReportParser.class);
+  private File baseDir;
+  @Before
+  public void setup() {
+    baseDir = Files.createTempDir();
+  }
+  @After
+  public void teardown() {
+    if(baseDir != null) {
+      FileUtils.deleteQuietly(baseDir);
+    }
+  }
   @Test
   public void test() throws Exception {
     File reportDir = new File("src/test/resources/test-outputs");
-    JUnitReportParser parser = new JUnitReportParser(LOG, reportDir);
+    for(File file : reportDir.listFiles()) {
+      if(file.getName().endsWith(".xml")) {
+        Files.copy(file, new File(baseDir, "TEST-" + file.getName()));
+      } else {
+        Files.copy(file, new File(baseDir, file.getName()));
+      }
+    }
+    JUnitReportParser parser = new JUnitReportParser(LOG, baseDir);
     Assert.assertEquals(3, parser.getFailedTests().size());
     Assert.assertEquals(Sets.
         newHashSet("org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_skewjoin_union_remove_1",

Modified: hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportingPhase.testExecute.approved.txt
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportingPhase.testExecute.approved.txt?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportingPhase.testExecute.approved.txt (original)
+++ hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportingPhase.testExecute.approved.txt Tue Jul 30 22:22:35 2013
@@ -1,3 +1,3 @@
 cd /tmp/hive-ptest-units/TestReportingPhase/logs/ && tar -zvcf test-results.tar.gz test-results/
-find /tmp/hive-ptest-units/TestReportingPhase/logs/{failed,succeeded} -name 'TEST*.xml' -exec cp {} /tmp/hive-ptest-units/TestReportingPhase/logs/test-results \; 2>/dev/null
-mkdir /tmp/hive-ptest-units/TestReportingPhase/logs/test-results
\ No newline at end of file
+find /tmp/hive-ptest-units/TestReportingPhase/logs/{failed,succeeded} -maxdepth 2 -name 'TEST*.xml' -exec cp {} /tmp/hive-ptest-units/TestReportingPhase/logs/test-results \; 2>/dev/null
+mkdir /tmp/hive-ptest-units/TestReportingPhase/logs/test-results

Modified: hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java (original)
+++ hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java Tue Jul 30 22:22:35 2013
@@ -67,7 +67,9 @@ public class TestScripts  {
     templateVariables.put("logDir", "/some/log/dir");
     templateVariables.put("instanceName", "instance-1");
     templateVariables.put("batchName","batch-1");
+    templateVariables.put("numOfFailedTests", "20");
     templateVariables.put("testArguments", "-Dtest=arg1");
+    templateVariables.put("clearLibraryCache", "true");
     templateVariables.put("javaHome", "/usr/java/jdk1.7");
     templateVariables.put("antEnvOpts", "-Dhttp.proxyHost=somehost -Dhttp.proxyPort=3128");
     String template = readResource("batch-exec.vm");
@@ -86,6 +88,7 @@ public class TestScripts  {
     templateVariables.put("buildTag", "build-1");
     templateVariables.put("logDir", "/some/log/dir");
     templateVariables.put("testArguments", "-Dtest=arg1");
+    templateVariables.put("clearLibraryCache", "false");
     templateVariables.put("javaHome", "/usr/java/jdk1.7");
     templateVariables.put("antEnvOpts", "-Dhttp.proxyHost=somehost -Dhttp.proxyPort=3128");
     String template = readResource("source-prep.vm");
@@ -104,6 +107,7 @@ public class TestScripts  {
     templateVariables.put("buildTag", "build-1");
     templateVariables.put("logDir", "/some/log/dir");
     templateVariables.put("testArguments", "-Dtest=arg1");
+    templateVariables.put("clearLibraryCache", "true");
     templateVariables.put("javaHome", "/usr/java/jdk1.7");
     templateVariables.put("antEnvOpts", "-Dhttp.proxyHost=somehost -Dhttp.proxyPort=3128");
     templateVariables.put("repositoryType", "git");
@@ -123,6 +127,7 @@ public class TestScripts  {
     templateVariables.put("buildTag", "build-1");
     templateVariables.put("logDir", "/some/log/dir");
     templateVariables.put("testArguments", "-Dtest=arg1");
+    templateVariables.put("clearLibraryCache", "true");
     templateVariables.put("javaHome", "/usr/java/jdk1.7");
     templateVariables.put("antEnvOpts", "-Dhttp.proxyHost=somehost -Dhttp.proxyPort=3128");
     templateVariables.put("repositoryType", "svn");
@@ -147,4 +152,4 @@ public class TestScripts  {
     writer.close();
     return writer.toString();
   }
-}
\ No newline at end of file
+}

Modified: hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt (original)
+++ hive/branches/vectorization/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt Tue Jul 30 22:22:35 2013
@@ -39,7 +39,12 @@ cd /some/local/dir/instance-1/apache-sou
 ret=$?
 if [[ $ret -ne 0 ]]
 then
-  cp -R /some/local/dir/instance-1/apache-source /some/log/dir/source
+  if [[ 20 -lt 5 ]]
+  then
+    cp -R /some/local/dir/instance-1/apache-source /some/log/dir/source
+  else
+    echo "Number of failed tests 20 exceeded threshold, not copying source"
+  fi
 fi
 if [[ -f /some/local/dir/instance-1/apache-source/build/ql/tmp/hive.log ]]
 then
@@ -56,4 +61,4 @@ then
 else
   echo "/some/log/dir/.log does not exist"
 fi
-exit $ret
\ No newline at end of file
+exit $ret