Posted to commits@hive.apache.org by br...@apache.org on 2013/10/22 19:59:08 UTC

svn commit: r1534711 [14/15] - in /hive/branches/maven: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ bin/ bin/ext/ cli/src/java/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/type/ ...

Modified: hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java (original)
+++ hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java Tue Oct 22 17:58:59 2013
@@ -315,7 +315,7 @@ public class TestTimestampWritable exten
         // decimalToTimestamp should be consistent with doubleToTimestamp for this level of
         // precision.
         assertEquals(ts, TimestampWritable.decimalToTimestamp(
-            new HiveDecimal(BigDecimal.valueOf(asDouble))));
+            HiveDecimal.create(BigDecimal.valueOf(asDouble))));
       }
     }
   }
@@ -323,7 +323,7 @@ public class TestTimestampWritable exten
   private static HiveDecimal timestampToDecimal(Timestamp ts) {
     BigDecimal d = new BigDecimal(getSeconds(ts));
     d = d.add(new BigDecimal(ts.getNanos()).divide(new BigDecimal(BILLION)));
-    return new HiveDecimal(d);
+    return HiveDecimal.create(d);
   }
 
   public void testDecimalToTimestampRandomly() {

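For reference, the HiveDecimal change above swaps the removed constructor for the static create() factory. A minimal sketch of the new call pattern (the import path is assumed from the common/type package touched elsewhere in this commit):

    import java.math.BigDecimal;
    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class HiveDecimalCreateSketch {
      public static void main(String[] args) {
        // formerly: new HiveDecimal(BigDecimal.valueOf(100.001))
        HiveDecimal fromBigDecimal = HiveDecimal.create(BigDecimal.valueOf(100.001));
        HiveDecimal fromString = HiveDecimal.create("100.001");  // String overload used in the converter tests below
        System.out.println(fromBigDecimal + " " + fromString);
      }
    }
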
Modified: hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java (original)
+++ hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java Tue Oct 22 17:58:59 2013
@@ -54,6 +54,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaBinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.BytesWritable;
 
 /**
@@ -603,7 +604,7 @@ public class TestLazyBinarySerDe extends
     inpBARef.setData(inpBArray);
 
     AbstractPrimitiveLazyObjectInspector<?> binInspector = LazyPrimitiveObjectInspectorFactory
-    .getLazyObjectInspector(PrimitiveCategory.BINARY, false, (byte)0);
+    .getLazyObjectInspector(TypeInfoFactory.binaryTypeInfo, false, (byte)0);
 
     //create LazyBinary initialed with inputBA
     LazyBinary lazyBin = (LazyBinary) LazyFactory.createLazyObject(binInspector);

Modified: hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java (original)
+++ hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java Tue Oct 22 17:58:59 2013
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -149,7 +149,7 @@ public class TestObjectInspectorConverte
           PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector,
           PrimitiveObjectInspectorFactory.writableStringObjectInspector);
       assertEquals("TextConverter", new Text("100.001"), textConverter
-	  .convert(new HiveDecimal("100.001")));
+	  .convert(HiveDecimal.create("100.001")));
       assertEquals("TextConverter", null, textConverter.convert(null));
 
       // Binary
@@ -189,7 +189,7 @@ public class TestObjectInspectorConverte
     // output OI should have varchar type params
     PrimitiveObjectInspector poi = (PrimitiveObjectInspector)
         ObjectInspectorConverters.getConvertedOI(varchar10OI, varchar5OI);
-    VarcharTypeParams vcParams = (VarcharTypeParams) poi.getTypeParams();
-    assertEquals("varchar length doesn't match", 5, vcParams.length);
+    VarcharTypeInfo vcParams = (VarcharTypeInfo) poi.getTypeInfo();
+    assertEquals("varchar length doesn't match", 5, vcParams.getLength());
   }
 }

Modified: hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java (original)
+++ hive/branches/maven/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java Tue Oct 22 17:58:59 2013
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java Tue Oct 22 17:58:59 2013
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hive.service.auth;
 
 import java.net.Socket;

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java Tue Oct 22 17:58:59 2013
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hive.service.auth;
 
 import java.io.IOException;

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java Tue Oct 22 17:58:59 2013
@@ -20,10 +20,7 @@ package org.apache.hive.service.cli;
 
 import java.util.List;
 
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hive.service.cli.thrift.TPrimitiveTypeEntry;
 import org.apache.hive.service.cli.thrift.TTypeDesc;
@@ -57,11 +54,8 @@ public class TypeDescriptor {
     if (this.type.isComplexType()) {
       this.typeName = typeName;
     } else if (this.type.isQualifiedType()) {
-      TypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
-      BaseTypeParams typeParams = ((PrimitiveTypeInfo) pti).getTypeParams();
-      if (typeParams != null) {
-        setTypeQualifiers(TypeQualifiers.fromBaseTypeParams(typeParams));
-      }
+      PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
+      setTypeQualifiers(TypeQualifiers.fromTypeInfo(pti));
     }
   }
 

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/TypeQualifiers.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/TypeQualifiers.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/TypeQualifiers.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/TypeQualifiers.java Tue Oct 22 17:58:59 2013
@@ -21,7 +21,8 @@ package org.apache.hive.service.cli;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hive.service.cli.thrift.TCLIServiceConstants;
 import org.apache.hive.service.cli.thrift.TTypeQualifierValue;
 import org.apache.hive.service.cli.thrift.TTypeQualifiers;
@@ -73,14 +74,14 @@ public class TypeQualifiers {
     return ret;
   }
 
-  public static TypeQualifiers fromBaseTypeParams(BaseTypeParams typeParams) {
-    TypeQualifiers ret = null;
-    if (typeParams != null) {
-      ret = new TypeQualifiers();
-      if (typeParams.hasCharacterMaximumLength()) {
-        ret.setCharacterMaximumLength(typeParams.getCharacterMaximumLength());
-      }
+  public static TypeQualifiers fromTypeInfo(PrimitiveTypeInfo pti) {
+    if (pti instanceof VarcharTypeInfo) {
+      TypeQualifiers ret = new TypeQualifiers();
+      ret.setCharacterMaximumLength(((VarcharTypeInfo)pti).getLength());
+      return ret;
+    } else {
+      return null;
     }
-    return ret;
   }
+
 }

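TypeQualifiers are now derived directly from the type-info object: a VarcharTypeInfo yields a qualifier carrying its maximum length, any other primitive type yields null. A hedged sketch of the new entry point (the "varchar(5)" type-name spelling is an assumption; the classes are the ones imported in the diff above):

    import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hive.service.cli.TypeQualifiers;

    public class TypeQualifiersSketch {
      public static void main(String[] args) {
        PrimitiveTypeInfo varchar5 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(5)");
        TypeQualifiers qualified = TypeQualifiers.fromTypeInfo(varchar5);                  // non-null, carries max length 5
        TypeQualifiers none = TypeQualifiers.fromTypeInfo(TypeInfoFactory.stringTypeInfo); // null: no qualifiers for plain string
        System.out.println(qualified + " / " + none);
      }
    }
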
Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java Tue Oct 22 17:58:59 2013
@@ -16,23 +16,3 @@
  * limitations under the License.
  */
 
-package org.apache.hive.service.cli.operation;
-
-import java.util.Map;
-
-import org.apache.hadoop.hive.ql.processors.AddResourceProcessor;
-import org.apache.hive.service.cli.session.HiveSession;
-
-/**
- * HiveAddResourceOperation.
- *
- */
-public class AddResourceOperation extends HiveCommandOperation {
-
-  protected AddResourceOperation(HiveSession parentSession, String statement,
-      Map<String, String> confOverlay) {
-    super(parentSession, statement, confOverlay);
-    setCommandProcessor(new AddResourceProcessor());
-  }
-
-}

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java Tue Oct 22 17:58:59 2013
@@ -16,23 +16,3 @@
  * limitations under the License.
  */
 
-package org.apache.hive.service.cli.operation;
-
-import java.util.Map;
-
-import org.apache.hadoop.hive.ql.processors.DeleteResourceProcessor;
-import org.apache.hive.service.cli.session.HiveSession;
-
-/**
- * HiveDeleteResourceOperation.
- *
- */
-public class DeleteResourceOperation extends HiveCommandOperation {
-
-  protected DeleteResourceOperation(HiveSession parentSession, String statement,
-      Map<String, String> confOverlay) {
-    super(parentSession, statement, confOverlay);
-    setCommandProcessor(new DeleteResourceProcessor());
-  }
-
-}

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java Tue Oct 22 17:58:59 2013
@@ -16,23 +16,3 @@
  * limitations under the License.
  */
 
-package org.apache.hive.service.cli.operation;
-
-import java.util.Map;
-
-import org.apache.hadoop.hive.ql.processors.DfsProcessor;
-import org.apache.hive.service.cli.session.HiveSession;
-
-/**
- * HiveDfsCommandOperation.
- *
- */
-public class DfsOperation extends HiveCommandOperation {
-
-  protected DfsOperation(HiveSession parentSession, String statement,
-      Map<String, String> confOverlay) {
-    super(parentSession, statement, confOverlay);
-    setCommandProcessor(new DfsProcessor(parentSession.getHiveConf()));
-  }
-
-}

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java Tue Oct 22 17:58:59 2013
@@ -20,8 +20,16 @@ package org.apache.hive.service.cli.oper
 
 
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
+import java.sql.SQLException;
 
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.processors.HiveCommand;
+import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.OperationType;
 import org.apache.hive.service.cli.session.HiveSession;
 
@@ -40,20 +48,19 @@ public abstract class ExecuteStatementOp
   }
 
   public static ExecuteStatementOperation newExecuteStatementOperation(
-      HiveSession parentSession, String statement, Map<String, String> confOverlay, boolean runAsync) {
+      HiveSession parentSession, String statement, Map<String, String> confOverlay, boolean runAsync)
+      throws HiveSQLException {
     String[] tokens = statement.trim().split("\\s+");
     String command = tokens[0].toLowerCase();
-
-    if ("set".equals(command)) {
-      return new SetOperation(parentSession, statement, confOverlay);
-    } else if ("dfs".equals(command)) {
-      return new DfsOperation(parentSession, statement, confOverlay);
-    } else if ("add".equals(command)) {
-      return new AddResourceOperation(parentSession, statement, confOverlay);
-    } else if ("delete".equals(command)) {
-      return new DeleteResourceOperation(parentSession, statement, confOverlay);
-    } else {
+    CommandProcessor processor = null;
+    try {
+      processor = CommandProcessorFactory.getForHiveCommand(tokens[0], parentSession.getHiveConf());
+    } catch (SQLException e) {
+      throw new HiveSQLException(e.getMessage(), e.getSQLState(), e);
+    }
+    if (processor == null) {
       return new SQLOperation(parentSession, statement, confOverlay, runAsync);
     }
+    return new HiveCommandOperation(parentSession, statement, processor, confOverlay);
   }
 }

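The dedicated Set/Dfs/Add/DeleteResourceOperation subclasses are gone; dispatch now asks CommandProcessorFactory whether the leading token is a Hive command and falls back to SQLOperation when it is not. A minimal sketch of that decision, with HiveConf construction assumed:

    import java.sql.SQLException;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.processors.CommandProcessor;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;

    public class CommandDispatchSketch {
      /** True when the statement maps to a HiveCommandOperation, false when it is plain SQL. */
      static boolean isHiveCommand(String statement, HiveConf conf) throws SQLException {
        String[] tokens = statement.trim().split("\\s+");
        CommandProcessor processor = CommandProcessorFactory.getForHiveCommand(tokens[0], conf);
        return processor != null;
      }

      public static void main(String[] args) throws SQLException {
        HiveConf conf = new HiveConf();
        System.out.println(isHiveCommand("set hive.exec.parallel=true", conf)); // expected: true
        System.out.println(isHiveCommand("SELECT 1", conf));                    // expected: false
      }
    }
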
Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java Tue Oct 22 17:58:59 2013
@@ -43,10 +43,9 @@ import org.apache.hive.service.cli.Table
 import org.apache.hive.service.cli.session.HiveSession;
 
 /**
- * HiveCommandOperation.
- *
+ * Executes a HiveCommand
  */
-public abstract class HiveCommandOperation extends ExecuteStatementOperation {
+public class HiveCommandOperation extends ExecuteStatementOperation {
   private CommandProcessorResponse response;
   private CommandProcessor commandProcessor;
   private TableSchema resultSchema = null;
@@ -58,8 +57,10 @@ public abstract class HiveCommandOperati
   private BufferedReader resultReader;
 
 
-  protected HiveCommandOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay) {
+  protected HiveCommandOperation(HiveSession parentSession, String statement,
+      CommandProcessor commandProcessor, Map<String, String> confOverlay) {
     super(parentSession, statement, confOverlay);
+    this.commandProcessor = commandProcessor;
     setupSessionIO(parentSession.getSessionState());
   }
 
@@ -104,10 +105,12 @@ public abstract class HiveCommandOperati
       String[] tokens = statement.split("\\s");
       String commandArgs = command.substring(tokens[0].length()).trim();
 
-      response = getCommandProcessor().run(commandArgs);
+      response = commandProcessor.run(commandArgs);
       int returnCode = response.getResponseCode();
-      String sqlState = response.getSQLState();
-      String errorMessage = response.getErrorMessage();
+      if (returnCode != 0) {
+        throw new HiveSQLException("Error while processing statement: "
+            + response.getErrorMessage(), response.getSQLState(), response.getResponseCode());
+      }
       Schema schema = response.getSchema();
       if (schema != null) {
         setHasResultSet(true);
@@ -116,6 +119,9 @@ public abstract class HiveCommandOperati
         setHasResultSet(false);
         resultSchema = new TableSchema();
       }
+    } catch (HiveSQLException e) {
+      setState(OperationState.ERROR);
+      throw e;
     } catch (Exception e) {
       setState(OperationState.ERROR);
       throw new HiveSQLException("Error running query: " + e.toString(), e);
@@ -201,12 +207,4 @@ public abstract class HiveCommandOperati
       resultReader = null;
     }
   }
-
-  protected CommandProcessor getCommandProcessor() {
-    return commandProcessor;
-  }
-
-  protected void setCommandProcessor(CommandProcessor commandProcessor) {
-    this.commandProcessor = commandProcessor;
-  }
 }

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java Tue Oct 22 17:58:59 2013
@@ -66,7 +66,8 @@ public class OperationManager extends Ab
   }
 
   public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession,
-      String statement, Map<String, String> confOverlay, boolean runAsync) {
+      String statement, Map<String, String> confOverlay, boolean runAsync)
+    throws HiveSQLException {
     ExecuteStatementOperation executeStatementOperation = ExecuteStatementOperation
         .newExecuteStatementOperation(parentSession, statement, confOverlay, runAsync);
     addOperation(executeStatementOperation);

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java Tue Oct 22 17:58:59 2013
@@ -124,6 +124,7 @@ public class SQLOperation extends Execut
       // TODO explain should use a FetchTask for reading
       for (Task<? extends Serializable> task: driver.getPlan().getRootTasks()) {
         if (task.getClass() == ExplainTask.class) {
+          resultSchema = new TableSchema(mResultSchema);
           setHasResultSet(true);
           break;
         }

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/SetOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/SetOperation.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/SetOperation.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/cli/operation/SetOperation.java Tue Oct 22 17:58:59 2013
@@ -16,23 +16,3 @@
  * limitations under the License.
  */
 
-package org.apache.hive.service.cli.operation;
-
-import java.util.Map;
-
-import org.apache.hadoop.hive.ql.processors.SetProcessor;
-import org.apache.hive.service.cli.session.HiveSession;
-
-/**
- * HiveSetCommandOperation.
- *
- */
-public class SetOperation extends HiveCommandOperation {
-
-  protected SetOperation(HiveSession parentSession, String statement,
-      Map<String, String> confOverlay) {
-    super(parentSession, statement, confOverlay);
-    setCommandProcessor(new SetProcessor());
-  }
-
-}

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/server/HiveServer2.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/server/HiveServer2.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/server/HiveServer2.java Tue Oct 22 17:58:59 2013
@@ -82,25 +82,26 @@ public class HiveServer2 extends Composi
    * @param args
    */
   public static void main(String[] args) {
-    //NOTE: It is critical to do this here so that log4j is reinitialized
-    // before any of the other core hive classes are loaded
-    try {
-      LogUtils.initHiveLog4j();
-    } catch (LogInitializationException e) {
-      LOG.warn(e.getMessage());
-    }
-
-    HiveStringUtils.startupShutdownMessage(HiveServer2.class, args, LOG);
     try {
       ServerOptionsProcessor oproc = new ServerOptionsProcessor("hiveserver2");
       if (!oproc.process(args)) {
-        LOG.fatal("Error starting HiveServer2 with given arguments");
+        System.err.println("Error starting HiveServer2 with given arguments");
         System.exit(-1);
       }
+
+      //NOTE: It is critical to do this here so that log4j is reinitialized
+      // before any of the other core hive classes are loaded
+      LogUtils.initHiveLog4j();
+
+      HiveStringUtils.startupShutdownMessage(HiveServer2.class, args, LOG);
+      //log debug message from "oproc" after log4j initializes properly
+      LOG.debug(oproc.getDebugMessage().toString());
       HiveConf hiveConf = new HiveConf();
       HiveServer2 server = new HiveServer2();
       server.init(hiveConf);
       server.start();
+    } catch (LogInitializationException e) {
+      LOG.warn(e.getMessage());
     } catch (Throwable t) {
       LOG.fatal("Error starting HiveServer2", t);
       System.exit(-1);

Modified: hive/branches/maven/service/src/java/org/apache/hive/service/server/ServerOptionsProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/java/org/apache/hive/service/server/ServerOptionsProcessor.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/java/org/apache/hive/service/server/ServerOptionsProcessor.java (original)
+++ hive/branches/maven/service/src/java/org/apache/hive/service/server/ServerOptionsProcessor.java Tue Oct 22 17:58:59 2013
@@ -39,6 +39,7 @@ public class ServerOptionsProcessor {
   private final Options options = new Options();
   private org.apache.commons.cli.CommandLine commandLine;
   private final String serverName;
+  private StringBuilder debugMessage = new StringBuilder();
 
 
   @SuppressWarnings("static-access")
@@ -67,7 +68,8 @@ public class ServerOptionsProcessor {
       //get hiveconf param values and set the System property values
       Properties confProps = commandLine.getOptionProperties("hiveconf");
       for (String propKey : confProps.stringPropertyNames()) {
-        LOG.debug("Setting " + propKey + "=" + confProps.getProperty(propKey) + ";");
+        //save logging message for log4j output later, after log4j initializes properly
+        debugMessage.append("Setting " + propKey + "=" + confProps.getProperty(propKey) + ";\n");
         System.setProperty(propKey, confProps.getProperty(propKey));
       }
     } catch (ParseException e) {
@@ -78,6 +80,10 @@ public class ServerOptionsProcessor {
     return true;
   }
 
+  public StringBuilder getDebugMessage() {
+    return debugMessage;
+  }
+
   private void printUsage() {
     new HelpFormatter().printHelp(serverName, options);
   }

Modified: hive/branches/maven/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java (original)
+++ hive/branches/maven/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java Tue Oct 22 17:58:59 2013
@@ -197,6 +197,9 @@ public abstract class ThriftCLIServiceTe
     assertEquals("Query should be finished",
         OperationState.FINISHED, OperationState.getOperationState(opStatusResp.getOperationState()));
 
+    queryString = "DROP TABLE TEST_EXEC_THRIFT";
+    executeQuerySync(queryString, sessHandle);
+
     // Close the session; ignore exception if any
     TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle);
     client.CloseSession(closeReq);

Modified: hive/branches/maven/shims/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/ivy.xml?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/ivy.xml (original)
+++ hive/branches/maven/shims/ivy.xml Tue Oct 22 17:58:59 2013
@@ -125,6 +125,15 @@
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+      <exclude org="commons-codec" module="commons-codec" /><!--ignore commons-codec 1.3 to use 1.4-->
+      <exclude org="org.apache.commons" module="commons-codec" /><!--ignore commons-codec 1.3 to use 1.4-->
+    </dependency>
+
+    <dependency org="commons-codec" name="commons-codec" 
+                rev="${commons-codec.version}" conf="hadoop0.20.shim->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
 
     <dependency org="org.apache.hadoop" name="hadoop-core"
@@ -133,6 +142,8 @@
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+      <exclude org="commons-codec" module="commons-codec" /><!--ignore commons-codec 1.3 to use 1.4-->
+      <exclude org="org.apache.commons" module="commons-codec" /><!--ignore commons-codec 1.3 to use 1.4-->
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-tools"
                 rev="${hadoop-0.20.version}"
@@ -140,6 +151,8 @@
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+      <exclude org="commons-codec" module="commons-codec" /><!--ignore commons-codec 1.3 to use 1.4-->
+      <exclude org="org.apache.commons" module="commons-codec" /><!--ignore commons-codec 1.3 to use 1.4-->
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-test"
                 rev="${hadoop-0.20.version}"
@@ -147,6 +160,8 @@
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+      <exclude org="commons-codec" module="commons-codec" /><!--ignore commons-codec 1.3 to use 1.4-->
+      <exclude org="org.apache.commons" module="commons-codec" /><!--ignore commons-codec 1.3 to use 1.4-->
     </dependency>
 
     <!-- Hadoop 0.20S (or 1.0.0) shim dependencies. Used for building 0.20S shims. -->

Modified: hive/branches/maven/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/maven/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Tue Oct 22 17:58:59 2013
@@ -43,6 +43,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.ProxyFileSystem;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
@@ -783,4 +784,8 @@ public class Hadoop20Shims implements Ha
   public WebHCatJTShim getWebHCatShim(Configuration conf, UserGroupInformation ugi) throws IOException {
       throw new UnsupportedOperationException("WebHCat does not support Hadoop 0.20.x");
   }
+  @Override
+  public FileSystem createProxyFileSystem(FileSystem fs, URI uri) {
+    return new ProxyFileSystem(fs, uri);
+  }
 }

Modified: hive/branches/maven/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/branches/maven/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Tue Oct 22 17:58:59 2013
@@ -22,6 +22,7 @@ import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Iterator;
+import java.net.URI;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
@@ -30,6 +31,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.ProxyFileSystem;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.JobTracker;
@@ -372,4 +374,8 @@ public class Hadoop20SShims extends Hado
     return fs.getFileBlockLocations(status, 0, status.getLen());
   }
 
+  @Override
+  public FileSystem createProxyFileSystem(FileSystem fs, URI uri) {
+    return new ProxyFileSystem(fs, uri);
+  }
 }

Modified: hive/branches/maven/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/maven/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Tue Oct 22 17:58:59 2013
@@ -24,6 +24,8 @@ import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Iterator;
 import java.util.Map;
+import java.net.URI;
+import java.io.FileNotFoundException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -33,6 +35,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.ProxyFileSystem;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -407,4 +412,40 @@ public class Hadoop23Shims extends Hadoo
     }
   }
 
+  class ProxyFileSystem23 extends ProxyFileSystem {
+    public ProxyFileSystem23(FileSystem fs) {
+      super(fs);
+    }
+    public ProxyFileSystem23(FileSystem fs, URI uri) {
+      super(fs, uri);
+    }
+
+    @Override
+    public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
+      throws FileNotFoundException, IOException {
+      return new RemoteIterator<LocatedFileStatus>() {
+        private RemoteIterator<LocatedFileStatus> stats =
+            ProxyFileSystem23.super.listLocatedStatus(
+                ProxyFileSystem23.super.swizzleParamPath(f));
+
+        @Override
+        public boolean hasNext() throws IOException {
+          return stats.hasNext();
+        }
+
+        @Override
+        public LocatedFileStatus next() throws IOException {
+          LocatedFileStatus result = stats.next();
+          return new LocatedFileStatus(
+              ProxyFileSystem23.super.swizzleFileStatus(result, false),
+              result.getBlockLocations());
+        }
+      };
+    }
+  }
+
+  @Override
+  public FileSystem createProxyFileSystem(FileSystem fs, URI uri) {
+    return new ProxyFileSystem23(fs, uri);
+  }
 }

Modified: hive/branches/maven/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/branches/maven/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Tue Oct 22 17:58:59 2013
@@ -622,4 +622,7 @@ public abstract class HadoopShimsSecure 
   @Override
   abstract public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
           throws IOException;
+
+  @Override
+  abstract public FileSystem createProxyFileSystem(FileSystem fs, URI uri);
 }

Modified: hive/branches/maven/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DBTokenStore.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DBTokenStore.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DBTokenStore.java (original)
+++ hive/branches/maven/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DBTokenStore.java Tue Oct 22 17:58:59 2013
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.thrift;
 
 import java.io.IOException;

Modified: hive/branches/maven/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java (original)
+++ hive/branches/maven/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java Tue Oct 22 17:58:59 2013
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.thrift;
 
 import java.io.IOException;

Modified: hive/branches/maven/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java (original)
+++ hive/branches/maven/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java Tue Oct 22 17:58:59 2013
@@ -45,7 +45,7 @@ public class ProxyFileSystem extends Fil
 
 
 
-  private Path swizzleParamPath(Path p) {
+  protected Path swizzleParamPath(Path p) {
     String pathUriString = p.toUri().toString();
     URI newPathUri = URI.create(pathUriString);
     return new Path (realScheme, realAuthority, newPathUri.getPath());
@@ -57,7 +57,7 @@ public class ProxyFileSystem extends Fil
     return new Path (myScheme, myAuthority, newPathUri.getPath());
   }
 
-  private FileStatus swizzleFileStatus(FileStatus orig, boolean isParam) {
+  protected FileStatus swizzleFileStatus(FileStatus orig, boolean isParam) {
     FileStatus ret =
       new FileStatus(orig.getLen(), orig.isDir(), orig.getReplication(),
                      orig.getBlockSize(), orig.getModificationTime(),

Modified: hive/branches/maven/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java (original)
+++ hive/branches/maven/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java Tue Oct 22 17:58:59 2013
@@ -23,6 +23,8 @@ import java.net.URI;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.HadoopShims;
 
 /****************************************************************
  * A Proxy for LocalFileSystem
@@ -61,7 +63,9 @@ public class ProxyLocalFileSystem extend
 
     String authority = name.getAuthority() != null ? name.getAuthority() : "";
     String proxyUriString = nameUriString + "://" + authority + "/";
-    fs = new ProxyFileSystem(localFs, URI.create(proxyUriString));
+
+    fs = ShimLoader.getHadoopShims().createProxyFileSystem(
+        localFs, URI.create(proxyUriString));
 
     fs.initialize(name, conf);
   }

Modified: hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Tue Oct 22 17:58:59 2013
@@ -562,4 +562,10 @@ public interface HadoopShims {
      */
     public void close();
   }
+
+  /**
+   * Create a proxy file system that can serve a given scheme/authority using some
+   * other file system.
+   */
+  public FileSystem createProxyFileSystem(FileSystem fs, URI uri);
 }

Modified: hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/shims/HiveEventCounter.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/shims/HiveEventCounter.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/shims/HiveEventCounter.java (original)
+++ hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/shims/HiveEventCounter.java Tue Oct 22 17:58:59 2013
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.shims;
 
 import org.apache.log4j.Appender;

Modified: hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java (original)
+++ hive/branches/maven/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java Tue Oct 22 17:58:59 2013
@@ -82,8 +82,15 @@ public class TUGIContainingTransport ext
 
       // UGI information is not available at connection setup time, it will be set later
       // via set_ugi() rpc.
-      transMap.putIfAbsent(trans, new TUGIContainingTransport(trans));
-      return transMap.get(trans);
+      TUGIContainingTransport tugiTrans = transMap.get(trans);
+      if (tugiTrans == null) {
+        tugiTrans = new TUGIContainingTransport(trans);
+        TUGIContainingTransport prev = transMap.putIfAbsent(trans, tugiTrans);
+        if (prev != null) {
+          return prev;
+        }
+      }
+      return tugiTrans;
     }
   }
 }

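The transport-map change above is the usual get-then-putIfAbsent idiom: look the wrapper up first so a new one is only allocated on a miss, and if another thread wins the race, keep its instance. A self-contained, JDK-only sketch of the same pattern:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class CachedWrapperSketch {
      private static final ConcurrentMap<String, StringBuilder> CACHE =
          new ConcurrentHashMap<String, StringBuilder>();

      static StringBuilder wrapperFor(String key) {
        StringBuilder wrapper = CACHE.get(key);
        if (wrapper == null) {
          wrapper = new StringBuilder(key);                       // allocated only on a cache miss
          StringBuilder previous = CACHE.putIfAbsent(key, wrapper);
          if (previous != null) {
            return previous;                                      // another thread won the race; reuse its instance
          }
        }
        return wrapper;
      }

      public static void main(String[] args) {
        System.out.println(wrapperFor("trans") == wrapperFor("trans")); // true: one cached instance per key
      }
    }
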
Modified: hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java (original)
+++ hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java Tue Oct 22 17:58:59 2013
@@ -187,7 +187,8 @@ class HostExecutor {
     File script = new File(mLocalScratchDirectory, scriptName);
     Map<String, String> templateVariables = Maps.newHashMap(mTemplateDefaults);
     templateVariables.put("instanceName", drone.getInstanceName());
-    templateVariables.put("batchName",batch.getName());
+    templateVariables.put("batchName", batch.getName());
+    templateVariables.put("testClass", batch.getTestClass());
     templateVariables.put("testArguments", batch.getTestArguments());
     templateVariables.put("localDir", drone.getLocalDirectory());
     templateVariables.put("logDir", drone.getLocalLogDirectory());

Modified: hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java (original)
+++ hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java Tue Oct 22 17:58:59 2013
@@ -103,18 +103,23 @@ public class PTest {
     put("repository", configuration.getRepository()).
     put("repositoryName", configuration.getRepositoryName()).
     put("repositoryType", configuration.getRepositoryType()).
+    put("buildTool", configuration.getBuildTool()).
     put("branch", configuration.getBranch()).
     put("clearLibraryCache", String.valueOf(configuration.isClearLibraryCache())).
     put("workingDir", mExecutionContext.getLocalWorkingDirectory()).
-    put("antArgs", configuration.getAntArgs()).
-    put("antTestArgs", configuration.getAntTestArgs()).
     put("buildTag", buildTag).
     put("logDir", logDir.getAbsolutePath()).
     put("javaHome", configuration.getJavaHome()).
     put("javaHomeForTests", configuration.getJavaHomeForTests()).
-    put("antEnvOpts", configuration.getAntEnvOpts());
+    put("antEnvOpts", configuration.getAntEnvOpts()).
+    put("antArgs", configuration.getAntArgs()).
+    put("antTestArgs", configuration.getAntTestArgs()).
+    put("antTestTarget", configuration.getAntTestTarget()).
+    put("mavenEnvOpts", configuration.getMavenEnvOpts()).
+    put("mavenArgs", configuration.getMavenArgs()).
+    put("mavenTestArgs", configuration.getMavenTestArgs());
     final ImmutableMap<String, String> templateDefaults = templateDefaultsBuilder.build();
-    TestParser testParser = new TestParser(configuration.getContext(),
+    TestParser testParser = new TestParser(configuration.getContext(), configuration.getTestCasePropertyName(),
         new File(mExecutionContext.getLocalWorkingDirectory(), configuration.getRepositoryName() + "-source"),
         logger);
 
@@ -231,6 +236,7 @@ public class PTest {
   private static final String JAVA_HOME_TEST = TestConfiguration.JAVA_HOME_TEST;
   private static final String ANT_TEST_ARGS = TestConfiguration.ANT_TEST_ARGS;
   private static final String ANT_ENV_OPTS = TestConfiguration.ANT_ENV_OPTS;
+  private static final String ANT_TEST_TARGET = TestConfiguration.ANT_TEST_TARGET;
   /**
    * All args override properties file settings except
    * for this one which is additive.
@@ -300,6 +306,10 @@ public class PTest {
         if(!antEnvOpts.isEmpty()) {
           conf.setAntEnvOpts(antEnvOpts);
         }
+        String antTestTarget = Strings.nullToEmpty(commandLine.getOptionValue(ANT_TEST_TARGET)).trim();
+        if(!antTestTarget.isEmpty()) {
+          conf.setAntTestTarget(antTestTarget);
+        }
         String[] supplementalAntArgs = commandLine.getOptionValues(ANT_ARG);
         if(supplementalAntArgs != null && supplementalAntArgs.length > 0) {
           String antArgs = Strings.nullToEmpty(conf.getAntArgs());

Modified: hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java (original)
+++ hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java Tue Oct 22 17:58:59 2013
@@ -25,13 +25,15 @@ import com.google.common.collect.Iterato
 
 public class QFileTestBatch implements TestBatch {
 
+  private final String testCasePropertyName;
   private final String driver;
   private final String queryFilesProperty;
   private final String name;
   private final Set<String> tests;
   private final boolean isParallel;
-  public QFileTestBatch(String driver, String queryFilesProperty,
-      Set<String> tests, boolean isParallel) {
+  public QFileTestBatch(String testCasePropertyName, String driver, 
+      String queryFilesProperty, Set<String> tests, boolean isParallel) {
+    this.testCasePropertyName = testCasePropertyName;
     this.driver = driver;
     this.queryFilesProperty = queryFilesProperty;
     this.tests = tests;
@@ -51,8 +53,12 @@ public class QFileTestBatch implements T
     return name;
   }
   @Override
+  public String getTestClass() {
+    return driver;
+  }
+  @Override
   public String getTestArguments() {
-    return String.format("-Dtestcase=%s -D%s=%s", driver, queryFilesProperty,
+    return String.format("-D%s=%s -D%s=%s", testCasePropertyName, driver, queryFilesProperty,
         Joiner.on(",").join(tests));
   }
 

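With the test-case property name injected, a batch now renders its arguments as -D<testCaseProperty>=<driver> -D<queryFilesProperty>=<tests>. A hedged sketch using the constructor from the diff; the property names and .q files are hypothetical values:

    import java.util.LinkedHashSet;
    import java.util.Set;

    import org.apache.hive.ptest.execution.conf.QFileTestBatch;

    public class QFileTestBatchSketch {
      public static void main(String[] args) {
        Set<String> tests = new LinkedHashSet<String>();
        tests.add("join1.q");
        tests.add("join2.q");
        QFileTestBatch batch = new QFileTestBatch("test", "TestCliDriver", "qfile", tests, true);
        // expected: -Dtest=TestCliDriver -Dqfile=join1.q,join2.q
        System.out.println(batch.getTestArguments());
        System.out.println(batch.getTestClass());   // TestCliDriver
      }
    }
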
Modified: hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestBatch.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestBatch.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestBatch.java (original)
+++ hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestBatch.java Tue Oct 22 17:58:59 2013
@@ -21,6 +21,8 @@ package org.apache.hive.ptest.execution.
 public interface TestBatch {
 
   public String getTestArguments();
+  
+  public String getTestClass();
 
   public String getName();
 

Modified: hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java (original)
+++ hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java Tue Oct 22 17:58:59 2013
@@ -39,20 +39,33 @@ public class TestConfiguration {
   public static final String JAVA_HOME_TEST = "javaHomeForTests";
   public static final String ANT_ENV_OPTS = "antEnvOpts";
   public static final String ANT_TEST_ARGS = "antTestArgs";
-  
+  public static final String ANT_TEST_TARGET = "antTestTarget";
+  public static final String MAVEN_ENV_OPTS = "mavenEnvOpts";
+  public static final String MAVEN_TEST_ARGS = "mavenTestArgs";
+  public static final String MAVEN_TEST_TARGET = "mavenTestTarget";
+
   private static final String REPOSITORY_TYPE = "repositoryType";
   private static final String GIT = "git";
   private static final String SVN = "svn";
+  private static final String ANT = "ant";
+  private static final String MAVEN = "maven";
+  private static final String MAVEN_ARGS = "mavenArgs";
   private static final String ANT_ARGS = "antArgs";
   private static final String JIRA_URL = "jiraUrl";
   private static final String JIRA_USER = "jiraUser";
   private static final String JIRA_PASSWORD = "jiraPassword";
   private static final String JENKINS_URL = "jenkinsURL";
-
+  private static final String TEST_CASE_PROPERTY_NAME = "testCasePropertyName";
+  private static final String BUILD_TOOL = "buildTool";
+  
   private final Context context;
   private String antArgs;
   private String antTestArgs;
   private String antEnvOpts;
+  private String antTestTarget;
+  private String mavenArgs;
+  private String mavenTestArgs;
+  private String mavenEnvOpts;
   private String repositoryType;
   private String repository;
   private String repositoryName;
@@ -64,6 +77,9 @@ public class TestConfiguration {
   private final String jiraUrl;
   private final String jiraUser;
   private final String jiraPassword;
+  private final String testCasePropertyName;
+  private final String buildTool;
+  
   private String jiraName;
   private boolean clearLibraryCache;
 
@@ -81,9 +97,17 @@ public class TestConfiguration {
     } else {
       throw new IllegalArgumentException("Unkown repository type '" + repositoryType + "'");
     }
-    antArgs =  Preconditions.checkNotNull(context.getString(ANT_ARGS), ANT_ARGS).trim();
+    buildTool = context.getString(BUILD_TOOL, ANT).trim();
+    if(!(MAVEN.equals(buildTool) || ANT.equals(buildTool))) {
+      throw new IllegalArgumentException("Unknown build tool type '" + buildTool + "'");
+    }
+    antArgs =  context.getString(ANT_ARGS, "").trim();
     antTestArgs =  context.getString(ANT_TEST_ARGS, "").trim();
     antEnvOpts =  context.getString(ANT_ENV_OPTS, "").trim();
+    antTestTarget = context.getString(ANT_TEST_TARGET, "test").trim();
+    mavenArgs =  context.getString(MAVEN_ARGS, "").trim();
+    mavenTestArgs =  context.getString(MAVEN_TEST_ARGS, "").trim();
+    mavenEnvOpts =  context.getString(MAVEN_ENV_OPTS, "").trim();
     javaHome =  context.getString(JAVA_HOME, "").trim();
     javaHomeForTests = context.getString(JAVA_HOME_TEST, "").trim();
     patch = Strings.nullToEmpty(null);
@@ -92,7 +116,7 @@ public class TestConfiguration {
     jiraUser = context.getString(JIRA_USER, "").trim();
     jiraPassword = context.getString(JIRA_PASSWORD, "").trim();
     jenkinsURL = context.getString(JENKINS_URL, "https://builds.apache.org/job").trim();
-
+    testCasePropertyName = context.getString(TEST_CASE_PROPERTY_NAME, "testcase").trim();
   }
   public Context getContext() {
     return context;
@@ -112,6 +136,9 @@ public class TestConfiguration {
   public void setClearLibraryCache(boolean clearLibraryCache) {
     this.clearLibraryCache = clearLibraryCache;
   }
+  public String getBuildTool() {
+    return buildTool;
+  }
   public String getJiraUrl() {
     return jiraUrl;
   }
@@ -142,6 +169,18 @@ public class TestConfiguration {
   public String getAntEnvOpts() {
     return antEnvOpts;
   }
+  public String getAntTestTarget() {
+    return antTestTarget;
+  }
+  public String getMavenArgs() {
+    return mavenArgs;
+  }
+  public String getMavenTestArgs() {
+    return mavenTestArgs;
+  }
+  public String getMavenEnvOpts() {
+    return mavenEnvOpts;
+  }
   public String getJavaHome() {
     return javaHome;
   }
@@ -151,6 +190,9 @@ public class TestConfiguration {
   public String getPatch() {
     return patch;
   }
+  public String getTestCasePropertyName() {
+    return testCasePropertyName;
+  }
   public void setPatch(String patch) {
     this.patch = Strings.nullToEmpty(patch);
   }
@@ -178,16 +220,32 @@ public class TestConfiguration {
   public void setAntEnvOpts(String antEnvOpts) {
     this.antEnvOpts = Strings.nullToEmpty(antEnvOpts);
   }
+  public void setAntTestTarget(String antTestTarget) {
+    this.antTestTarget = Strings.nullToEmpty(antTestTarget);
+  }
+  public void setMavenArgs(String mavenArgs) {
+    this.mavenArgs = Strings.nullToEmpty(mavenArgs);
+  }
+  public void setMavenTestArgs(String mavenTestArgs) {
+    this.mavenTestArgs = mavenTestArgs;
+  }
+  public void setMavenEnvOpts(String mavenEnvOpts) {
+    this.mavenEnvOpts = Strings.nullToEmpty(mavenEnvOpts);
+  }
   @Override
   public String toString() {
-    return "TestConfiguration [antArgs=" + antArgs + ", antEnvOpts="
-        + antEnvOpts + ", repositoryType=" + repositoryType + ", repository="
-        + repository + ", repositoryName=" + repositoryName + ", patch="
-        + patch + ", javaHome=" + javaHome + ", javaHomeForTests="
-        + javaHomeForTests + ", branch=" + branch + ", jenkinsURL="
-        + jenkinsURL + ", jiraUrl=" + jiraUrl + ", jiraUser=" + jiraUser
-        + ", jiraName=" + jiraName + ", clearLibraryCache=" + clearLibraryCache
-        + "]";
+    return "TestConfiguration [antArgs=" + antArgs + ", antTestArgs="
+        + antTestArgs + ", antEnvOpts=" + antEnvOpts + ", antTestTarget="
+        + antTestTarget + ", mavenArgs=" + mavenArgs + ", mavenTestArgs="
+        + mavenTestArgs + ", mavenEnvOpts=" + mavenEnvOpts
+        + ", repositoryType=" + repositoryType + ", repository=" + repository
+        + ", repositoryName=" + repositoryName + ", patch=" + patch
+        + ", javaHome=" + javaHome + ", javaHomeForTests=" + javaHomeForTests
+        + ", branch=" + branch + ", jenkinsURL=" + jenkinsURL + ", jiraUrl="
+        + jiraUrl + ", jiraUser=" + jiraUser + ", jiraPassword=" + jiraPassword
+        + ", testCasePropertyName=" + testCasePropertyName + ", buildTool="
+        + buildTool + ", jiraName=" + jiraName + ", clearLibraryCache="
+        + clearLibraryCache + "]";
   }
   public static TestConfiguration fromInputStream(InputStream inputStream, Logger logger)
       throws IOException {

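A sketch of how a caller might consume the new settings; the getters and defaults are the ones shown in the hunk above, while the surrounding control flow is illustrative only:

    // Hedged sketch, not from this commit:
    TestConfiguration conf = TestConfiguration.fromInputStream(inputStream, logger);
    if ("maven".equals(conf.getBuildTool())) {            // buildTool defaults to "ant"
      String mavenEnvOpts = conf.getMavenEnvOpts();       // defaults to ""
      String mavenArgs = conf.getMavenArgs();             // defaults to ""
    } else {
      String antTestTarget = conf.getAntTestTarget();     // defaults to "test"
    }
    String testProperty = conf.getTestCasePropertyName(); // defaults to "testcase"
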
Modified: hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java (original)
+++ hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java Tue Oct 22 17:58:59 2013
@@ -37,11 +37,14 @@ import com.google.common.collect.Sets;
 public class TestParser {
 
   private final Context context;
+  private final String testCasePropertyName;
   private final File sourceDirectory;
   private final Logger logger;
 
-  public TestParser(Context context, File sourceDirectory, Logger logger) {
+  public TestParser(Context context, String testCasePropertyName, 
+      File sourceDirectory, Logger logger) {
     this.context = context;
+    this.testCasePropertyName = testCasePropertyName;
     this.sourceDirectory = sourceDirectory;
     this.logger = logger;
   }
@@ -82,10 +85,10 @@ public class TestParser {
           } else if(included.isEmpty() || included.contains(testName)) {
             if(isolated.contains(testName)) {
               logger.info("Executing isolated unit test " + testName);
-              result.add(new UnitTestBatch(testName, false));
+              result.add(new UnitTestBatch(testCasePropertyName, testName, false));
             } else {
               logger.info("Executing parallel unit test " + testName);
-              result.add(new UnitTestBatch(testName, true));
+              result.add(new UnitTestBatch(testCasePropertyName, testName, true));
             }
           }
         }
@@ -157,17 +160,17 @@ public class TestParser {
         logger.info("Exlcuding test " + driver + " " + test);
       } else if(isolated.contains(test)) {
         logger.info("Executing isolated test " + driver + " " + test);
-        testBatches.add(new QFileTestBatch(driver, queryFilesProperty, Sets.newHashSet(test), isParallel));
+        testBatches.add(new QFileTestBatch(testCasePropertyName, driver, queryFilesProperty, Sets.newHashSet(test), isParallel));
       } else {
         if(testBatch.size() >= batchSize) {
-          testBatches.add(new QFileTestBatch(driver, queryFilesProperty, Sets.newHashSet(testBatch), isParallel));
+          testBatches.add(new QFileTestBatch(testCasePropertyName, driver, queryFilesProperty, Sets.newHashSet(testBatch), isParallel));
           testBatch = Lists.newArrayList();
         }
         testBatch.add(test);
       }
     }
     if(!testBatch.isEmpty()) {
-      testBatches.add(new QFileTestBatch(driver, queryFilesProperty, Sets.newHashSet(testBatch), isParallel));
+      testBatches.add(new QFileTestBatch(testCasePropertyName, driver, queryFilesProperty, Sets.newHashSet(testBatch), isParallel));
     }
     return testBatches;
   }

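The parser now threads the configured test-case property name into every batch it builds, so both QFileTestBatch and UnitTestBatch emit -D<property>=<class> with the right key for the chosen build tool. A brief sketch of the wiring a caller might do (conf is a TestConfiguration as above; the variable names are illustrative):

    // Hedged sketch, not from this commit:
    TestParser parser = new TestParser(context, conf.getTestCasePropertyName(),
        sourceDirectory, logger);
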
Modified: hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/UnitTestBatch.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/UnitTestBatch.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/UnitTestBatch.java (original)
+++ hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/UnitTestBatch.java Tue Oct 22 17:58:59 2013
@@ -20,15 +20,18 @@ package org.apache.hive.ptest.execution.
 
 public class UnitTestBatch implements TestBatch {
 
+  private final String testCasePropertyName;
   private final String testName;
   private final boolean isParallel;
-  public UnitTestBatch(String testName, boolean isParallel) {
+
+  public UnitTestBatch(String testCasePropertyName, String testName, boolean isParallel) {
+    this.testCasePropertyName = testCasePropertyName;
     this.testName = testName;
     this.isParallel = isParallel;
   }
   @Override
   public String getTestArguments() {
-    return String.format("-Dtestcase=%s", testName);
+    return String.format("-D%s=%s", testCasePropertyName, testName);
   }
 
   @Override
@@ -36,6 +39,10 @@ public class UnitTestBatch implements Te
     return testName;
   }
   @Override
+  public String getTestClass() {
+    return testName;
+  }
+  @Override
   public String toString() {
     return "UnitTestBatch [testName=" + testName + ", isParallel=" + isParallel
         + "]";

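The effect is easiest to see side by side; using "test" as the Maven property name is an assumption, matching the -Dtest=... convention used elsewhere in this commit:

    // Hypothetical sketch, not from this commit:
    new UnitTestBatch("testcase", "TestCliDriver", false).getTestArguments(); // "-Dtestcase=TestCliDriver"
    new UnitTestBatch("test", "TestCliDriver", false).getTestArguments();     // "-Dtest=TestCliDriver"
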
Modified: hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java (original)
+++ hive/branches/maven/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java Tue Oct 22 17:58:59 2013
@@ -53,12 +53,12 @@ public class RSyncCommandExecutor {
         retry = false;
         if(command.getType() == RSyncCommand.Type.TO_LOCAL) {
           cmd = new LocalCommand(mLogger, collector,
-              String.format("timeout 1h rsync -qaPe \"ssh -i %s\" --timeout 600 %s@%s:%s %s",
+              String.format("timeout 1h rsync -vaPe \"ssh -i %s\" --timeout 600 %s@%s:%s %s",
                   command.getPrivateKey(), command.getUser(), command.getHost(),
                   command.getRemoteFile(), command.getLocalFile()));
         } else if(command.getType() == RSyncCommand.Type.FROM_LOCAL) {
           cmd = new LocalCommand(mLogger, collector,
-              String.format("timeout 1h rsync -qaPe \"ssh -i %s\" --timeout 600 --delete --delete-during --force %s %s@%s:%s",
+              String.format("timeout 1h rsync -vaPe \"ssh -i %s\" --timeout 600 --delete --delete-during --force %s %s@%s:%s",
                   command.getPrivateKey(), command.getLocalFile(), command.getUser(), command.getHost(),
                   command.getRemoteFile()));
         } else {

Modified: hive/branches/maven/testutils/ptest2/src/main/resources/batch-exec.vm
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/resources/batch-exec.vm?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/resources/batch-exec.vm (original)
+++ hive/branches/maven/testutils/ptest2/src/main/resources/batch-exec.vm Tue Oct 22 17:58:59 2013
@@ -17,8 +17,9 @@
 ##### Remember, this is a velocity template 
 set -x
 umask 0022
-find $localDir/$instanceName/${repositoryName}-source/build -name 'TEST-*.xml' -delete
-find $localDir/$instanceName/${repositoryName}-source/build -name 'hive.log' -delete
+echo $$
+ps -e -o pid,pgrp,user,args
+ps x -o  "%p %r %y %x %c "
 chmod -R u+w $logDir
 rm -rf $logDir
 # makes $logDir and $logDir/tmp
@@ -33,15 +34,57 @@ then
   export PATH=$JAVA_HOME/bin/:$PATH
 fi
 export ANT_OPTS="-Xmx1g -XX:MaxPermSize=256m -Djava.io.tmpdir=$logDir/tmp ${antEnvOpts}"
+export M2_OPTS="-Xmx1g -XX:MaxPermSize=256m -Djava.io.tmpdir=$logDir/tmp ${mavenEnvOpts}"
 export HADOOP_ROOT_LOGGER=INFO,console
 export HADOOP_OPTS="-Dhive.log.dir=$logDir -Dhive.query.id=hadoop -Djava.io.tmpdir=$logDir/tmp"
-cd $localDir/$instanceName/${repositoryName}-source && \
-  timeout 2h ant test -Dtest.junit.output.format=xml $antArgs \
-   -Divy.default.ivy.user.dir=$localDir/$instanceName/ivy \
-   -Divy.default.always.check.exact.revision=false \
-   -Divy.cache.ttl.default=eternal -Divy.checkmodified=false \
-   -Dmvn.local.repo=$localDir/$instanceName/maven \
-   $antArgs $antTestArgs $testArguments 1>$logDir/ant-test.txt 2>&1
+cd $localDir/$instanceName/${repositoryName}-source || exit 1
+if [[ -s batch.pid ]]
+then
+  while read pid
+  do
+    if kill -9 -$pid 2>/dev/null
+    then
+      echo "Killed process group $pid"
+      sleep 1
+    fi
+  done < batch.pid
+fi
+echo "$$" > batch.pid
+find ./ -name 'TEST-*.xml' -delete
+find ./ -name 'hive.log' -delete
+find ./ -name junit_metastore_db | xargs -r rm -rf
+ret=0
+if [[ "${buildTool}" == "maven" ]]
+then
+  testModule=$(find ./ -name '${testClass}.java' | awk -F'/' '{print $2}')
+  if [[ -z "$testModule" ]]
+  then
+    testModule=./
+  fi
+  pushd $testModule
+  timeout 2h mvn -B -o test -Dmaven.repo.local=$localDir/$instanceName/maven \
+    $mavenArgs $mavenTestArgs $testArguments 1>$logDir/maven-test.txt 2>&1 </dev/null &
+#[[
+  pid=$!
+]]#
+  popd
+elif [[ "${buildTool}" == "ant" ]]
+then
+  timeout 2h ant ${antTestTarget} -Dtest.junit.output.format=xml \
+    -Divy.default.ivy.user.dir=$localDir/$instanceName/ivy \
+    -Divy.default.always.check.exact.revision=false \
+    -Divy.cache.ttl.default=eternal -Divy.checkmodified=false \
+    -Dmvn.local.repo=$localDir/$instanceName/maven \
+    $antArgs $antTestArgs $testArguments 1>$logDir/ant-test.txt 2>&1 </dev/null &
+#[[
+  pid=$!
+]]#
+else
+  echo "Unknown build tool ${buildTool}"
+  exit 127
+fi
+echo $pid >> batch.pid
+wait $pid
 ret=$?
 if [[ $ret -ne 0 ]]
 then
@@ -52,19 +95,12 @@ then
     echo "Number of failed tests $numOfFailedTests exceeded threshold, not copying source"
   fi
 fi
-if [[ -f $localDir/$instanceName/${repositoryName}-source/build/ql/tmp/hive.log ]]
-then
-  mv $localDir/$instanceName/${repositoryName}-source/build/ql/tmp/hive.log $logDir/hive.log
-fi
-batchName=$batchName
-for testOutputFile in $(find $localDir/$instanceName/${repositoryName}-source/ -name 'TEST-*.xml')
-do
-  mv "$testOutputFile" "$logDir/TEST-$batchName-$(basename $testOutputFile)"
-done
+find ./ -type f -name hive.log | \
+  xargs -I {} sh -c 'f=$(basename {}); test -f ${logDir}/$f && f=$f-$(uuidgen); mv {} ${logDir}/$f'
+find ./ -type f -name 'TEST-*.xml' | \
+  xargs -I {} sh -c 'f=TEST-${batchName}-$(basename {}); test -f ${logDir}/$f && f=$f-$(uuidgen); mv {} ${logDir}/$f'
 if [[ -f $logDir/.log ]]
 then
   mv $logDir/.log $logDir/dot.log
-else
-  echo "$logDir/.log does not exist"
 fi
 exit $ret

Modified: hive/branches/maven/testutils/ptest2/src/main/resources/smart-apply-patch.sh
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/resources/smart-apply-patch.sh?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/resources/smart-apply-patch.sh (original)
+++ hive/branches/maven/testutils/ptest2/src/main/resources/smart-apply-patch.sh Tue Oct 22 17:58:59 2013
@@ -84,7 +84,7 @@ elif $PATCH -p1 -E --dry-run < $PATCH_FI
 elif $PATCH -p2 -E --dry-run < $PATCH_FILE 2>&1 > /dev/null; then
   PLEVEL=2
 else
-  echo "The patch does not appear to apply with p0 to p2";
+  echo "The patch does not appear to apply with p0, p1, or p2";
   cleanup 1;
 fi
 

Modified: hive/branches/maven/testutils/ptest2/src/main/resources/source-prep.vm
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/main/resources/source-prep.vm?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/main/resources/source-prep.vm (original)
+++ hive/branches/maven/testutils/ptest2/src/main/resources/source-prep.vm Tue Oct 22 17:58:59 2013
@@ -22,9 +22,14 @@ then
   export PATH=$JAVA_HOME/bin/:$PATH
 fi
 export ANT_OPTS="-Xmx1g -XX:MaxPermSize=256m ${antEnvOpts}"
+export M2_OPTS="-Xmx1g -XX:MaxPermSize=256m ${mavenEnvOpts}"
 cd $workingDir/
 (
-  mkdir -p {maven,ivy}
+  if [[ "$clearLibraryCache" == "true" ]]
+  then
+    rm -rf ivy maven
+  fi
+  mkdir -p maven ivy
   if [[ "${repositoryType}" = "svn" ]]
   then
     if [[ -n "$branch" ]]
@@ -77,11 +82,22 @@ cd $workingDir/
     chmod +x $patchCommandPath
     $patchCommandPath $patchFilePath
   fi
-  if [[ "$clearLibraryCache" == "true" ]]
+  if [[ "${buildTool}" == "maven" ]]
   then
-    rm -rf $workingDir/ivy $workingDir/maven
-    mkdir $workingDir/ivy $workingDir/maven
-  fi
-  ant $antArgs -Divy.default.ivy.user.dir=$workingDir/ivy -Dmvn.local.repo=$workingDir/maven clean package test -Dtestcase=nothing
+    rm -rf $workingDir/maven/org/apache/hive
+    mvn -B clean install -DskipTests -Dmaven.repo.local=$workingDir/maven
+    mvn -B test -Dmaven.repo.local=$workingDir/maven -Dtest=TestDummy
+    cd itests
+    mvn -B clean install -DskipTests -Dmaven.repo.local=$workingDir/maven
+    mvn -B test -Dmaven.repo.local=$workingDir/maven -Dtest=TestDummy
+  elif [[ "${buildTool}" == "ant" ]]
+  then
+    ant $antArgs -Divy.default.ivy.user.dir=$workingDir/ivy \
+      -Dmvn.local.repo=$workingDir/maven clean package test \
+      -Dtestcase=nothing
+  else
+    echo "Unknown build tool ${buildTool}"
+    exit 127
+  fi
 ) 2>&1 | tee $logDir/source-prep.txt
-exit ${PIPESTATUS[0]}
\ No newline at end of file
+exit ${PIPESTATUS[0]}

Modified: hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java (original)
+++ hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java Tue Oct 22 17:58:59 2013
@@ -69,11 +69,11 @@ public class TestExecutionPhase extends 
   private void setupQFile(boolean isParallel) throws Exception {
     testDir = Dirs.create( new File(baseDir, "test"));
     Assert.assertTrue(new File(testDir, QFILENAME).createNewFile());
-    testBatch = new QFileTestBatch(DRIVER, "qfile", Sets.newHashSet(QFILENAME), isParallel);
+    testBatch = new QFileTestBatch("testcase", DRIVER, "qfile", Sets.newHashSet(QFILENAME), isParallel);
     testBatches = Collections.singletonList(testBatch);
   }
   private void setupUnitTest() throws Exception {
-    testBatch = new UnitTestBatch(DRIVER, false);
+    testBatch = new UnitTestBatch("testcase", DRIVER, false);
     testBatches = Collections.singletonList(testBatch);
   }
   private void copyTestOutput(String resource, File directory, String name) throws Exception {

Modified: hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java (original)
+++ hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java Tue Oct 22 17:58:59 2013
@@ -104,10 +104,10 @@ public class TestHostExecutor {
     parallelWorkQueue = new LinkedBlockingQueue<TestBatch>();
     isolatedWorkQueue = new LinkedBlockingQueue<TestBatch>();
     failedTestResults = Sets.newHashSet();
-    testBatchParallel1 = new UnitTestBatch(DRIVER_PARALLEL_1, true);
-    testBatchParallel2 = new UnitTestBatch(DRIVER_PARALLEL_2, true);
-    testBatchIsolated1 = new UnitTestBatch(DRIVER_ISOLATED_1, false);
-    testBatchIsolated2 = new UnitTestBatch(DRIVER_ISOLATED_2, false);
+    testBatchParallel1 = new UnitTestBatch("testcase", DRIVER_PARALLEL_1, true);
+    testBatchParallel2 = new UnitTestBatch("testcase", DRIVER_PARALLEL_2, true);
+    testBatchIsolated1 = new UnitTestBatch("testcase", DRIVER_ISOLATED_1, false);
+    testBatchIsolated2 = new UnitTestBatch("testcase", DRIVER_ISOLATED_2, false);
     executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(2));
     localCommandFactory = new MockLocalCommandFactory(LOG);
     localCommand = mock(LocalCommand.class);

Modified: hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java (original)
+++ hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java Tue Oct 22 17:58:59 2013
@@ -53,7 +53,6 @@ public class TestScripts  {
       FileUtils.deleteQuietly(baseDir);
     }
   }
-
   @Test
   public void testBatch() throws Throwable {
     Map<String, String> templateVariables = Maps.newHashMap();
@@ -62,7 +61,9 @@ public class TestScripts  {
     templateVariables.put("branch", "branch-1");
     templateVariables.put("localDir", "/some/local/dir");
     templateVariables.put("workingDir", "/some/working/dir");
+    templateVariables.put("buildTool", "maven");
     templateVariables.put("antArgs", "-Dant=arg1");
+    templateVariables.put("testClass", "TestCliDriver");
     templateVariables.put("buildTag", "build-1");
     templateVariables.put("logDir", "/some/log/dir");
     templateVariables.put("instanceName", "instance-1");
@@ -74,6 +75,7 @@ public class TestScripts  {
     templateVariables.put("javaHome", "/usr/java/jdk1.7");
     templateVariables.put("antEnvOpts", "-Dhttp.proxyHost=somehost -Dhttp.proxyPort=3128");
     templateVariables.put("antTestArgs", "-DgrammarBuild.notRequired=true -Dskip.javadoc=true");
+    templateVariables.put("antTestTarget", "testonly");
     String template = readResource("batch-exec.vm");
     String actual = getTemplateResult(template, templateVariables);
     Approvals.verify(actual);
@@ -86,6 +88,8 @@ public class TestScripts  {
     templateVariables.put("branch", "branch-1");
     templateVariables.put("localDir", "/some/local/dir");
     templateVariables.put("workingDir", "/some/working/dir");
+    templateVariables.put("buildTool", "ant");
+    templateVariables.put("testClass", "TestCliDriver");
     templateVariables.put("antArgs", "-Dant=arg1");
     templateVariables.put("buildTag", "build-1");
     templateVariables.put("logDir", "/some/log/dir");
@@ -111,6 +115,7 @@ public class TestScripts  {
     templateVariables.put("branch", "branch-1");
     templateVariables.put("localDir", "/some/local/dir");
     templateVariables.put("workingDir", "/some/working/dir");
+    templateVariables.put("buildTool", "ant");
     templateVariables.put("antArgs", "-Dant=arg1");
     templateVariables.put("buildTag", "build-1");
     templateVariables.put("logDir", "/some/log/dir");
@@ -150,6 +155,7 @@ public class TestScripts  {
     templateVariables.put("branch", "");
     templateVariables.put("localDir", "/some/local/dir");
     templateVariables.put("workingDir", "/some/working/dir");
+    templateVariables.put("buildTool", "maven");
     templateVariables.put("antArgs", "-Dant=arg1");
     templateVariables.put("buildTag", "build-1");
     templateVariables.put("logDir", "/some/log/dir");

Modified: hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt
URL: http://svn.apache.org/viewvc/hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt?rev=1534711&r1=1534710&r2=1534711&view=diff
==============================================================================
--- hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt (original)
+++ hive/branches/maven/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt Tue Oct 22 17:58:59 2013
@@ -16,8 +16,9 @@
 
 set -x
 umask 0022
-find /some/local/dir/instance-1/apache-source/build -name 'TEST-*.xml' -delete
-find /some/local/dir/instance-1/apache-source/build -name 'hive.log' -delete
+echo $$
+ps -e -o pid,pgrp,user,args
+ps x -o  "%p %r %y %x %c "
 chmod -R u+w /some/log/dir
 rm -rf /some/log/dir
 # makes /some/log/dir and /some/log/dir/tmp
@@ -32,15 +33,57 @@ then
   export PATH=$JAVA_HOME/bin/:$PATH
 fi
 export ANT_OPTS="-Xmx1g -XX:MaxPermSize=256m -Djava.io.tmpdir=/some/log/dir/tmp -Dhttp.proxyHost=somehost -Dhttp.proxyPort=3128"
+export M2_OPTS="-Xmx1g -XX:MaxPermSize=256m -Djava.io.tmpdir=/some/log/dir/tmp ${mavenEnvOpts}"
 export HADOOP_ROOT_LOGGER=INFO,console
 export HADOOP_OPTS="-Dhive.log.dir=/some/log/dir -Dhive.query.id=hadoop -Djava.io.tmpdir=/some/log/dir/tmp"
-cd /some/local/dir/instance-1/apache-source && \
-  timeout 2h ant test -Dtest.junit.output.format=xml -Dant=arg1 \
-   -Divy.default.ivy.user.dir=/some/local/dir/instance-1/ivy \
-   -Divy.default.always.check.exact.revision=false \
-   -Divy.cache.ttl.default=eternal -Divy.checkmodified=false \
-   -Dmvn.local.repo=/some/local/dir/instance-1/maven \
-   -Dant=arg1  -Dtest=arg1 1>/some/log/dir/ant-test.txt 2>&1
+cd /some/local/dir/instance-1/apache-source || exit 1
+if [[ -s batch.pid ]]
+then
+  while read pid
+  do
+    if kill -9 -$pid 2>/dev/null
+    then
+      echo "Killed process group $pid"
+      sleep 1
+    fi
+  done < batch.pid
+fi
+echo "$$" > batch.pid
+find ./ -name 'TEST-*.xml' -delete
+find ./ -name 'hive.log' -delete
+find ./ -name junit_metastore_db | xargs -r rm -rf
+ret=0
+if [[ "ant" == "maven" ]]
+then
+  testModule=$(find ./ -name 'TestCliDriver.java' | awk -F'/' '{print $2}')
+  if [[ -z "$testModule" ]]
+  then
+    testModule=./
+  fi
+  pushd $testModule
+  timeout 2h mvn -o test -Dmaven.repo.local=/some/local/dir/instance-1/maven \
+    $mavenArgs $mavenTestArgs -Dtest=arg1 1>/some/log/dir/maven-test.txt 2>&1 </dev/null &
+
+  pid=$!
+
+  popd
+elif [[ "ant" == "ant" ]]
+then
+  timeout 2h ant ${antTestTarget} -Dtest.junit.output.format=xml \
+    -Divy.default.ivy.user.dir=/some/local/dir/instance-1/ivy \
+    -Divy.default.always.check.exact.revision=false \
+    -Divy.cache.ttl.default=eternal -Divy.checkmodified=false \
+    -Dmvn.local.repo=/some/local/dir/instance-1/maven \
+    -Dant=arg1  -Dtest=arg1 1>/some/log/dir/ant-test.txt 2>&1 </dev/null &
+
+  pid=$!
+
+else
+  echo "Unknown build tool ant"
+  exit 127
+fi
+echo $pid >> batch.pid
+wait $pid
 ret=$?
 if [[ $ret -ne 0 ]]
 then
@@ -51,19 +94,12 @@ then
     echo "Number of failed tests 20 exceeded threshold, not copying source"
   fi
 fi
-if [[ -f /some/local/dir/instance-1/apache-source/build/ql/tmp/hive.log ]]
-then
-  mv /some/local/dir/instance-1/apache-source/build/ql/tmp/hive.log /some/log/dir/hive.log
-fi
-batchName=batch-1
-for testOutputFile in $(find /some/local/dir/instance-1/apache-source/ -name 'TEST-*.xml')
-do
-  mv "$testOutputFile" "/some/log/dir/TEST-$batchName-$(basename $testOutputFile)"
-done
+find ./ -type f -name hive.log | \
+  xargs -I {} sh -c 'f=$(basename {}); test -f /some/log/dir/$f && f=$f-$(uuidgen); mv {} /some/log/dir/$f'
+find ./ -type f -name 'TEST-*.xml' | \
+  xargs -I {} sh -c 'f=TEST-batch-1-$(basename {}); test -f /some/log/dir/$f && f=$f-$(uuidgen); mv {} /some/log/dir/$f'
 if [[ -f /some/log/dir/.log ]]
 then
   mv /some/log/dir/.log /some/log/dir/dot.log
-else
-  echo "/some/log/dir/.log does not exist"
 fi
 exit $ret